Mirror of https://github.com/adamhathcock/sharpcompress.git, synced 2026-02-08 05:27:04 +00:00
Compare commits
10 Commits
* b3a4fed8be
* d0b4af6666
* 81ab5c189d
* 6ef3be4b5c
* 9f90a1d651
* ce9a3fd1ef
* 7c6f05058e
* a8c3a7439e
* 839b3ab0cf
* 44d54db80e
@@ -1,11 +1,13 @@
# Archive Formats

## Accessing Archives

Archive classes allow random access to a seekable stream.
Reader classes allow forward-only reading
Writer classes allow forward-only Writing

## Supported Format Table

| Archive Format | Compression Format(s) | Compress/Decompress | Archive API | Reader API | Writer API |
| --- | --- | --- | --- | --- | --- |
| Rar | Rar | Decompress (1) | RarArchive | RarReader | N/A |
@@ -15,11 +17,12 @@ Writer classes allow forward-only Writing
| 7Zip (4) | LZMA, LZMA2, BZip2, PPMd, BCJ, BCJ2, Deflate | Decompress | SevenZipArchive | N/A | N/A |

1. SOLID Rars are only supported in the RarReader API.
-2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading is supported with the Archive API
+2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading is supported.
3. The Tar format requires a file size in the header. If no size is specified to the TarWriter and the stream is not seekable, then an exception will be thrown.
4. The 7Zip format doesn't allow for reading as a forward-only stream so 7Zip is only supported through the Archive API

## Compressors

For those who want to directly compress/decompress bits

| Compressor | Compress/Decompress |

@@ -31,6 +31,14 @@ I'm always looking for help or ideas. Please submit code or email with ideas. Un

## Version Log

### Version 0.15.2

* [Fix invalid headers](https://github.com/adamhathcock/sharpcompress/pull/210) - fixes an issue creating large-ish zip archives that was introduced with zip64 reading.

### Version 0.15.1

* [Zip64 extending information and ZipReader](https://github.com/adamhathcock/sharpcompress/pull/206)

### Version 0.15.0

* [Add zip64 support for ZipArchive extraction](https://github.com/adamhathcock/sharpcompress/pull/205)
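The "Accessing Archives" section above draws the key distinction: Archive classes need a seekable stream and give random access to entries, while Reader classes stream entries forward-only. A minimal sketch of the two styles for a zip file; `ZipArchive.Open`, `entry.Size`, and the `SharpCompress.Archives`/`SharpCompress.Readers` namespaces are assumptions on my part for this version of the library, while `ReaderFactory.Open`, `MoveToNextEntry`, and `Entry.Key` appear in the test code further down this diff:

```csharp
using System;
using System.IO;
using SharpCompress.Archives.Zip;   // namespace assumed; older releases used SharpCompress.Archive.Zip
using SharpCompress.Readers;        // namespace assumed; older releases used SharpCompress.Reader

class AccessStyles
{
    // Archive API: random access, requires a seekable stream (here, a file path).
    static void ListWithArchive(string path)
    {
        using (var archive = ZipArchive.Open(path))
        {
            foreach (var entry in archive.Entries)
            {
                Console.WriteLine($"{entry.Key}: {entry.Size} bytes");
            }
        }
    }

    // Reader API: forward-only, so it also works on streams that cannot seek.
    static void ListWithReader(Stream stream)
    {
        using (var reader = ReaderFactory.Open(stream))
        {
            while (reader.MoveToNextEntry())
            {
                Console.WriteLine(reader.Entry.Key);
            }
        }
    }
}
```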
@@ -1,4 +1,5 @@
|
||||
using System.IO;
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
|
||||
namespace SharpCompress.Common.Zip.Headers
|
||||
@@ -41,6 +42,23 @@ namespace SharpCompress.Common.Zip.Headers
|
||||
{
|
||||
Name = ((ExtraUnicodePathExtraField)unicodePathExtra).UnicodeName;
|
||||
}
|
||||
|
||||
var zip64ExtraData = Extra.OfType<Zip64ExtendedInformationExtraField>().FirstOrDefault();
|
||||
if (zip64ExtraData != null)
|
||||
{
|
||||
if (CompressedSize == uint.MaxValue)
|
||||
{
|
||||
CompressedSize = zip64ExtraData.CompressedSize;
|
||||
}
|
||||
if (UncompressedSize == uint.MaxValue)
|
||||
{
|
||||
UncompressedSize = zip64ExtraData.UncompressedSize;
|
||||
}
|
||||
if (RelativeOffsetOfEntryHeader == uint.MaxValue)
|
||||
{
|
||||
RelativeOffsetOfEntryHeader = zip64ExtraData.RelativeOffsetOfEntryHeader;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
internal override void Write(BinaryWriter writer)
|
||||
@@ -52,8 +70,8 @@ namespace SharpCompress.Common.Zip.Headers
|
||||
writer.Write(LastModifiedTime);
|
||||
writer.Write(LastModifiedDate);
|
||||
writer.Write(Crc);
|
||||
writer.Write(CompressedSize);
|
||||
writer.Write(UncompressedSize);
|
||||
writer.Write((uint)CompressedSize);
|
||||
writer.Write((uint)UncompressedSize);
|
||||
|
||||
byte[] nameBytes = EncodeString(Name);
|
||||
writer.Write((ushort)nameBytes.Length);
|
||||
@@ -77,7 +95,7 @@ namespace SharpCompress.Common.Zip.Headers
|
||||
|
||||
public ushort VersionNeededToExtract { get; set; }
|
||||
|
||||
public uint RelativeOffsetOfEntryHeader { get; set; }
|
||||
public long RelativeOffsetOfEntryHeader { get; set; }
|
||||
|
||||
public uint ExternalFileAttributes { get; set; }
|
||||
|
||||
|
||||
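The fallback above relies on the zip64 convention: when a size or offset does not fit in a 4-byte header field, the header stores the sentinel 0xFFFFFFFF and the real 64-bit value lives in the Zip64 extended information extra field (id 0x0001). A standalone sketch of that rule, with names of my own rather than SharpCompress's:

```csharp
using System.IO;

static class Zip64Convention
{
    // 0xFFFFFFFF in a fixed 4-byte zip header field means "look in the Zip64 extra field".
    const uint Zip64Sentinel = uint.MaxValue;

    public static long Resolve(uint headerField, long? zip64Value)
    {
        if (headerField != Zip64Sentinel)
        {
            return headerField;          // the 32-bit field is authoritative
        }
        if (zip64Value == null)
        {
            throw new InvalidDataException("zip64 sentinel present but no Zip64 extra field");
        }
        return zip64Value.Value;         // real 64-bit value comes from the extra field
    }
}
```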
@@ -32,6 +32,19 @@ namespace SharpCompress.Common.Zip.Headers
            {
                Name = ((ExtraUnicodePathExtraField)unicodePathExtra).UnicodeName;
            }

            var zip64ExtraData = Extra.OfType<Zip64ExtendedInformationExtraField>().FirstOrDefault();
            if (zip64ExtraData != null)
            {
                if (CompressedSize == uint.MaxValue)
                {
                    CompressedSize = zip64ExtraData.CompressedSize;
                }
                if (UncompressedSize == uint.MaxValue)
                {
                    UncompressedSize = zip64ExtraData.UncompressedSize;
                }
            }
        }

        internal override void Write(BinaryWriter writer)
@@ -42,8 +55,8 @@ namespace SharpCompress.Common.Zip.Headers
            writer.Write(LastModifiedTime);
            writer.Write(LastModifiedDate);
            writer.Write(Crc);
-           writer.Write(CompressedSize);
-           writer.Write(UncompressedSize);
+           writer.Write((uint)CompressedSize);
+           writer.Write((uint)UncompressedSize);

            byte[] nameBytes = EncodeString(Name);
@@ -1,5 +1,6 @@
using System;
using System.Text;
using SharpCompress.Converters;

namespace SharpCompress.Common.Zip.Headers
{
@@ -11,7 +12,8 @@ namespace SharpCompress.Common.Zip.Headers

        // Third Party Mappings
        // -Info-ZIP Unicode Path Extra Field
-       UnicodePathExtraField = 0x7075
+       UnicodePathExtraField = 0x7075,
+       Zip64ExtendedInformationExtraField = 0x0001
    }

    internal class ExtraData
@@ -47,6 +49,73 @@ namespace SharpCompress.Common.Zip.Headers
        }
    }

    internal class Zip64ExtendedInformationExtraField : ExtraData
    {

        public Zip64ExtendedInformationExtraField(ExtraDataType type, ushort length, byte[] dataBytes)
        {
            Type = type;
            Length = length;
            DataBytes = dataBytes;
            Process();
        }

        //From the spec values are only in the extradata if the standard
        //value is set to 0xFFFF, but if one of the sizes are present, both are.
        //Hence if length == 4 volume only
        //      if length == 8 offset only
        //      if length == 12 offset + volume
        //      if length == 16 sizes only
        //      if length == 20 sizes + volume
        //      if length == 24 sizes + offset
        //      if length == 28 everything.
        //It is unclear how many of these are used in the wild.

        private void Process()
        {
            switch (DataBytes.Length)
            {
                case 4:
                    VolumeNumber = DataConverter.LittleEndian.GetUInt32(DataBytes, 0);
                    return;
                case 8:
                    RelativeOffsetOfEntryHeader = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
                    return;
                case 12:
                    RelativeOffsetOfEntryHeader = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
                    VolumeNumber = DataConverter.LittleEndian.GetUInt32(DataBytes, 8);
                    return;
                case 16:
                    UncompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
                    CompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 8);
                    return;
                case 20:
                    UncompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
                    CompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 8);
                    VolumeNumber = DataConverter.LittleEndian.GetUInt32(DataBytes, 16);
                    return;
                case 24:
                    UncompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
                    CompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 8);
                    RelativeOffsetOfEntryHeader = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 16);
                    return;
                case 28:
                    UncompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
                    CompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 8);
                    RelativeOffsetOfEntryHeader = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 16);
                    VolumeNumber = DataConverter.LittleEndian.GetUInt32(DataBytes, 24);
                    return;
                default:
                    throw new ArchiveException("Unexpected size of of Zip64 extended information extra field");
            }
        }

        public long UncompressedSize { get; private set; }
        public long CompressedSize { get; private set; }
        public long RelativeOffsetOfEntryHeader { get; private set; }
        public uint VolumeNumber { get; private set; }
    }

    internal static class LocalEntryHeaderExtraFactory
    {
        internal static ExtraData Create(ExtraDataType type, ushort length, byte[] extraData)
@@ -60,6 +129,13 @@ namespace SharpCompress.Common.Zip.Headers
                        Length = length,
                        DataBytes = extraData
                    };
                case ExtraDataType.Zip64ExtendedInformationExtraField:
                    return new Zip64ExtendedInformationExtraField
                        (
                            type,
                            length,
                            extraData
                        );
                default:
                    return new ExtraData
                    {
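The comment block inside `Zip64ExtendedInformationExtraField` encodes the layout of the zip64 extra-field data: 8-byte uncompressed size, 8-byte compressed size, 8-byte local-header offset, and 4-byte disk (volume) number, little-endian and in that order, with only the fields whose 32-bit counterparts hold the sentinel actually present. A standalone sketch of the 16-byte "sizes only" case, using plain `BitConverter` instead of the project's `DataConverter` (so it is illustrative, not SharpCompress code), which mirrors the `case 16` branch above:

```csharp
using System;

class Zip64ExtraFieldDemo
{
    static void Main()
    {
        // Build a 16-byte "sizes only" zip64 data block: uncompressed size then compressed
        // size, both 8-byte little-endian integers. BitConverter matches that layout on a
        // little-endian platform, which is the common case this sketch assumes.
        long uncompressed = 5000000000L;   // larger than uint.MaxValue, so zip64 is required
        long compressed = 4200000000L;
        byte[] data = new byte[16];
        BitConverter.GetBytes(uncompressed).CopyTo(data, 0);
        BitConverter.GetBytes(compressed).CopyTo(data, 8);

        // Read the block back the same way Process() does for DataBytes.Length == 16.
        long readUncompressed = BitConverter.ToInt64(data, 0);
        long readCompressed = BitConverter.ToInt64(data, 8);
        Console.WriteLine($"{readUncompressed} / {readCompressed}");
    }
}
```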
@@ -28,7 +28,7 @@ namespace SharpCompress.Common.Zip
            ZipHeader header = null;
            BinaryReader reader = new BinaryReader(rewindableStream);
            if (lastEntryHeader != null &&
-               FlagUtility.HasFlag(lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor))
+               (FlagUtility.HasFlag(lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor) || lastEntryHeader.IsZip64))
            {
                reader = (lastEntryHeader.Part as StreamingZipFilePart).FixStreamedFileLocation(ref rewindableStream);
                long? pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;

@@ -51,7 +51,7 @@ namespace SharpCompress.Common.Zip

        protected abstract Stream CreateBaseStream();

-       protected bool LeaveStreamOpen { get { return FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor); } }
+       protected bool LeaveStreamOpen { get { return FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor) || Header.IsZip64; } }

        protected Stream CreateDecompressionStream(Stream stream)
        {
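Both changes above add `|| Header.IsZip64` to checks that previously looked only at the post-data-descriptor flag. For context not stated in the diff: zip's general-purpose flag bit 3 means the local header's CRC and size fields are written after the entry data in a data descriptor, so a forward-only reader cannot know the entry length up front; zip64 sentinels in the 4-byte size fields create the same problem, which presumably is why the two cases now share the fix-up path. A small illustrative enum with the relevant values from the zip specification; this is not the library's own `HeaderFlags` definition, which is not part of this diff:

```csharp
using System;

// Zip general-purpose bit flag values from the zip specification (APPNOTE), shown only to
// explain the UsePostDataDescriptor flag referenced above.
[Flags]
enum GeneralPurposeBitFlags : ushort
{
    None = 0,
    Encrypted = 0x0001,             // bit 0: entry data is encrypted
    UsePostDataDescriptor = 0x0008, // bit 3: CRC and sizes follow the data in a descriptor
    Utf8 = 0x0800                   // bit 11: file name and comment are encoded as UTF-8
}
```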
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"version": "0.15.0",
|
||||
"version": "0.15.2",
|
||||
"title": "SharpCompress - Pure C# Decompression/Compression",
|
||||
"authors": [ "Adam Hathcock" ],
|
||||
"language": "en-US",
|
||||
|
||||
@@ -46,7 +46,7 @@ namespace SharpCompress.Test
        [Fact]
        public void GZip_Archive_NoAdd()
        {
-           string jpg = Path.Combine(ORIGINAL_FILES_PATH, "jpg\\test.jpg");
+           string jpg = Path.Combine(ORIGINAL_FILES_PATH, "jpg", "test.jpg");
            ResetScratch();
            using (Stream stream = File.Open(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"), FileMode.Open))
            using (var archive = GZipArchive.Open(stream))
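This test change, and the matching ones below, replace a hard-coded backslash with separate `Path.Combine` segments. The diff does not say why, but the reason is portability; a minimal standalone illustration:

```csharp
using System;
using System.IO;

class PathSeparatorNote
{
    static void Main()
    {
        // On Windows both calls produce jpg\test.jpg, but on Linux/macOS '\' is not a
        // directory separator: "jpg\\test.jpg" stays a single odd file name, while passing
        // the segments separately lets Path.Combine insert the platform's own separator.
        Console.WriteLine(Path.Combine("files", "jpg\\test.jpg"));
        Console.WriteLine(Path.Combine("files", "jpg", "test.jpg"));
    }
}
```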
@@ -115,7 +115,7 @@ namespace SharpCompress.Test
        [Fact]
        public void Tar_Random_Write_Add()
        {
-           string jpg = Path.Combine(ORIGINAL_FILES_PATH, "jpg\\test.jpg");
+           string jpg = Path.Combine(ORIGINAL_FILES_PATH, "jpg","test.jpg");
            string scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.mod.tar");
            string unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.mod.tar");
            string modified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");

@@ -210,16 +226,26 @@ namespace SharpCompress.Test

        protected void CompareArchivesByPath(string file1, string file2)
        {
            //don't compare the order. OS X reads files from the file system in a different order therefore makes the archive ordering different
            var archive1Entries = new List<string>();
            var archive2Entries = new List<string>();
            using (var archive1 = ReaderFactory.Open(File.OpenRead(file1)))
            using (var archive2 = ReaderFactory.Open(File.OpenRead(file2)))
            {
                while (archive1.MoveToNextEntry())
                {
                    Assert.True(archive2.MoveToNextEntry());
-                   Assert.Equal(archive1.Entry.Key, archive2.Entry.Key);
                    archive1Entries.Add(archive1.Entry.Key);
                    archive2Entries.Add(archive2.Entry.Key);
                }
                Assert.False(archive2.MoveToNextEntry());
            }
            archive1Entries.Sort();
            archive2Entries.Sort();
            for (int i = 0; i < archive1Entries.Count; i++)
            {
                Assert.Equal(archive1Entries[i], archive2Entries[i]);
            }
        }

        private static object lockObject = new object();
@@ -161,7 +161,7 @@ namespace SharpCompress.Test
        [Fact]
        public void Zip_Random_Write_Add()
        {
-           string jpg = Path.Combine(ORIGINAL_FILES_PATH, "jpg\\test.jpg");
+           string jpg = Path.Combine(ORIGINAL_FILES_PATH, "jpg","test.jpg");
            string scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Zip.deflate.mod.zip");
            string unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.mod.zip");
            string modified = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.mod2.zip");
@@ -14,6 +14,13 @@ namespace SharpCompress.Test
        {
            UseExtensionInsteadOfNameToVerify = true;
        }

        [Fact]
        public void Zip_Zip64_Streamed_Read()
        {
            Read("Zip.Zip64.zip", CompressionType.Deflate);
        }

        [Fact]
        public void Zip_ZipX_Streamed_Read()
        {
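The new `Zip_Zip64_Streamed_Read` test exercises the reader-side zip64 handling added in the header classes above. The `Read` test helper itself is not shown in this diff, so here is a hedged sketch of what a forward-only zip64 read looks like with the Reader API; `ReaderFactory.Open`, `MoveToNextEntry`, and `Entry.Key` appear in the test code earlier in this diff, while `OpenEntryStream`, `Entry.IsDirectory`, and the `SharpCompress.Readers` namespace are assumptions about the public reader interface:

```csharp
using System.IO;
using SharpCompress.Readers;   // namespace assumed; older releases used SharpCompress.Reader

class StreamedZip64Read
{
    static void ExtractAll(string archivePath, string outputDir)
    {
        // A single forward-only pass with no seeking back -- exactly the situation where
        // the zip64 sentinel/extra-field handling in the headers above matters.
        using (Stream stream = File.OpenRead(archivePath))
        using (var reader = ReaderFactory.Open(stream))
        {
            while (reader.MoveToNextEntry())
            {
                if (reader.Entry.IsDirectory)
                {
                    continue;
                }
                string target = Path.Combine(outputDir, reader.Entry.Key);
                string targetDir = Path.GetDirectoryName(target);
                if (!string.IsNullOrEmpty(targetDir))
                {
                    Directory.CreateDirectory(targetDir);
                }
                using (var entryStream = reader.OpenEntryStream())
                using (var output = File.Create(target))
                {
                    entryStream.CopyTo(output);
                }
            }
        }
    }
}
```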