Compare commits

..

20 Commits
0.23 ... 0.24

Author SHA1 Message Date
Adam Hathcock
5072a0f6f5 Merge pull request #471 from adamhathcock/release-024
Bump version and dependencies
2019-08-20 20:36:38 +01:00
Adam Hathcock
357dff1403 Bump version and dependencies 2019-08-20 14:29:47 -05:00
Adam Hathcock
a2bd66ded8 Merge pull request #460 from itn3000/tar-fix-garbled2
fix filename garbling in tar(#414)
2019-06-27 12:16:53 +01:00
itn3000
6bfa3c25a4 add more comments 2019-06-27 20:01:40 +09:00
itn3000
1ea9ab72c1 add comment for subtracting 2019-06-27 19:59:16 +09:00
itn3000
07c42b8725 replace magic number 2019-06-27 10:59:21 +09:00
itn3000
70392c32e2 use Buffer.BlockCopy for performance 2019-06-27 09:47:26 +09:00
itn3000
9b4b2a9f7c considering encoding in processing filename(#414)
modify test tar archive because it was not expected one.
(expected "тест.txt" in encoding 866, but actual is omitted upper byte)
2019-06-26 17:34:12 +09:00
Adam Hathcock
d3dd708b58 Merge pull request #457 from DannyBoyk/issue_456_zip_bounded_substreams_data_descriptors
Return a bounded substream when data descriptors are used in seekable zips
2019-06-04 13:42:24 +01:00
Daniel Nash
af264cdc58 Return a bounded substream when data descriptors are used in seekable zips 2019-06-04 08:31:42 -04:00
Adam Hathcock
cfd6df976f Merge pull request #455 from DannyBoyk/issue_454_zip_bad_extra_field
Handle a bad extra field in a local file header in zip files
2019-06-04 09:24:55 +01:00
Daniel Nash
b2bd20b47e Handle a bad extra field in a local file header in zip files 2019-06-03 13:02:28 -04:00
Adam Hathcock
ffea093e95 Merge pull request #453 from Lssikkes/master
Fix for clients failing on just having a 64 bit offset in ZIP64
2019-05-24 19:33:59 +01:00
Leroy Sikkes
78eb8fcf92 Fix for clients that don't support ZIP64 standard correctly in case headers are only pointed to in ZIP64 directory structure 2019-05-24 18:27:49 +02:00
Adam Hathcock
a052956881 Merge pull request #452 from Lssikkes/master
Various fixes for ZIP64 writer (zero byte entries, 32 bit where supported)
2019-05-24 16:17:48 +01:00
Lssikkes
9319ea6992 Updated ZIP64 writer to write 32 bit values to directory entries for better compatibility.
Support for zero byte files without corruption errors from WinRAR/7-zip
2019-05-24 16:14:30 +02:00
Adam Hathcock
4e5b70dbfa Merge pull request #444 from eugeny-trunin/mem-opt
Memory and speed optimization
2019-03-20 15:13:00 +00:00
evgeny
c68eaa8397 Memory and speed optimization 2019-03-20 17:46:57 +03:00
Adam Hathcock
bbb7c85ba7 Merge pull request #442 from turbolocust/master
Fix: ArchiveEncoding was ignored in TarWriterOptions
2019-03-19 08:31:31 +00:00
Matthias Fussenegger
8174359228 Fix: ArchiveEncoding was ignored in TarWriterOptions 2019-03-18 18:25:00 +01:00
17 changed files with 170 additions and 36 deletions

View File

@@ -1,7 +1,6 @@
#if !NO_CRYPTO
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using SharpCompress.Crypto;
@@ -23,12 +22,6 @@ namespace SharpCompress.Common.Rar
_salt = salt;
}
private byte[] ComputeHash(byte[] input)
{
var sha = SHA1.Create();
return sha.ComputeHash(input);
}
private void Initialize()
{
@@ -47,28 +40,27 @@ namespace SharpCompress.Common.Rar
rawPassword[i + rawLength] = _salt[i];
}
const int noOfRounds = (1 << 18);
IList<byte> bytes = new List<byte>();
const int iblock = 3;
byte[] digest;
byte[] data = new byte[(rawPassword.Length + iblock) * noOfRounds];
//TODO slow code below, find ways to optimize
for (int i = 0; i < noOfRounds; i++)
{
bytes.AddRange(rawPassword);
rawPassword.CopyTo(data, i * (rawPassword.Length + iblock));
bytes.AddRange(new[]
data[i * (rawPassword.Length + iblock) + rawPassword.Length + 0] = (byte)i;
data[i * (rawPassword.Length + iblock) + rawPassword.Length + 1] = (byte)(i >> 8);
data[i * (rawPassword.Length + iblock) + rawPassword.Length + 2] = (byte)(i >> CRYPTO_BLOCK_SIZE);
if (i % (noOfRounds / CRYPTO_BLOCK_SIZE) == 0)
{
(byte) i, (byte) (i >> 8), (byte) (i >> CRYPTO_BLOCK_SIZE)
});
if (i%(noOfRounds/CRYPTO_BLOCK_SIZE) == 0)
{
digest = ComputeHash(bytes.ToArray());
_aesInitializationVector[i/(noOfRounds/CRYPTO_BLOCK_SIZE)] = digest[19];
digest = SHA1.Create().ComputeHash(data, 0, (i + 1) * (rawPassword.Length + iblock));
_aesInitializationVector[i / (noOfRounds / CRYPTO_BLOCK_SIZE)] = digest[19];
}
}
digest = ComputeHash(bytes.ToArray());
digest = SHA1.Create().ComputeHash(data);
//slow code ends
byte[] aesKey = new byte[CRYPTO_BLOCK_SIZE];

View File

@@ -39,16 +39,17 @@ namespace SharpCompress.Common.Tar.Headers
WriteOctalBytes(0, buffer, 116, 8); // group ID
//ArchiveEncoding.UTF8.GetBytes("magic").CopyTo(buffer, 257);
if (Name.Length > 100)
var nameByteCount = ArchiveEncoding.GetEncoding().GetByteCount(Name);
if (nameByteCount > 100)
{
// Set mock filename and filetype to indicate the next block is the actual name of the file
WriteStringBytes("././@LongLink", buffer, 0, 100);
buffer[156] = (byte)EntryType.LongName;
WriteOctalBytes(Name.Length + 1, buffer, 124, 12);
WriteOctalBytes(nameByteCount + 1, buffer, 124, 12);
}
else
{
WriteStringBytes(Name, buffer, 0, 100);
WriteStringBytes(ArchiveEncoding.Encode(Name), buffer, 0, 100);
WriteOctalBytes(Size, buffer, 124, 12);
var time = (long)(LastModifiedTime.ToUniversalTime() - EPOCH).TotalSeconds;
WriteOctalBytes(time, buffer, 136, 12);
@@ -69,10 +70,17 @@ namespace SharpCompress.Common.Tar.Headers
output.Write(buffer, 0, buffer.Length);
if (Name.Length > 100)
if (nameByteCount > 100)
{
WriteLongFilenameHeader(output);
Name = Name.Substring(0, 100);
// update to a short name shorter than 100 - [max bytes of one character] bytes.
// subtracting those bytes is needed to prevent an infinite loop (example code is here).
//
// var bytes = Encoding.UTF8.GetBytes(new string(0x3042, 100));
// var truncated = Encoding.UTF8.GetBytes(Encoding.UTF8.GetString(bytes, 0, 100));
//
// and then infinite recursion occurs in WriteLongFilenameHeader because truncated.Length is 102.
Name = ArchiveEncoding.Decode(ArchiveEncoding.Encode(Name), 0, 100 - ArchiveEncoding.GetEncoding().GetMaxByteCount(1));
Write(output);
}
}
@@ -184,6 +192,17 @@ namespace SharpCompress.Common.Tar.Headers
return buffer;
}
private static void WriteStringBytes(byte[] name, byte[] buffer, int offset, int length)
{
int i = Math.Min(length, name.Length);
Buffer.BlockCopy(name, 0, buffer, offset, i);
// if Span<byte>.Fill can be used, it is more efficient
for (; i < length; ++i)
{
buffer[offset + i] = 0;
}
}
private static void WriteStringBytes(string name, byte[] buffer, int offset, int length)
{
int i;

View File

@@ -87,6 +87,15 @@ namespace SharpCompress.Common.Zip.Headers
}
ushort length = DataConverter.LittleEndian.GetUInt16(extra, i + 2);
// 7zip has this same kind of check to ignore extras blocks that don't conform to the standard 2-byte ID, 2-byte length, N-byte value.
// CPP/7Zip/Zip/ZipIn.cpp: CInArchive::ReadExtra
if (length > extra.Length)
{
// bad extras block
return;
}
byte[] data = new byte[length];
Buffer.BlockCopy(extra, i + 4, data, 0, length);
Extra.Add(LocalEntryHeaderExtraFactory.Create(type, length, data));
@@ -99,4 +108,4 @@ namespace SharpCompress.Common.Zip.Headers
internal bool IsZip64 => CompressedSize == uint.MaxValue;
}
}
}

View File

@@ -1,5 +1,6 @@
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip
{
@@ -7,11 +8,13 @@ namespace SharpCompress.Common.Zip
{
private bool _isLocalHeaderLoaded;
private readonly SeekableZipHeaderFactory _headerFactory;
private readonly DirectoryEntryHeader _directoryEntryHeader;
internal SeekableZipFilePart(SeekableZipHeaderFactory headerFactory, DirectoryEntryHeader header, Stream stream)
: base(header, stream)
{
_headerFactory = headerFactory;
_directoryEntryHeader = header;
}
internal override Stream GetCompressedStream()
@@ -36,6 +39,15 @@ namespace SharpCompress.Common.Zip
protected override Stream CreateBaseStream()
{
BaseStream.Position = Header.DataStartPosition.Value;
if ((Header.CompressedSize == 0)
&& FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor)
&& (_directoryEntryHeader?.HasData == true)
&& (_directoryEntryHeader?.CompressedSize != 0))
{
return new ReadOnlySubStream(BaseStream, _directoryEntryHeader.CompressedSize);
}
return BaseStream;
}
}

View File

@@ -2,9 +2,9 @@
<PropertyGroup>
<AssemblyTitle>SharpCompress - Pure C# Decompression/Compression</AssemblyTitle>
<NeutralLanguage>en-US</NeutralLanguage>
<VersionPrefix>0.23.0</VersionPrefix>
<AssemblyVersion>0.23.0</AssemblyVersion>
<FileVersion>0.23.0</FileVersion>
<VersionPrefix>0.24.0</VersionPrefix>
<AssemblyVersion>0.24.0</AssemblyVersion>
<FileVersion>0.24.0</FileVersion>
<Authors>Adam Hathcock</Authors>
<TargetFrameworks Condition="'$(LibraryFrameworks)'==''">net45;net35;netstandard1.0;netstandard1.3;netstandard2.0</TargetFrameworks>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>

View File

@@ -1,4 +1,3 @@
using SharpCompress.Archives;
using SharpCompress.Common;
namespace SharpCompress.Writers.Tar
@@ -18,6 +17,7 @@ namespace SharpCompress.Writers.Tar
internal TarWriterOptions(WriterOptions options) : this(options.CompressionType, true)
{
ArchiveEncoding = options.ArchiveEncoding;
}
}
}

View File

@@ -36,7 +36,8 @@ namespace SharpCompress.Writers.Zip
byte[] encodedComment = archiveEncoding.Encode(Comment);
var zip64_stream = Compressed >= uint.MaxValue || Decompressed >= uint.MaxValue;
var zip64 = zip64_stream || HeaderOffset >= uint.MaxValue || Zip64HeaderOffset != 0;
var zip64 = zip64_stream || HeaderOffset >= uint.MaxValue;
var usedCompression = compression;
var compressedvalue = zip64 ? uint.MaxValue : (uint)Compressed;
var decompressedvalue = zip64 ? uint.MaxValue : (uint)Decompressed;
@@ -56,17 +57,21 @@ namespace SharpCompress.Writers.Zip
if (!zip64_stream)
flags |= HeaderFlags.UsePostDataDescriptor;
if (compression == ZipCompressionMethod.LZMA)
if (usedCompression == ZipCompressionMethod.LZMA)
{
flags |= HeaderFlags.Bit1; // eos marker
}
}
// Support for zero byte files
if (Decompressed == 0 && Compressed == 0)
usedCompression = ZipCompressionMethod.None;
//constant sig, then version made by, then version to extract
outputStream.Write(new byte[] { 80, 75, 1, 2, version, 0, version, 0 }, 0, 8);
outputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)flags), 0, 2);
outputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)compression), 0, 2); // zipping method
outputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)usedCompression), 0, 2); // zipping method
outputStream.Write(DataConverter.LittleEndian.GetBytes(ModificationTime.DateTimeToDosTime()), 0, 4);
// zipping date and time

View File

@@ -383,6 +383,14 @@ namespace SharpCompress.Writers.Zip
originalStream.Position = (long)(entry.HeaderOffset + 6);
originalStream.WriteByte(0);
if (counting.Count == 0 && entry.Decompressed == 0)
{
// set compression to STORED for zero byte files (no compression data)
originalStream.Position = (long)(entry.HeaderOffset + 8);
originalStream.WriteByte(0);
originalStream.WriteByte(0);
}
originalStream.Position = (long)(entry.HeaderOffset + 14);
writer.WriteFooter(entry.Crc, compressedvalue, decompressedvalue);

View File

@@ -12,7 +12,7 @@
<ProjectReference Include="..\..\src\SharpCompress\SharpCompress.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="15.9.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="16.2.0" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.4.1" />
<PackageReference Include="xunit" Version="2.4.1" />
<PackageReference Include="Xunit.SkippableFact" Version="1.3.12" />

View File

@@ -5,6 +5,10 @@ using SharpCompress.Archives.Tar;
using SharpCompress.Common;
using SharpCompress.Writers;
using Xunit;
using System.Text;
using SharpCompress.Readers;
using SharpCompress.Writers.Tar;
using SharpCompress.Readers.Tar;
namespace SharpCompress.Test.Tar
{
@@ -26,7 +30,7 @@ namespace SharpCompress.Test.Tar
{
ArchiveFileRead("Tar.tar");
}
[Fact]
public void Tar_FileName_Exactly_100_Characters()
{
@@ -135,10 +139,17 @@ namespace SharpCompress.Test.Tar
string scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.tar");
string unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
// var aropt = new Ar
using (var archive = TarArchive.Create())
{
archive.AddAllFromDirectory(ORIGINAL_FILES_PATH);
archive.SaveTo(scratchPath, CompressionType.None);
var twopt = new TarWriterOptions(CompressionType.None, true);
twopt.ArchiveEncoding = new ArchiveEncoding()
{
Default = Encoding.GetEncoding(866)
};
archive.SaveTo(scratchPath, twopt);
}
CompareArchivesByPath(unmodified, scratchPath);
}
@@ -195,5 +206,40 @@ namespace SharpCompress.Test.Tar
Assert.True(archive.Type == ArchiveType.Tar);
}
}
[Theory]
[InlineData(10)]
[InlineData(128)]
public void Tar_Japanese_Name(int length)
{
using (var mstm = new MemoryStream())
{
var enc = new ArchiveEncoding()
{
Default = Encoding.UTF8
};
var twopt = new TarWriterOptions(CompressionType.None, true);
twopt.ArchiveEncoding = enc;
var fname = new string((char)0x3042, length);
using (var tw = new TarWriter(mstm, twopt))
using (var input = new MemoryStream(new byte[32]))
{
tw.Write(fname, input, null);
}
using (var inputMemory = new MemoryStream(mstm.ToArray()))
{
var tropt = new ReaderOptions()
{
ArchiveEncoding = enc
};
using (var tr = TarReader.Open(inputMemory, tropt))
{
while (tr.MoveToNextEntry())
{
Assert.Equal(fname, tr.Entry.Key);
}
}
}
}
}
}
}

View File

@@ -512,5 +512,48 @@ namespace SharpCompress.Test.Zip
}
}
}
}
[Fact]
public void Zip_BadLocalExtra_Read()
{
string zipPath = Path.Combine(TEST_ARCHIVES_PATH, "Zip.badlocalextra.zip");
using (ZipArchive za = ZipArchive.Open(zipPath))
{
var ex = Record.Exception(() =>
{
var firstEntry = za.Entries.First(x => x.Key == "first.txt");
var buffer = new byte[4096];
using (var memoryStream = new MemoryStream())
using (var firstStream = firstEntry.OpenEntryStream())
{
firstStream.CopyTo(memoryStream);
Assert.Equal(199, memoryStream.Length);
}
});
Assert.Null(ex);
}
}
[Fact]
public void Zip_NoCompression_DataDescriptors_Read()
{
string zipPath = Path.Combine(TEST_ARCHIVES_PATH, "Zip.none.datadescriptors.zip");
using (ZipArchive za = ZipArchive.Open(zipPath))
{
var firstEntry = za.Entries.First(x => x.Key == "first.txt");
var buffer = new byte[4096];
using (var memoryStream = new MemoryStream())
using (var firstStream = firstEntry.OpenEntryStream())
{
firstStream.CopyTo(memoryStream);
Assert.Equal(199, memoryStream.Length);
}
}
}
}
}

Binary file not shown.