mirror of
https://github.com/adamhathcock/sharpcompress.git
synced 2026-02-08 13:34:57 +00:00
Compare commits
50 Commits
copilot/fi
...
adam/multi
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2e301becb4 | ||
|
|
2e7d4eb74b | ||
|
|
5b2030bb98 | ||
|
|
c1169539ea | ||
|
|
8d2463f575 | ||
|
|
af7e270b2d | ||
|
|
1984da6997 | ||
|
|
9433e06b93 | ||
|
|
a92aaa51d5 | ||
|
|
d41908adeb | ||
|
|
81ca15b567 | ||
|
|
b81d0fd730 | ||
|
|
3a1bb187e8 | ||
|
|
3fee14a070 | ||
|
|
5bf789ac65 | ||
|
|
be06049db3 | ||
|
|
a0435f6a60 | ||
|
|
2321e2c90b | ||
|
|
97e98d8629 | ||
|
|
d96e7362d2 | ||
|
|
7dd46fe5ed | ||
|
|
04c044cb2b | ||
|
|
cc10a12fbc | ||
|
|
8b0a1c699f | ||
|
|
15ca7c9807 | ||
|
|
2b4da7e39b | ||
|
|
31f81f38af | ||
|
|
72cf77b7c7 | ||
|
|
0fe48c647e | ||
|
|
7b06652bff | ||
|
|
434ce05416 | ||
|
|
0698031ed4 | ||
|
|
51237a34eb | ||
|
|
b8264a8131 | ||
|
|
cad923018e | ||
|
|
db94b49941 | ||
|
|
72d15d9cbf | ||
|
|
e0186eadc0 | ||
|
|
4cfa5b04af | ||
|
|
f2c54b1f8b | ||
|
|
d7d0bc6582 | ||
|
|
dd9dc2500b | ||
|
|
4efb109da8 | ||
|
|
4c61628078 | ||
|
|
fa1d7af22f | ||
|
|
a771ba3bc0 | ||
|
|
7dd0da5fd7 | ||
|
|
da87e45534 | ||
|
|
2ffaef5563 | ||
|
|
4536fddec2 |
@@ -3,7 +3,7 @@
|
||||
"isRoot": true,
|
||||
"tools": {
|
||||
"csharpier": {
|
||||
"version": "1.1.2",
|
||||
"version": "1.2.1",
|
||||
"commands": [
|
||||
"csharpier"
|
||||
],
|
||||
|
||||
2
.github/workflows/dotnetcore.yml
vendored
2
.github/workflows/dotnetcore.yml
vendored
@@ -14,7 +14,7 @@ jobs:
|
||||
os: [windows-latest, ubuntu-latest]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v6
|
||||
- uses: actions/setup-dotnet@v5
|
||||
with:
|
||||
dotnet-version: 8.0.x
|
||||
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -11,6 +11,8 @@ TestResults/
|
||||
packages/*/
|
||||
project.lock.json
|
||||
tests/TestArchives/Scratch
|
||||
tests/TestArchives/*/Scratch
|
||||
tests/TestArchives/*/Scratch2
|
||||
.vs
|
||||
tools
|
||||
.vscode
|
||||
@@ -18,4 +20,3 @@ tools
|
||||
|
||||
.DS_Store
|
||||
*.snupkg
|
||||
/tests/TestArchives/6d23a38c-f064-4ef1-ad89-b942396f53b9/Scratch
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
<Project>
|
||||
<ItemGroup>
|
||||
<PackageVersion Include="Bullseye" Version="6.0.0" />
|
||||
<PackageVersion Include="AwesomeAssertions" Version="9.2.1" />
|
||||
<PackageVersion Include="AwesomeAssertions" Version="9.3.0" />
|
||||
<PackageVersion Include="Glob" Version="1.1.9" />
|
||||
<PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.14" />
|
||||
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="8.0.0" />
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# SharpCompress
|
||||
|
||||
SharpCompress is a compression library in pure C# for .NET Framework 4.62, .NET Standard 2.1, .NET 6.0 and NET 8.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip, unzstd with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
|
||||
SharpCompress is a compression library in pure C# for .NET Framework 4.62, .NET Standard 2.1, .NET 6.0 and NET 8.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip, unzstd, unarc and unarj with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
|
||||
|
||||
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
|
||||
|
||||
|
||||
@@ -172,4 +172,9 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
|
||||
return Entries.All(x => x.IsComplete);
|
||||
}
|
||||
}
|
||||
|
||||
public virtual bool IsMultiVolume =>
|
||||
_sourceStream?.Files.Count > 1 || _sourceStream?.Streams.Count > 1;
|
||||
|
||||
public virtual bool SupportsMultiThreading => false;
|
||||
}
|
||||
|
||||
@@ -45,4 +45,14 @@ public interface IArchive : IDisposable
|
||||
/// The total size of the files as uncompressed in the archive.
|
||||
/// </summary>
|
||||
long TotalUncompressSize { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Is the archive part of a multi-volume set.
|
||||
/// </summary>
|
||||
bool IsMultiVolume { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Does the archive support multi-threaded extraction.
|
||||
/// </summary>
|
||||
bool SupportsMultiThreading { get; }
|
||||
}
|
||||
|
||||
@@ -88,7 +88,7 @@ public static class IArchiveEntryExtensions
|
||||
entry,
|
||||
destinationDirectory,
|
||||
options,
|
||||
(x, opt) => entry.WriteToFileAsync(x, opt, cancellationToken),
|
||||
entry.WriteToFileAsync,
|
||||
cancellationToken
|
||||
);
|
||||
|
||||
@@ -124,10 +124,11 @@ public static class IArchiveEntryExtensions
|
||||
entry,
|
||||
destinationFileName,
|
||||
options,
|
||||
async (x, fm) =>
|
||||
async (x, fm, ct) =>
|
||||
{
|
||||
using var fs = File.Open(destinationFileName, fm);
|
||||
await entry.WriteToAsync(fs, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
await entry.WriteToAsync(fs, ct).ConfigureAwait(false);
|
||||
},
|
||||
cancellationToken
|
||||
);
|
||||
}
|
||||
|
||||
@@ -74,7 +74,7 @@ public static class IArchiveExtensions
|
||||
}
|
||||
|
||||
// Write file
|
||||
using var fs = File.Create(path);
|
||||
using var fs = File.OpenWrite(path);
|
||||
entry.WriteTo(fs);
|
||||
|
||||
// Update progress
|
||||
|
||||
@@ -1,39 +0,0 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.ObjectModel;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using SharpCompress.Common.Rar;
|
||||
using SharpCompress.Common.Rar.Headers;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives.Rar;
|
||||
|
||||
/// <summary>
|
||||
/// A rar part based on a FileInfo object
|
||||
/// </summary>
|
||||
internal class FileInfoRarArchiveVolume : RarVolume
|
||||
{
|
||||
internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options, int index)
|
||||
: base(StreamingMode.Seekable, fileInfo.OpenRead(), FixOptions(options), index)
|
||||
{
|
||||
FileInfo = fileInfo;
|
||||
FileParts = GetVolumeFileParts().ToArray().ToReadOnly();
|
||||
}
|
||||
|
||||
private static ReaderOptions FixOptions(ReaderOptions options)
|
||||
{
|
||||
//make sure we're closing streams with fileinfo
|
||||
options.LeaveStreamOpen = false;
|
||||
return options;
|
||||
}
|
||||
|
||||
internal ReadOnlyCollection<RarFilePart> FileParts { get; }
|
||||
|
||||
internal FileInfo FileInfo { get; }
|
||||
|
||||
internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader) =>
|
||||
new FileInfoRarFilePart(this, ReaderOptions.Password, markHeader, fileHeader, FileInfo);
|
||||
|
||||
internal override IEnumerable<RarFilePart> ReadFileParts() => FileParts;
|
||||
}
|
||||
@@ -1,21 +0,0 @@
|
||||
using System.IO;
|
||||
using SharpCompress.Common.Rar.Headers;
|
||||
|
||||
namespace SharpCompress.Archives.Rar;
|
||||
|
||||
internal sealed class FileInfoRarFilePart : SeekableFilePart
|
||||
{
|
||||
internal FileInfoRarFilePart(
|
||||
FileInfoRarArchiveVolume volume,
|
||||
string? password,
|
||||
MarkHeader mh,
|
||||
FileHeader fh,
|
||||
FileInfo fi
|
||||
)
|
||||
: base(mh, fh, volume.Index, volume.Stream, password) => FileInfo = fi;
|
||||
|
||||
internal FileInfo FileInfo { get; }
|
||||
|
||||
internal override string FilePartName =>
|
||||
"Rar File: " + FileInfo.FullName + " File Entry: " + FileHeader.FileName;
|
||||
}
|
||||
@@ -47,9 +47,9 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
{
|
||||
sourceStream.LoadAllParts(); //request all streams
|
||||
var streams = sourceStream.Streams.ToArray();
|
||||
var i = 0;
|
||||
if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions)) //test part 2 - true = multipart not split
|
||||
{
|
||||
var i = 0;
|
||||
sourceStream.IsVolumes = true;
|
||||
streams[1].Position = 0;
|
||||
sourceStream.Position = 0;
|
||||
@@ -57,12 +57,18 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
return sourceStream.Streams.Select(a => new StreamRarArchiveVolume(
|
||||
a,
|
||||
ReaderOptions,
|
||||
i++
|
||||
i++,
|
||||
IsMultiVolume
|
||||
));
|
||||
}
|
||||
|
||||
//split mode or single file
|
||||
return new StreamRarArchiveVolume(sourceStream, ReaderOptions, i++).AsEnumerable();
|
||||
return new StreamRarArchiveVolume(
|
||||
sourceStream,
|
||||
ReaderOptions,
|
||||
0,
|
||||
IsMultiVolume
|
||||
).AsEnumerable();
|
||||
}
|
||||
|
||||
protected override IReader CreateReaderForSolidExtraction()
|
||||
@@ -83,6 +89,7 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
}
|
||||
|
||||
public override bool IsSolid => Volumes.First().IsSolidArchive;
|
||||
public override bool SupportsMultiThreading => !IsMultiVolume && !IsSolid;
|
||||
|
||||
public virtual int MinVersion => Volumes.First().MinVersion;
|
||||
public virtual int MaxVersion => Volumes.First().MaxVersion;
|
||||
|
||||
@@ -134,4 +134,6 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
public override bool SupportsMultiThreading => Parts.Single().SupportsMultiThreading;
|
||||
}
|
||||
|
||||
@@ -1,25 +1,29 @@
|
||||
using System.IO;
|
||||
using SharpCompress.Common.Rar;
|
||||
using SharpCompress.Common.Rar.Headers;
|
||||
using SharpCompress.IO;
|
||||
|
||||
namespace SharpCompress.Archives.Rar;
|
||||
|
||||
internal class SeekableFilePart : RarFilePart
|
||||
internal class SeekableRarFilePart : RarFilePart
|
||||
{
|
||||
private readonly Stream _stream;
|
||||
private readonly string? _password;
|
||||
private readonly bool _isMultiVolume;
|
||||
|
||||
internal SeekableFilePart(
|
||||
internal SeekableRarFilePart(
|
||||
MarkHeader mh,
|
||||
FileHeader fh,
|
||||
int index,
|
||||
Stream stream,
|
||||
string? password
|
||||
string? password,
|
||||
bool isMultiVolume
|
||||
)
|
||||
: base(mh, fh, index)
|
||||
{
|
||||
_stream = stream;
|
||||
_password = password;
|
||||
_isMultiVolume = isMultiVolume;
|
||||
}
|
||||
|
||||
internal override Stream GetCompressedStream()
|
||||
@@ -42,4 +46,7 @@ internal class SeekableFilePart : RarFilePart
|
||||
}
|
||||
|
||||
internal override string FilePartName => "Unknown Stream - File Entry: " + FileHeader.FileName;
|
||||
|
||||
public override bool SupportsMultiThreading =>
|
||||
!_isMultiVolume && _stream is SourceStream ss && ss.IsFileMode && ss.Files.Count == 1;
|
||||
}
|
||||
@@ -9,11 +9,28 @@ namespace SharpCompress.Archives.Rar;
|
||||
|
||||
internal class StreamRarArchiveVolume : RarVolume
|
||||
{
|
||||
internal StreamRarArchiveVolume(Stream stream, ReaderOptions options, int index)
|
||||
: base(StreamingMode.Seekable, stream, options, index) { }
|
||||
private readonly bool _isMultiVolume;
|
||||
|
||||
internal StreamRarArchiveVolume(
|
||||
Stream stream,
|
||||
ReaderOptions options,
|
||||
int index,
|
||||
bool isMultiVolume
|
||||
)
|
||||
: base(StreamingMode.Seekable, stream, options, index)
|
||||
{
|
||||
_isMultiVolume = isMultiVolume;
|
||||
}
|
||||
|
||||
internal override IEnumerable<RarFilePart> ReadFileParts() => GetVolumeFileParts();
|
||||
|
||||
internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader) =>
|
||||
new SeekableFilePart(markHeader, fileHeader, Index, Stream, ReaderOptions.Password);
|
||||
new SeekableRarFilePart(
|
||||
markHeader,
|
||||
fileHeader,
|
||||
Index,
|
||||
Stream,
|
||||
ReaderOptions.Password,
|
||||
_isMultiVolume
|
||||
);
|
||||
}
|
||||
|
||||
@@ -283,7 +283,12 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
|
||||
yield return new ZipArchiveEntry(
|
||||
this,
|
||||
new SeekableZipFilePart(headerFactory.NotNull(), deh, s)
|
||||
new SeekableZipFilePart(
|
||||
headerFactory.NotNull(),
|
||||
deh,
|
||||
s,
|
||||
IsMultiVolume
|
||||
)
|
||||
);
|
||||
}
|
||||
break;
|
||||
@@ -385,4 +390,6 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
((IStreamStack)stream).StackSeek(0);
|
||||
return ZipReader.Open(stream, ReaderOptions, Entries);
|
||||
}
|
||||
|
||||
public override bool SupportsMultiThreading => !IsMultiVolume;
|
||||
}
|
||||
|
||||
@@ -23,5 +23,7 @@ public class ZipArchiveEntry : ZipEntry, IArchiveEntry
|
||||
|
||||
public bool IsComplete => true;
|
||||
|
||||
public override bool SupportsMultiThreading => Parts.Single().SupportsMultiThreading;
|
||||
|
||||
#endregion
|
||||
}
|
||||
|
||||
@@ -57,7 +57,7 @@ namespace SharpCompress.Common.Arc
|
||||
return value switch
|
||||
{
|
||||
1 or 2 => CompressionType.None,
|
||||
3 => CompressionType.RLE90,
|
||||
3 => CompressionType.Packed,
|
||||
4 => CompressionType.Squeezed,
|
||||
5 or 6 or 7 or 8 => CompressionType.Crunched,
|
||||
9 => CompressionType.Squashed,
|
||||
|
||||
@@ -44,7 +44,7 @@ namespace SharpCompress.Common.Arc
|
||||
Header.CompressedSize
|
||||
);
|
||||
break;
|
||||
case CompressionType.RLE90:
|
||||
case CompressionType.Packed:
|
||||
compressedStream = new RunLength90Stream(
|
||||
_stream,
|
||||
(int)Header.CompressedSize
|
||||
@@ -54,6 +54,14 @@ namespace SharpCompress.Common.Arc
|
||||
compressedStream = new SqueezeStream(_stream, (int)Header.CompressedSize);
|
||||
break;
|
||||
case CompressionType.Crunched:
|
||||
if (Header.OriginalSize > 128 * 1024)
|
||||
{
|
||||
throw new NotSupportedException(
|
||||
"CompressionMethod: "
|
||||
+ Header.CompressionMethod
|
||||
+ " with size > 128KB"
|
||||
);
|
||||
}
|
||||
compressedStream = new ArcLzwStream(
|
||||
_stream,
|
||||
(int)Header.CompressedSize,
|
||||
|
||||
@@ -38,6 +38,22 @@ namespace SharpCompress.Common.Arj
|
||||
Header.CompressedSize
|
||||
);
|
||||
break;
|
||||
case CompressionMethod.CompressedMost:
|
||||
case CompressionMethod.Compressed:
|
||||
case CompressionMethod.CompressedFaster:
|
||||
if (Header.OriginalSize > 128 * 1024)
|
||||
{
|
||||
throw new NotSupportedException(
|
||||
"CompressionMethod: "
|
||||
+ Header.CompressionMethod
|
||||
+ " with size > 128KB"
|
||||
);
|
||||
}
|
||||
compressedStream = new LhaStream<Lh7DecoderCfg>(
|
||||
_stream,
|
||||
(int)Header.OriginalSize
|
||||
);
|
||||
break;
|
||||
case CompressionMethod.CompressedFastest:
|
||||
compressedStream = new LHDecoderStream(_stream, (int)Header.OriginalSize);
|
||||
break;
|
||||
|
||||
@@ -23,7 +23,7 @@ public enum CompressionType
|
||||
Reduce4,
|
||||
Explode,
|
||||
Squeezed,
|
||||
RLE90,
|
||||
Packed,
|
||||
Crunched,
|
||||
Squashed,
|
||||
Crushed,
|
||||
|
||||
@@ -87,4 +87,5 @@ public abstract class Entry : IEntry
|
||||
/// Entry file attribute.
|
||||
/// </summary>
|
||||
public virtual int? Attrib => throw new NotImplementedException();
|
||||
public virtual bool SupportsMultiThreading => false;
|
||||
}
|
||||
|
||||
@@ -128,7 +128,7 @@ internal static class ExtractionMethods
|
||||
IEntry entry,
|
||||
string destinationDirectory,
|
||||
ExtractionOptions? options,
|
||||
Func<string, ExtractionOptions?, Task> writeAsync,
|
||||
Func<string, ExtractionOptions?, CancellationToken, Task> writeAsync,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
@@ -189,7 +189,7 @@ internal static class ExtractionMethods
|
||||
"Entry is trying to write a file outside of the destination directory."
|
||||
);
|
||||
}
|
||||
await writeAsync(destinationFileName, options).ConfigureAwait(false);
|
||||
await writeAsync(destinationFileName, options, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
|
||||
{
|
||||
@@ -201,7 +201,7 @@ internal static class ExtractionMethods
|
||||
IEntry entry,
|
||||
string destinationFileName,
|
||||
ExtractionOptions? options,
|
||||
Func<string, FileMode, Task> openAndWriteAsync,
|
||||
Func<string, FileMode, CancellationToken, Task> openAndWriteAsync,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
@@ -225,7 +225,8 @@ internal static class ExtractionMethods
|
||||
fm = FileMode.CreateNew;
|
||||
}
|
||||
|
||||
await openAndWriteAsync(destinationFileName, fm).ConfigureAwait(false);
|
||||
await openAndWriteAsync(destinationFileName, fm, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
entry.PreserveExtractionOptions(destinationFileName, options);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14,4 +14,6 @@ public abstract class FilePart
|
||||
internal abstract Stream? GetCompressedStream();
|
||||
internal abstract Stream? GetRawStream();
|
||||
internal bool Skipped { get; set; }
|
||||
|
||||
public virtual bool SupportsMultiThreading => false;
|
||||
}
|
||||
|
||||
@@ -21,4 +21,5 @@ public interface IEntry
|
||||
DateTime? LastModifiedTime { get; }
|
||||
long Size { get; }
|
||||
int? Attrib { get; }
|
||||
bool SupportsMultiThreading { get; }
|
||||
}
|
||||
|
||||
@@ -25,6 +25,10 @@ internal sealed class TarHeader
|
||||
|
||||
internal const int BLOCK_SIZE = 512;
|
||||
|
||||
// Maximum size for long name/link headers to prevent memory exhaustion attacks
|
||||
// This is generous enough for most real-world scenarios (32KB)
|
||||
private const int MAX_LONG_NAME_SIZE = 32768;
|
||||
|
||||
internal void Write(Stream output)
|
||||
{
|
||||
var buffer = new byte[BLOCK_SIZE];
|
||||
@@ -186,6 +190,15 @@ internal sealed class TarHeader
|
||||
private string ReadLongName(BinaryReader reader, byte[] buffer)
|
||||
{
|
||||
var size = ReadSize(buffer);
|
||||
|
||||
// Validate size to prevent memory exhaustion from malformed headers
|
||||
if (size < 0 || size > MAX_LONG_NAME_SIZE)
|
||||
{
|
||||
throw new InvalidFormatException(
|
||||
$"Long name size {size} is invalid or exceeds maximum allowed size of {MAX_LONG_NAME_SIZE} bytes"
|
||||
);
|
||||
}
|
||||
|
||||
var nameLength = (int)size;
|
||||
var nameBytes = reader.ReadBytes(nameLength);
|
||||
var remainingBytesToRead = BLOCK_SIZE - (nameLength % BLOCK_SIZE);
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
using System.IO;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
using SharpCompress.IO;
|
||||
|
||||
namespace SharpCompress.Common.Zip;
|
||||
|
||||
@@ -7,13 +8,19 @@ internal class SeekableZipFilePart : ZipFilePart
|
||||
{
|
||||
private bool _isLocalHeaderLoaded;
|
||||
private readonly SeekableZipHeaderFactory _headerFactory;
|
||||
private readonly bool _isMultiVolume;
|
||||
|
||||
internal SeekableZipFilePart(
|
||||
SeekableZipHeaderFactory headerFactory,
|
||||
DirectoryEntryHeader header,
|
||||
Stream stream
|
||||
Stream stream,
|
||||
bool isMultiVolume
|
||||
)
|
||||
: base(header, stream) => _headerFactory = headerFactory;
|
||||
: base(header, stream)
|
||||
{
|
||||
_headerFactory = headerFactory;
|
||||
_isMultiVolume = isMultiVolume;
|
||||
}
|
||||
|
||||
internal override Stream GetCompressedStream()
|
||||
{
|
||||
@@ -30,8 +37,20 @@ internal class SeekableZipFilePart : ZipFilePart
|
||||
|
||||
protected override Stream CreateBaseStream()
|
||||
{
|
||||
if (!_isMultiVolume && BaseStream is SourceStream ss)
|
||||
{
|
||||
if (ss.IsFileMode && ss.Files.Count == 1)
|
||||
{
|
||||
var fileStream = ss.CurrentFile.OpenRead();
|
||||
fileStream.Position = Header.DataStartPosition.NotNull();
|
||||
return fileStream;
|
||||
}
|
||||
}
|
||||
BaseStream.Position = Header.DataStartPosition.NotNull();
|
||||
|
||||
return BaseStream;
|
||||
}
|
||||
|
||||
public override bool SupportsMultiThreading =>
|
||||
!_isMultiVolume && BaseStream is SourceStream ss && ss.IsFileMode && ss.Files.Count == 1;
|
||||
}
|
||||
|
||||
@@ -4,56 +4,68 @@ using System.IO;
|
||||
namespace SharpCompress.Compressors.Arj
|
||||
{
|
||||
[CLSCompliant(true)]
|
||||
public sealed class BitReader
|
||||
public class BitReader
|
||||
{
|
||||
private readonly Stream _stream;
|
||||
private int _bitBuffer;
|
||||
private int _bitsRemaining;
|
||||
private bool _disposed;
|
||||
private readonly Stream _input;
|
||||
private int _bitBuffer; // currently buffered bits
|
||||
private int _bitCount; // number of bits in buffer
|
||||
|
||||
public BitReader(Stream input)
|
||||
{
|
||||
_stream = input ?? throw new ArgumentNullException(nameof(input));
|
||||
if (!input.CanRead)
|
||||
throw new ArgumentException("Stream must be readable.", nameof(input));
|
||||
_input = input ?? throw new ArgumentNullException(nameof(input));
|
||||
_bitBuffer = 0;
|
||||
_bitCount = 0;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Reads a single bit from the stream. Returns 0 or 1.
|
||||
/// </summary>
|
||||
public int ReadBit()
|
||||
{
|
||||
if (_bitCount == 0)
|
||||
{
|
||||
int nextByte = _input.ReadByte();
|
||||
if (nextByte < 0)
|
||||
{
|
||||
throw new EndOfStreamException("No more data available in BitReader.");
|
||||
}
|
||||
|
||||
_bitBuffer = nextByte;
|
||||
_bitCount = 8;
|
||||
}
|
||||
|
||||
int bit = (_bitBuffer >> (_bitCount - 1)) & 1;
|
||||
_bitCount--;
|
||||
return bit;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Reads n bits (up to 32) from the stream.
|
||||
/// </summary>
|
||||
public int ReadBits(int count)
|
||||
{
|
||||
if (_disposed)
|
||||
throw new ObjectDisposedException(nameof(BitReader));
|
||||
|
||||
if (count <= 0 || count > 32)
|
||||
if (count < 0 || count > 32)
|
||||
{
|
||||
throw new ArgumentOutOfRangeException(
|
||||
nameof(count),
|
||||
"Bit count must be between 1 and 32."
|
||||
"Count must be between 0 and 32."
|
||||
);
|
||||
}
|
||||
|
||||
int result = 0;
|
||||
for (int i = 0; i < count; i++)
|
||||
{
|
||||
if (_bitsRemaining == 0)
|
||||
{
|
||||
int nextByte = _stream.ReadByte();
|
||||
if (nextByte == -1)
|
||||
throw new EndOfStreamException();
|
||||
|
||||
_bitBuffer = nextByte;
|
||||
_bitsRemaining = 8;
|
||||
}
|
||||
|
||||
// hoogste bit eerst
|
||||
result = (result << 1) | ((_bitBuffer >> 7) & 1);
|
||||
_bitBuffer <<= 1;
|
||||
_bitsRemaining--;
|
||||
result = (result << 1) | ReadBit();
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Resets any buffered bits.
|
||||
/// </summary>
|
||||
public void AlignToByte()
|
||||
{
|
||||
_bitsRemaining = 0;
|
||||
_bitCount = 0;
|
||||
_bitBuffer = 0;
|
||||
}
|
||||
}
|
||||
|
||||
43
src/SharpCompress/Compressors/Arj/HistoryIterator.cs
Normal file
43
src/SharpCompress/Compressors/Arj/HistoryIterator.cs
Normal file
@@ -0,0 +1,43 @@
|
||||
using System;
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace SharpCompress.Compressors.Arj
|
||||
{
|
||||
/// <summary>
|
||||
/// Iterator that reads & pushes values back into the ring buffer.
|
||||
/// </summary>
|
||||
public class HistoryIterator : IEnumerator<byte>
|
||||
{
|
||||
private int _index;
|
||||
private readonly IRingBuffer _ring;
|
||||
|
||||
public HistoryIterator(IRingBuffer ring, int startIndex)
|
||||
{
|
||||
_ring = ring;
|
||||
_index = startIndex;
|
||||
}
|
||||
|
||||
public bool MoveNext()
|
||||
{
|
||||
Current = _ring[_index];
|
||||
_index = unchecked(_index + 1);
|
||||
|
||||
// Push value back into the ring buffer
|
||||
_ring.Push(Current);
|
||||
|
||||
return true; // iterator is infinite
|
||||
}
|
||||
|
||||
public void Reset()
|
||||
{
|
||||
throw new NotSupportedException();
|
||||
}
|
||||
|
||||
public byte Current { get; private set; }
|
||||
|
||||
object IEnumerator.Current => Current;
|
||||
|
||||
public void Dispose() { }
|
||||
}
|
||||
}
|
||||
218
src/SharpCompress/Compressors/Arj/HuffmanTree.cs
Normal file
218
src/SharpCompress/Compressors/Arj/HuffmanTree.cs
Normal file
@@ -0,0 +1,218 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
|
||||
namespace SharpCompress.Compressors.Arj
|
||||
{
|
||||
[CLSCompliant(true)]
|
||||
public enum NodeType
|
||||
{
|
||||
Leaf,
|
||||
Branch,
|
||||
}
|
||||
|
||||
[CLSCompliant(true)]
|
||||
public sealed class TreeEntry
|
||||
{
|
||||
public readonly NodeType Type;
|
||||
public readonly int LeafValue;
|
||||
public readonly int BranchIndex;
|
||||
|
||||
public const int MAX_INDEX = 4096;
|
||||
|
||||
private TreeEntry(NodeType type, int leafValue, int branchIndex)
|
||||
{
|
||||
Type = type;
|
||||
LeafValue = leafValue;
|
||||
BranchIndex = branchIndex;
|
||||
}
|
||||
|
||||
public static TreeEntry Leaf(int value)
|
||||
{
|
||||
return new TreeEntry(NodeType.Leaf, value, -1);
|
||||
}
|
||||
|
||||
public static TreeEntry Branch(int index)
|
||||
{
|
||||
if (index >= MAX_INDEX)
|
||||
{
|
||||
throw new ArgumentOutOfRangeException(
|
||||
nameof(index),
|
||||
"Branch index exceeds MAX_INDEX"
|
||||
);
|
||||
}
|
||||
return new TreeEntry(NodeType.Branch, 0, index);
|
||||
}
|
||||
}
|
||||
|
||||
[CLSCompliant(true)]
|
||||
public sealed class HuffTree
|
||||
{
|
||||
private readonly List<TreeEntry> _tree;
|
||||
|
||||
public HuffTree(int capacity = 0)
|
||||
{
|
||||
_tree = new List<TreeEntry>(capacity);
|
||||
}
|
||||
|
||||
public void SetSingle(int value)
|
||||
{
|
||||
_tree.Clear();
|
||||
_tree.Add(TreeEntry.Leaf(value));
|
||||
}
|
||||
|
||||
public void BuildTree(byte[] lengths, int count)
|
||||
{
|
||||
if (lengths == null)
|
||||
{
|
||||
throw new ArgumentNullException(nameof(lengths));
|
||||
}
|
||||
|
||||
if (count < 0 || count > lengths.Length)
|
||||
{
|
||||
throw new ArgumentOutOfRangeException(nameof(count));
|
||||
}
|
||||
|
||||
if (count > TreeEntry.MAX_INDEX / 2)
|
||||
{
|
||||
throw new ArgumentException(
|
||||
$"Count exceeds maximum allowed: {TreeEntry.MAX_INDEX / 2}"
|
||||
);
|
||||
}
|
||||
byte[] slice = new byte[count];
|
||||
Array.Copy(lengths, slice, count);
|
||||
|
||||
BuildTree(slice);
|
||||
}
|
||||
|
||||
public void BuildTree(byte[] valueLengths)
|
||||
{
|
||||
if (valueLengths == null)
|
||||
{
|
||||
throw new ArgumentNullException(nameof(valueLengths));
|
||||
}
|
||||
|
||||
if (valueLengths.Length > TreeEntry.MAX_INDEX / 2)
|
||||
{
|
||||
throw new InvalidOperationException("Too many code lengths");
|
||||
}
|
||||
|
||||
_tree.Clear();
|
||||
|
||||
int maxAllocated = 1; // start with a single (root) node
|
||||
|
||||
for (byte currentLen = 1; ; currentLen++)
|
||||
{
|
||||
// add missing branches up to current limit
|
||||
int maxLimit = maxAllocated;
|
||||
|
||||
for (int i = _tree.Count; i < maxLimit; i++)
|
||||
{
|
||||
// TreeEntry.Branch may throw if index too large
|
||||
try
|
||||
{
|
||||
_tree.Add(TreeEntry.Branch(maxAllocated));
|
||||
}
|
||||
catch (ArgumentOutOfRangeException e)
|
||||
{
|
||||
_tree.Clear();
|
||||
throw new InvalidOperationException("Branch index exceeds limit", e);
|
||||
}
|
||||
|
||||
// each branch node allocates two children
|
||||
maxAllocated += 2;
|
||||
}
|
||||
|
||||
// fill tree with leaves found in the lengths table at the current length
|
||||
bool moreLeaves = false;
|
||||
|
||||
for (int value = 0; value < valueLengths.Length; value++)
|
||||
{
|
||||
byte len = valueLengths[value];
|
||||
if (len == currentLen)
|
||||
{
|
||||
_tree.Add(TreeEntry.Leaf(value));
|
||||
}
|
||||
else if (len > currentLen)
|
||||
{
|
||||
moreLeaves = true; // there are more leaves to process
|
||||
}
|
||||
}
|
||||
|
||||
// sanity check (too many leaves)
|
||||
if (_tree.Count > maxAllocated)
|
||||
{
|
||||
throw new InvalidOperationException("Too many leaves");
|
||||
}
|
||||
|
||||
// stop when no longer finding longer codes
|
||||
if (!moreLeaves)
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// ensure tree is complete
|
||||
if (_tree.Count != maxAllocated)
|
||||
{
|
||||
throw new InvalidOperationException(
|
||||
$"Missing some leaves: tree count = {_tree.Count}, expected = {maxAllocated}"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
public int ReadEntry(BitReader reader)
|
||||
{
|
||||
if (_tree.Count == 0)
|
||||
{
|
||||
throw new InvalidOperationException("Tree not initialized");
|
||||
}
|
||||
|
||||
TreeEntry node = _tree[0];
|
||||
while (true)
|
||||
{
|
||||
if (node.Type == NodeType.Leaf)
|
||||
{
|
||||
return node.LeafValue;
|
||||
}
|
||||
|
||||
int bit = reader.ReadBit();
|
||||
int index = node.BranchIndex + bit;
|
||||
|
||||
if (index >= _tree.Count)
|
||||
{
|
||||
throw new InvalidOperationException("Invalid branch index during read");
|
||||
}
|
||||
|
||||
node = _tree[index];
|
||||
}
|
||||
}
|
||||
|
||||
public override string ToString()
|
||||
{
|
||||
var result = new StringBuilder();
|
||||
|
||||
void FormatStep(int index, string prefix)
|
||||
{
|
||||
var node = _tree[index];
|
||||
if (node.Type == NodeType.Leaf)
|
||||
{
|
||||
result.AppendLine($"{prefix} -> {node.LeafValue}");
|
||||
}
|
||||
else
|
||||
{
|
||||
FormatStep(node.BranchIndex, prefix + "0");
|
||||
FormatStep(node.BranchIndex + 1, prefix + "1");
|
||||
}
|
||||
}
|
||||
|
||||
if (_tree.Count > 0)
|
||||
{
|
||||
FormatStep(0, "");
|
||||
}
|
||||
|
||||
return result.ToString();
|
||||
}
|
||||
}
|
||||
}
|
||||
9
src/SharpCompress/Compressors/Arj/ILhaDecoderConfig.cs
Normal file
9
src/SharpCompress/Compressors/Arj/ILhaDecoderConfig.cs
Normal file
@@ -0,0 +1,9 @@
|
||||
namespace SharpCompress.Compressors.Arj
|
||||
{
|
||||
public interface ILhaDecoderConfig
|
||||
{
|
||||
int HistoryBits { get; }
|
||||
int OffsetBits { get; }
|
||||
RingBuffer RingBuffer { get; }
|
||||
}
|
||||
}
|
||||
17
src/SharpCompress/Compressors/Arj/IRingBuffer.cs
Normal file
17
src/SharpCompress/Compressors/Arj/IRingBuffer.cs
Normal file
@@ -0,0 +1,17 @@
|
||||
namespace SharpCompress.Compressors.Arj
|
||||
{
|
||||
public interface IRingBuffer
|
||||
{
|
||||
int BufferSize { get; }
|
||||
|
||||
int Cursor { get; }
|
||||
void SetCursor(int pos);
|
||||
|
||||
void Push(byte value);
|
||||
|
||||
HistoryIterator IterFromOffset(int offset);
|
||||
HistoryIterator IterFromPos(int pos);
|
||||
|
||||
byte this[int index] { get; }
|
||||
}
|
||||
}
|
||||
9
src/SharpCompress/Compressors/Arj/Lh5DecoderCfg.cs
Normal file
9
src/SharpCompress/Compressors/Arj/Lh5DecoderCfg.cs
Normal file
@@ -0,0 +1,9 @@
|
||||
namespace SharpCompress.Compressors.Arj
|
||||
{
|
||||
public class Lh5DecoderCfg : ILhaDecoderConfig
|
||||
{
|
||||
public int HistoryBits => 14;
|
||||
public int OffsetBits => 4;
|
||||
public RingBuffer RingBuffer { get; } = new RingBuffer(1 << 14);
|
||||
}
|
||||
}
|
||||
9
src/SharpCompress/Compressors/Arj/Lh7DecoderCfg.cs
Normal file
9
src/SharpCompress/Compressors/Arj/Lh7DecoderCfg.cs
Normal file
@@ -0,0 +1,9 @@
|
||||
namespace SharpCompress.Compressors.Arj
|
||||
{
|
||||
public class Lh7DecoderCfg : ILhaDecoderConfig
|
||||
{
|
||||
public int HistoryBits => 17;
|
||||
public int OffsetBits => 5;
|
||||
public RingBuffer RingBuffer { get; } = new RingBuffer(1 << 17);
|
||||
}
|
||||
}
|
||||
363
src/SharpCompress/Compressors/Arj/LhaStream.cs
Normal file
363
src/SharpCompress/Compressors/Arj/LhaStream.cs
Normal file
@@ -0,0 +1,363 @@
|
||||
using System;
using System.Data;
using System.IO;
using System.Linq;
using SharpCompress.IO;

namespace SharpCompress.Compressors.Arj
{
    /// <summary>
    /// Read-only stream that decodes LHA-style compressed data (as used by ARJ
    /// entries). The decoder is generic over a configuration type
    /// (<see cref="ILhaDecoderConfig"/>) supplying the history-window size and
    /// the bit width of offset-table size fields (lh5 = 14 bits, lh7 = 17 bits).
    /// Data is organized in blocks: each block starts with a 16-bit command
    /// count followed by three Huffman code-length tables, then a sequence of
    /// commands that are either literal bytes (0..255) or copy-from-history
    /// instructions (256 and up).
    /// </summary>
    [CLSCompliant(true)]
    public sealed class LhaStream<C> : Stream, IStreamStack
        where C : ILhaDecoderConfig, new()
    {
        private readonly BitReader _bitReader;
        private readonly Stream _stream;

        // Huffman tree for literal/copy commands, and a second tree that is
        // reused: first for the temporary code-length codes, then rebuilt for
        // the copy offsets.
        private readonly HuffTree _commandTree;
        private readonly HuffTree _offsetTree;
        // Commands left in the current block; 0 means a new block header is due.
        private int _remainingCommands;
        // Copy operation interrupted by a full output buffer; resumed on the
        // next FillBuffer call.
        private (int offset, int count)? _copyProgress;
        private readonly RingBuffer _ringBuffer;
        private readonly C _config = new C();

        // 510 commands: 256 literals plus copy lengths 3..256 (command - 0x100 + 3).
        private const int NUM_COMMANDS = 510;
        private const int NUM_TEMP_CODELEN = 20;

        // Uncompressed size from the archive header; drives EOF detection.
        private readonly int _originalSize;
        private int _producedBytes = 0;

#if DEBUG_STREAMS
        long IStreamStack.InstanceId { get; set; }
#endif
        int IStreamStack.DefaultBufferSize { get; set; }

        Stream IStreamStack.BaseStream() => _stream;

        // This stream does no internal buffering, so the IStreamStack buffer
        // accessors are inert.
        int IStreamStack.BufferSize
        {
            get => 0;
            set { }
        }
        int IStreamStack.BufferPosition
        {
            get => 0;
            set { }
        }

        void IStreamStack.SetPosition(long position) { }

        /// <summary>
        /// Creates a decoder over <paramref name="compressedStream"/> that will
        /// produce exactly <paramref name="originalSize"/> bytes.
        /// </summary>
        /// <exception cref="ArgumentNullException"><paramref name="compressedStream"/> is null.</exception>
        public LhaStream(Stream compressedStream, int originalSize)
        {
            _stream = compressedStream ?? throw new ArgumentNullException(nameof(compressedStream));
            _bitReader = new BitReader(compressedStream);
            _ringBuffer = _config.RingBuffer;
            _commandTree = new HuffTree(NUM_COMMANDS * 2);
            _offsetTree = new HuffTree(NUM_TEMP_CODELEN * 2);
            _remainingCommands = 0;
            _copyProgress = null;
            _originalSize = originalSize;
        }

        public override bool CanRead => true;
        public override bool CanSeek => false;
        public override bool CanWrite => false;
        public override long Length => throw new NotSupportedException();
        public override long Position
        {
            get => throw new NotSupportedException();
            set => throw new NotSupportedException();
        }

        public override void Flush() { }

        public override long Seek(long offset, SeekOrigin origin) =>
            throw new NotSupportedException();

        public override void SetLength(long value) => throw new NotSupportedException();

        public override void Write(byte[] buffer, int offset, int count) =>
            throw new NotSupportedException();

        /// <summary>
        /// Reads up to <paramref name="count"/> decompressed bytes into
        /// <paramref name="buffer"/>; returns 0 once the declared original size
        /// has been produced.
        /// </summary>
        public override int Read(byte[] buffer, int offset, int count)
        {
            if (buffer == null)
            {
                throw new ArgumentNullException(nameof(buffer));
            }
            if (offset < 0 || count < 0 || (offset + count) > buffer.Length)
            {
                throw new ArgumentOutOfRangeException();
            }

            if (_producedBytes >= _originalSize)
            {
                return 0; // EOF
            }
            if (count == 0)
            {
                return 0;
            }

            // NOTE(review): FillBuffer always writes starting at index 0 of the
            // array, so a non-zero 'offset' is not honored — confirm callers
            // always pass offset 0.
            int bytesRead = FillBuffer(buffer);
            return bytesRead;
        }

        /// <summary>
        /// Reads one code length: a 3-bit value, extended unary-style (each
        /// further 1 bit adds one) when the 3-bit value is 7.
        /// </summary>
        private byte ReadCodeLength()
        {
            byte len = (byte)_bitReader.ReadBits(3);
            if (len == 7)
            {
                while (_bitReader.ReadBit() != 0)
                {
                    len++;
                    // NOTE(review): 'len' is a byte, so 'len > 255' can never be
                    // true — it wraps to 0 first, and a malicious stream of 1-bits
                    // could loop here without tripping this guard. Confirm intent.
                    if (len > 255)
                    {
                        throw new InvalidOperationException("Code length overflow");
                    }
                }
            }
            return len;
        }

        /// <summary>
        /// Decodes a run-of-zeros skip count for the command code-length table;
        /// the skip range selects the field width and base value.
        /// </summary>
        private int ReadCodeSkip(int skipRange)
        {
            int bits;
            int increment;

            switch (skipRange)
            {
                case 0:
                    return 1;
                case 1:
                    bits = 4;
                    increment = 3; // 3..=18
                    break;
                default:
                    bits = 9;
                    increment = 20; // 20..=531
                    break;
            }

            int skip = _bitReader.ReadBits(bits);
            return skip + increment;
        }

        /// <summary>
        /// Reads the temporary code-length table (used to decode the command
        /// table) into <see cref="_offsetTree"/>, which is reused for it.
        /// </summary>
        private void ReadTempTree()
        {
            byte[] codeLengths = new byte[NUM_TEMP_CODELEN];

            // number of codes to read (5 bits)
            int numCodes = _bitReader.ReadBits(5);

            // single code only
            if (numCodes == 0)
            {
                int code = _bitReader.ReadBits(5);
                _offsetTree.SetSingle((byte)code);
                return;
            }

            if (numCodes > NUM_TEMP_CODELEN)
            {
                throw new Exception("temporary codelen table has invalid size");
            }

            // read actual lengths
            int count = Math.Min(3, numCodes);
            for (int i = 0; i < count; i++)
            {
                codeLengths[i] = (byte)ReadCodeLength();
            }

            // 2-bit skip value follows; entries 3..(3+skip-1) stay zero-length
            int skip = _bitReader.ReadBits(2);

            if (3 + skip > numCodes)
            {
                throw new Exception("temporary codelen table has invalid size");
            }

            for (int i = 3 + skip; i < numCodes; i++)
            {
                codeLengths[i] = (byte)ReadCodeLength();
            }

            _offsetTree.BuildTree(codeLengths, numCodes);
        }

        /// <summary>
        /// Reads the command code-length table, decoded through the temporary
        /// tree: codes 0..2 are zero-run skips, others mean length (code - 2).
        /// </summary>
        private void ReadCommandTree()
        {
            byte[] codeLengths = new byte[NUM_COMMANDS];

            // number of codes to read (9 bits)
            int numCodes = _bitReader.ReadBits(9);

            // single code only
            if (numCodes == 0)
            {
                int code = _bitReader.ReadBits(9);
                _commandTree.SetSingle((ushort)code);
                return;
            }

            if (numCodes > NUM_COMMANDS)
            {
                throw new Exception("commands codelen table has invalid size");
            }

            int index = 0;
            while (index < numCodes)
            {
                for (int n = 0; n < numCodes - index; n++)
                {
                    int code = _offsetTree.ReadEntry(_bitReader);

                    if (code >= 0 && code <= 2) // skip range
                    {
                        // Leave the skipped entries at length 0 and restart the
                        // inner scan from the new index.
                        int skipCount = ReadCodeSkip(code);
                        index += n + skipCount;
                        goto outerLoop;
                    }
                    else
                    {
                        codeLengths[index + n] = (byte)(code - 2);
                    }
                }
                break;

                outerLoop:
                ;
            }

            _commandTree.BuildTree(codeLengths, numCodes);
        }

        /// <summary>
        /// Reads the offset code-length table and rebuilds
        /// <see cref="_offsetTree"/> for decoding copy offsets.
        /// </summary>
        private void ReadOffsetTree()
        {
            int numCodes = _bitReader.ReadBits(_config.OffsetBits);
            if (numCodes == 0)
            {
                int code = _bitReader.ReadBits(_config.OffsetBits);
                _offsetTree.SetSingle(code);
                return;
            }

            if (numCodes > _config.HistoryBits)
            {
                throw new InvalidDataException("Offset code table too large");
            }

            byte[] codeLengths = new byte[NUM_TEMP_CODELEN];
            for (int i = 0; i < numCodes; i++)
            {
                codeLengths[i] = (byte)ReadCodeLength();
            }

            _offsetTree.BuildTree(codeLengths, numCodes);
        }

        // Reads all three tables that start a block, in stream order.
        private void BeginNewBlock()
        {
            ReadTempTree();
            ReadCommandTree();
            ReadOffsetTree();
        }

        private int ReadCommand() => _commandTree.ReadEntry(_bitReader);

        /// <summary>
        /// Decodes a history offset: the Huffman entry gives the bit count; the
        /// remaining low bits are read raw with an implicit leading 1 bit.
        /// </summary>
        private int ReadOffset()
        {
            int bits = _offsetTree.ReadEntry(_bitReader);
            if (bits <= 1)
            {
                return bits;
            }

            int res = _bitReader.ReadBits(bits - 1);
            return res | (1 << (bits - 1));
        }

        /// <summary>
        /// Copies up to <paramref name="count"/> bytes from the history window
        /// into <paramref name="target"/>; records unfinished work in
        /// <see cref="_copyProgress"/>. Returns the number of bytes copied.
        /// </summary>
        private int CopyFromHistory(byte[] target, int targetIndex, int offset, int count)
        {
            // NOTE(review): copied bytes are not pushed back into _ringBuffer
            // here — presumably HistoryIterator keeps the window current, or
            // the enumeration re-enters them; verify against HistoryIterator.
            var historyIter = _ringBuffer.IterFromOffset(offset);
            int copied = 0;

            while (
                copied < count && historyIter.MoveNext() && (targetIndex + copied) < target.Length
            )
            {
                target[targetIndex + copied] = historyIter.Current;
                copied++;
            }

            if (copied < count)
            {
                _copyProgress = (offset, count - copied);
            }

            return copied;
        }

        /// <summary>
        /// Main decode loop: fills <paramref name="buffer"/> from index 0 with
        /// decompressed bytes, resuming an interrupted copy first, and returns
        /// the number of bytes produced.
        /// </summary>
        public int FillBuffer(byte[] buffer)
        {
            int bufLen = buffer.Length;
            int bufIndex = 0;

            // stop when we reached original size
            if (_producedBytes >= _originalSize)
            {
                return 0;
            }

            // calculate limit, so that we don't go over the original size
            int remaining = (int)Math.Min(bufLen, _originalSize - _producedBytes);

            while (bufIndex < remaining)
            {
                if (_copyProgress.HasValue)
                {
                    var (offset, count) = _copyProgress.Value;
                    // NOTE(review): two concerns here — (1) the pending count is
                    // truncated to the buffer space before the call, so any
                    // leftover beyond 'remaining' is dropped rather than carried
                    // over; (2) CopyFromHistory may have just re-recorded
                    // unfinished progress, and the unconditional null below
                    // discards it. Confirm both against reference output.
                    int copied = CopyFromHistory(
                        buffer,
                        bufIndex,
                        offset,
                        (int)Math.Min(count, remaining - bufIndex)
                    );
                    bufIndex += copied;
                    _copyProgress = null;
                }

                if (_remainingCommands == 0)
                {
                    _remainingCommands = _bitReader.ReadBits(16);
                    // NOTE(review): breaking here leaves _remainingCommands set
                    // while BeginNewBlock was never called, so a subsequent call
                    // would decode with the previous block's trees — confirm
                    // callers always supply a large enough buffer.
                    if (bufIndex + _remainingCommands > remaining)
                    {
                        break;
                    }
                    BeginNewBlock();
                }

                _remainingCommands--;

                int command = ReadCommand();

                if (command >= 0 && command <= 0xFF)
                {
                    // Literal byte: emit and record in the history window.
                    byte value = (byte)command;
                    buffer[bufIndex++] = value;
                    _ringBuffer.Push(value);
                }
                else
                {
                    // Copy command: length 3..258 encoded above 0x100.
                    int count = command - 0x100 + 3;
                    int offset = ReadOffset();
                    int copyCount = (int)Math.Min(count, remaining - bufIndex);
                    bufIndex += CopyFromHistory(buffer, bufIndex, offset, copyCount);
                }
            }

            _producedBytes += bufIndex;
            return bufIndex;
        }
    }
}
|
||||
67
src/SharpCompress/Compressors/Arj/RingBuffer.cs
Normal file
67
src/SharpCompress/Compressors/Arj/RingBuffer.cs
Normal file
@@ -0,0 +1,67 @@
|
||||
using System;
using System.Collections;
using System.Collections.Generic;

namespace SharpCompress.Compressors.Arj
{
    /// <summary>
    /// A fixed-size ring buffer whose capacity must be a power of two, used as
    /// the LZ history window by the ARJ/LHA decoders. Wrap-around indexing is
    /// done with a bit mask instead of a modulo.
    /// </summary>
    public class RingBuffer : IRingBuffer
    {
        private readonly byte[] _buffer;
        private int _cursor;

        /// <summary>Capacity of the window in bytes (always a power of two).</summary>
        public int BufferSize { get; }

        /// <summary>Current write position, always within [0, BufferSize).</summary>
        public int Cursor => _cursor;

        private readonly int _mask;

        /// <summary>
        /// Creates a window of <paramref name="size"/> bytes pre-filled with
        /// spaces (0x20), the conventional initial LHA window content.
        /// </summary>
        /// <param name="size">Window capacity; must be a positive power of two.</param>
        /// <exception cref="ArgumentException">
        /// <paramref name="size"/> is not a positive power of two.
        /// </exception>
        public RingBuffer(int size)
        {
            // The bit trick alone accepts 0 (0 & -1 == 0), which would yield an
            // empty buffer and a -1 mask, so reject non-positive sizes explicitly.
            if (size <= 0 || (size & (size - 1)) != 0)
            {
                throw new ArgumentException("RingArrayBuffer size must be a power of two");
            }

            BufferSize = size;
            _buffer = new byte[size];
            _cursor = 0;
            _mask = size - 1;

            // Fill with spaces
            for (int i = 0; i < size; i++)
            {
                _buffer[i] = (byte)' ';
            }
        }

        /// <summary>Moves the write cursor to <paramref name="pos"/>, wrapped into range.</summary>
        public void SetCursor(int pos)
        {
            _cursor = pos & _mask;
        }

        /// <summary>Writes one byte at the cursor and advances it (wrapping).</summary>
        public void Push(byte value)
        {
            int index = _cursor;
            _buffer[index & _mask] = value;
            _cursor = (index + 1) & _mask;
        }

        /// <summary>Reads the byte at <paramref name="index"/>, wrapped into range.</summary>
        public byte this[int index] => _buffer[index & _mask];

        /// <summary>
        /// Returns an iterator over history starting (offset + 1) positions
        /// behind the current cursor, i.e. offset 0 starts at the most recently
        /// written byte.
        /// </summary>
        public HistoryIterator IterFromOffset(int offset)
        {
            int masked = (offset & _mask) + 1;
            int startIndex = _cursor + BufferSize - masked;
            return new HistoryIterator(this, startIndex);
        }

        /// <summary>Returns an iterator starting at absolute position <paramref name="pos"/> (wrapped).</summary>
        public HistoryIterator IterFromPos(int pos)
        {
            int startIndex = pos & _mask;
            return new HistoryIterator(this, startIndex);
        }
    }
}
|
||||
@@ -62,10 +62,6 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
|
||||
base.Dispose(disposing);
|
||||
if (disposing)
|
||||
{
|
||||
#if DEBUG_STREAMS
|
||||
this.DebugDispose(typeof(MultiVolumeReadOnlyStream));
|
||||
#endif
|
||||
|
||||
if (filePartEnumerator != null)
|
||||
{
|
||||
filePartEnumerator.Dispose();
|
||||
|
||||
@@ -82,9 +82,6 @@ internal class RarStream : Stream, IStreamStack
|
||||
{
|
||||
if (disposing)
|
||||
{
|
||||
#if DEBUG_STREAMS
|
||||
this.DebugDispose(typeof(RarStream));
|
||||
#endif
|
||||
ArrayPool<byte>.Shared.Return(this.tmpBuffer);
|
||||
this.tmpBuffer = null;
|
||||
}
|
||||
|
||||
@@ -27,7 +27,7 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
|
||||
if (!disposed)
|
||||
{
|
||||
base.Dispose();
|
||||
if (!externalWindow)
|
||||
if (!externalWindow && window is not null)
|
||||
{
|
||||
ArrayPool<byte>.Shared.Return(window);
|
||||
window = null;
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
using System;
|
||||
using System.Buffers.Binary;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace SharpCompress.Compressors.Xz;
|
||||
|
||||
@@ -30,6 +32,28 @@ public static class BinaryUtils
|
||||
internal static uint ReadLittleEndianUInt32(this Stream stream) =>
|
||||
unchecked((uint)ReadLittleEndianInt32(stream));
|
||||
|
||||
    /// <summary>
    /// Asynchronously reads exactly four bytes from <paramref name="stream"/>
    /// and decodes them as a little-endian signed 32-bit integer.
    /// </summary>
    /// <exception cref="EndOfStreamException">The stream ended before four bytes were available.</exception>
    public static async Task<int> ReadLittleEndianInt32Async(
        this Stream stream,
        CancellationToken cancellationToken = default
    )
    {
        var bytes = new byte[4];
        // ReadFullyAsync returning false is treated here as end-of-stream.
        var read = await stream.ReadFullyAsync(bytes, cancellationToken).ConfigureAwait(false);
        if (!read)
        {
            throw new EndOfStreamException();
        }
        return BinaryPrimitives.ReadInt32LittleEndian(bytes);
    }

    /// <summary>
    /// Async counterpart of ReadLittleEndianUInt32: the same four bytes,
    /// reinterpreted as unsigned without overflow checking.
    /// </summary>
    internal static async Task<uint> ReadLittleEndianUInt32Async(
        this Stream stream,
        CancellationToken cancellationToken = default
    ) =>
        unchecked(
            (uint)await ReadLittleEndianInt32Async(stream, cancellationToken).ConfigureAwait(false)
        );
|
||||
|
||||
internal static byte[] ToBigEndianBytes(this uint uint32)
|
||||
{
|
||||
var result = BitConverter.GetBytes(uint32);
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
|
||||
namespace SharpCompress.Compressors.Xz;
|
||||
@@ -39,4 +41,75 @@ internal static class MultiByteIntegers
|
||||
}
|
||||
return Output;
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Asynchronously reads an XZ variable-length integer: 7 payload bits per
    /// byte, high bit set on every byte except the last.
    /// </summary>
    /// <param name="MaxBytes">Maximum encoded length; values above 9 (the format's 63-bit cap) are clamped.</param>
    /// <exception cref="ArgumentOutOfRangeException"><paramref name="MaxBytes"/> is not positive.</exception>
    /// <exception cref="InvalidFormatException">
    /// The encoding exceeds <paramref name="MaxBytes"/>, or a continuation
    /// byte is zero (a non-minimal encoding).
    /// </exception>
    public static async Task<ulong> ReadXZIntegerAsync(
        this BinaryReader reader,
        // NOTE(review): the cancellation token precedes another optional
        // parameter here (convention puts it last) — presumably kept to mirror
        // the sync ReadXZInteger's MaxBytes default; confirm before changing.
        CancellationToken cancellationToken = default,
        int MaxBytes = 9
    )
    {
        if (MaxBytes <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(MaxBytes));
        }

        if (MaxBytes > 9)
        {
            MaxBytes = 9;
        }

        var LastByte = await ReadByteAsync(reader, cancellationToken).ConfigureAwait(false);
        var Output = (ulong)LastByte & 0x7F;

        var i = 0;
        while ((LastByte & 0x80) != 0)
        {
            if (++i >= MaxBytes)
            {
                throw new InvalidFormatException();
            }

            LastByte = await ReadByteAsync(reader, cancellationToken).ConfigureAwait(false);
            if (LastByte == 0)
            {
                throw new InvalidFormatException();
            }

            // Each continuation byte contributes 7 more significant bits.
            Output |= ((ulong)(LastByte & 0x7F)) << (i * 7);
        }
        return Output;
    }
|
||||
|
||||
public static async Task<byte> ReadByteAsync(
|
||||
this BinaryReader reader,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var buffer = new byte[1];
|
||||
var bytesRead = await reader
|
||||
.BaseStream.ReadAsync(buffer, 0, 1, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
if (bytesRead != 1)
|
||||
{
|
||||
throw new EndOfStreamException();
|
||||
}
|
||||
return buffer[0];
|
||||
}
|
||||
|
||||
public static async Task<byte[]> ReadBytesAsync(
|
||||
this BinaryReader reader,
|
||||
int count,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var buffer = new byte[count];
|
||||
var bytesRead = await reader
|
||||
.BaseStream.ReadAsync(buffer, 0, count, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
if (bytesRead != count)
|
||||
{
|
||||
throw new EndOfStreamException();
|
||||
}
|
||||
return buffer;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,6 +4,8 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Compressors.Xz.Filters;
|
||||
|
||||
@@ -72,6 +74,49 @@ public sealed class XZBlock : XZReadOnlyStream
|
||||
return bytesRead;
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Async read: lazily loads the block header and connects the filter chain
    /// on the first call, then serves decompressed bytes; when the
    /// decompressor comes up short the block is finished, so padding is
    /// skipped and the check bytes are consumed.
    /// </summary>
    public override async Task<int> ReadAsync(
        byte[] buffer,
        int offset,
        int count,
        CancellationToken cancellationToken = default
    )
    {
        var bytesRead = 0;
        if (!HeaderIsLoaded)
        {
            await LoadHeaderAsync(cancellationToken).ConfigureAwait(false);
        }

        if (!_streamConnected)
        {
            ConnectStream();
        }

        if (!_endOfStream)
        {
            bytesRead = await _decomStream
                .ReadAsync(buffer, offset, count, cancellationToken)
                .ConfigureAwait(false);
        }

        // NOTE(review): a short read is taken to mean the block is exhausted; a
        // decompressor may legally return fewer bytes without being at EOF —
        // confirm _decomStream always fills the buffer until it truly ends.
        if (bytesRead != count)
        {
            _endOfStream = true;
        }

        if (_endOfStream && !_paddingSkipped)
        {
            await SkipPaddingAsync(cancellationToken).ConfigureAwait(false);
        }

        if (_endOfStream && !_crcChecked)
        {
            await CheckCrcAsync(cancellationToken).ConfigureAwait(false);
        }

        return bytesRead;
    }
|
||||
|
||||
private void SkipPadding()
|
||||
{
|
||||
var bytes = (BaseStream.Position - _startPosition) % 4;
|
||||
@@ -87,6 +132,23 @@ public sealed class XZBlock : XZReadOnlyStream
|
||||
_paddingSkipped = true;
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Async skip of the 0-3 null bytes that align the block to a 4-byte
    /// boundary, measured from the block's start position.
    /// </summary>
    /// <exception cref="InvalidFormatException">A padding byte was non-zero.</exception>
    private async Task SkipPaddingAsync(CancellationToken cancellationToken = default)
    {
        var bytes = (BaseStream.Position - _startPosition) % 4;
        if (bytes > 0)
        {
            var paddingBytes = new byte[4 - bytes];
            // NOTE(review): the number of bytes actually read is not checked; a
            // partial ReadAsync would leave stale zeros undetected — confirm.
            await BaseStream
                .ReadAsync(paddingBytes, 0, paddingBytes.Length, cancellationToken)
                .ConfigureAwait(false);
            if (paddingBytes.Any(b => b != 0))
            {
                throw new InvalidFormatException("Padding bytes were non-null");
            }
        }
        _paddingSkipped = true;
    }
|
||||
|
||||
private void CheckCrc()
|
||||
{
|
||||
var crc = new byte[_checkSize];
|
||||
@@ -96,6 +158,15 @@ public sealed class XZBlock : XZReadOnlyStream
|
||||
_crcChecked = true;
|
||||
}
|
||||
|
||||
private async Task CheckCrcAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
var crc = new byte[_checkSize];
|
||||
await BaseStream.ReadAsync(crc, 0, _checkSize, cancellationToken).ConfigureAwait(false);
|
||||
// Actually do a check (and read in the bytes
|
||||
// into the function throughout the stream read).
|
||||
_crcChecked = true;
|
||||
}
|
||||
|
||||
private void ConnectStream()
|
||||
{
|
||||
_decomStream = BaseStream;
|
||||
@@ -123,6 +194,21 @@ public sealed class XZBlock : XZReadOnlyStream
|
||||
HeaderIsLoaded = true;
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Async header load: reads the header size byte, caches the CRC-verified
    /// header bytes, then parses block flags and filters from the in-memory
    /// copy.
    /// </summary>
    private async Task LoadHeaderAsync(CancellationToken cancellationToken = default)
    {
        await ReadHeaderSizeAsync(cancellationToken).ConfigureAwait(false);
        var headerCache = await CacheHeaderAsync(cancellationToken).ConfigureAwait(false);

        using (var cache = new MemoryStream(headerCache))
        using (var cachedReader = new BinaryReader(cache))
        {
            cachedReader.BaseStream.Position = 1; // skip the header size byte
            ReadBlockFlags(cachedReader);
            ReadFilters(cachedReader);
        }
        HeaderIsLoaded = true;
    }
|
||||
|
||||
private void ReadHeaderSize()
|
||||
{
|
||||
_blockHeaderSizeByte = (byte)BaseStream.ReadByte();
|
||||
@@ -132,6 +218,17 @@ public sealed class XZBlock : XZReadOnlyStream
|
||||
}
|
||||
}
|
||||
|
||||
private async Task ReadHeaderSizeAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
var buffer = new byte[1];
|
||||
await BaseStream.ReadAsync(buffer, 0, 1, cancellationToken).ConfigureAwait(false);
|
||||
_blockHeaderSizeByte = buffer[0];
|
||||
if (_blockHeaderSizeByte == 0)
|
||||
{
|
||||
throw new XZIndexMarkerReachedException();
|
||||
}
|
||||
}
|
||||
|
||||
private byte[] CacheHeader()
|
||||
{
|
||||
var blockHeaderWithoutCrc = new byte[BlockHeaderSize - 4];
|
||||
@@ -139,7 +236,7 @@ public sealed class XZBlock : XZReadOnlyStream
|
||||
var read = BaseStream.Read(blockHeaderWithoutCrc, 1, BlockHeaderSize - 5);
|
||||
if (read != BlockHeaderSize - 5)
|
||||
{
|
||||
throw new EndOfStreamException("Reached end of stream unexectedly");
|
||||
throw new EndOfStreamException("Reached end of stream unexpectedly");
|
||||
}
|
||||
|
||||
var crc = BaseStream.ReadLittleEndianUInt32();
|
||||
@@ -152,6 +249,30 @@ public sealed class XZBlock : XZReadOnlyStream
|
||||
return blockHeaderWithoutCrc;
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Async read of the whole block header (minus its trailing CRC32) into a
    /// buffer, validating the CRC32 that follows on the stream.
    /// </summary>
    /// <returns>The header bytes with the size byte re-inserted at index 0.</returns>
    /// <exception cref="EndOfStreamException">The stream ended inside the header.</exception>
    /// <exception cref="InvalidFormatException">The stored CRC32 does not match.</exception>
    private async Task<byte[]> CacheHeaderAsync(CancellationToken cancellationToken = default)
    {
        // BlockHeaderSize includes the 4 CRC bytes; the size byte itself was
        // already consumed by ReadHeaderSizeAsync, hence the -4/-5 arithmetic.
        var blockHeaderWithoutCrc = new byte[BlockHeaderSize - 4];
        blockHeaderWithoutCrc[0] = _blockHeaderSizeByte;
        var read = await BaseStream
            .ReadAsync(blockHeaderWithoutCrc, 1, BlockHeaderSize - 5, cancellationToken)
            .ConfigureAwait(false);
        if (read != BlockHeaderSize - 5)
        {
            throw new EndOfStreamException("Reached end of stream unexpectedly");
        }

        var crc = await BaseStream
            .ReadLittleEndianUInt32Async(cancellationToken)
            .ConfigureAwait(false);
        var calcCrc = Crc32.Compute(blockHeaderWithoutCrc);
        if (crc != calcCrc)
        {
            throw new InvalidFormatException("Block header corrupt");
        }

        return blockHeaderWithoutCrc;
    }
|
||||
|
||||
private void ReadBlockFlags(BinaryReader reader)
|
||||
{
|
||||
var blockFlags = reader.ReadByte();
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.IO;
|
||||
|
||||
@@ -27,6 +29,16 @@ public class XZFooter
|
||||
return footer;
|
||||
}
|
||||
|
||||
public static async Task<XZFooter> FromStreamAsync(
|
||||
Stream stream,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var footer = new XZFooter(new BinaryReader(stream, Encoding.UTF8, true));
|
||||
await footer.ProcessAsync(cancellationToken).ConfigureAwait(false);
|
||||
return footer;
|
||||
}
|
||||
|
||||
public void Process()
|
||||
{
|
||||
var crc = _reader.ReadLittleEndianUInt32();
|
||||
@@ -49,4 +61,29 @@ public class XZFooter
|
||||
throw new InvalidFormatException("Magic footer missing");
|
||||
}
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Async parse of the XZ stream footer: CRC32, then six CRC-covered bytes
    /// (stored backward size and stream flags), then the footer magic bytes.
    /// </summary>
    /// <exception cref="InvalidFormatException">The footer CRC or magic bytes are wrong.</exception>
    public async Task ProcessAsync(CancellationToken cancellationToken = default)
    {
        var crc = await _reader
            .BaseStream.ReadLittleEndianUInt32Async(cancellationToken)
            .ConfigureAwait(false);
        var footerBytes = await _reader.ReadBytesAsync(6, cancellationToken).ConfigureAwait(false);
        var myCrc = Crc32.Compute(footerBytes);
        if (crc != myCrc)
        {
            throw new InvalidFormatException("Footer corrupt");
        }

        // Re-parse the CRC-covered bytes: 4-byte stored backward size then 2 flag bytes.
        using (var stream = new MemoryStream(footerBytes))
        using (var reader = new BinaryReader(stream))
        {
            // Stored value is (real size / 4) - 1.
            BackwardSize = (reader.ReadLittleEndianUInt32() + 1) * 4;
            StreamFlags = reader.ReadBytes(2);
        }
        var magBy = await _reader.ReadBytesAsync(2, cancellationToken).ConfigureAwait(false);
        if (!magBy.AsSpan().SequenceEqual(_magicBytes))
        {
            throw new InvalidFormatException("Magic footer missing");
        }
    }
|
||||
}
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.IO;
|
||||
|
||||
@@ -23,12 +25,28 @@ public class XZHeader
|
||||
return header;
|
||||
}
|
||||
|
||||
public static async Task<XZHeader> FromStreamAsync(
|
||||
Stream stream,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var header = new XZHeader(new BinaryReader(stream, Encoding.UTF8, true));
|
||||
await header.ProcessAsync(cancellationToken).ConfigureAwait(false);
|
||||
return header;
|
||||
}
|
||||
|
||||
public void Process()
|
||||
{
|
||||
CheckMagicBytes(_reader.ReadBytes(6));
|
||||
ProcessStreamFlags();
|
||||
}
|
||||
|
||||
public async Task ProcessAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
CheckMagicBytes(await _reader.ReadBytesAsync(6, cancellationToken).ConfigureAwait(false));
|
||||
await ProcessStreamFlagsAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
private void ProcessStreamFlags()
|
||||
{
|
||||
var streamFlags = _reader.ReadBytes(2);
|
||||
@@ -47,6 +65,26 @@ public class XZHeader
|
||||
}
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Async read of the two stream-flag bytes and their CRC32: validates the
    /// CRC, extracts the block check type from the low nibble of the second
    /// byte, and rejects any reserved bits being set.
    /// </summary>
    /// <exception cref="InvalidFormatException">CRC mismatch, or reserved flag bits are non-zero.</exception>
    private async Task ProcessStreamFlagsAsync(CancellationToken cancellationToken = default)
    {
        var streamFlags = await _reader.ReadBytesAsync(2, cancellationToken).ConfigureAwait(false);
        var crc = await _reader
            .BaseStream.ReadLittleEndianUInt32Async(cancellationToken)
            .ConfigureAwait(false);
        var calcCrc = Crc32.Compute(streamFlags);
        if (crc != calcCrc)
        {
            throw new InvalidFormatException("Stream header corrupt");
        }

        BlockCheckType = (CheckType)(streamFlags[1] & 0x0F);
        // Byte 0 and the high nibble of byte 1 must be zero in this version.
        var futureUse = (byte)(streamFlags[1] & 0xF0);
        if (futureUse != 0 || streamFlags[0] != 0)
        {
            throw new InvalidFormatException("Unknown XZ Stream Version");
        }
    }
|
||||
|
||||
private void CheckMagicBytes(byte[] header)
|
||||
{
|
||||
if (!header.SequenceEqual(MagicHeader))
|
||||
|
||||
@@ -3,6 +3,8 @@ using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.IO;
|
||||
|
||||
@@ -39,6 +41,20 @@ public class XZIndex
|
||||
return index;
|
||||
}
|
||||
|
||||
public static async Task<XZIndex> FromStreamAsync(
|
||||
Stream stream,
|
||||
bool indexMarkerAlreadyVerified,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var index = new XZIndex(
|
||||
new BinaryReader(stream, Encoding.UTF8, true),
|
||||
indexMarkerAlreadyVerified
|
||||
);
|
||||
await index.ProcessAsync(cancellationToken).ConfigureAwait(false);
|
||||
return index;
|
||||
}
|
||||
|
||||
public void Process()
|
||||
{
|
||||
if (!_indexMarkerAlreadyVerified)
|
||||
@@ -55,6 +71,26 @@ public class XZIndex
|
||||
VerifyCrc32();
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Async parse of the XZ index: the index marker (unless already
    /// consumed), the record count as an XZ varint, each record, the 4-byte
    /// alignment padding, and the index CRC32.
    /// </summary>
    public async Task ProcessAsync(CancellationToken cancellationToken = default)
    {
        if (!_indexMarkerAlreadyVerified)
        {
            await VerifyIndexMarkerAsync(cancellationToken).ConfigureAwait(false);
        }

        NumberOfRecords = await _reader.ReadXZIntegerAsync(cancellationToken).ConfigureAwait(false);
        for (ulong i = 0; i < NumberOfRecords; i++)
        {
            Records.Add(
                await XZIndexRecord
                    .FromBinaryReaderAsync(_reader, cancellationToken)
                    .ConfigureAwait(false)
            );
        }
        await SkipPaddingAsync(cancellationToken).ConfigureAwait(false);
        await VerifyCrc32Async(cancellationToken).ConfigureAwait(false);
    }
|
||||
|
||||
private void VerifyIndexMarker()
|
||||
{
|
||||
var marker = _reader.ReadByte();
|
||||
@@ -64,6 +100,15 @@ public class XZIndex
|
||||
}
|
||||
}
|
||||
|
||||
private async Task VerifyIndexMarkerAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
var marker = await _reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
|
||||
if (marker != 0)
|
||||
{
|
||||
throw new InvalidFormatException("Not an index block");
|
||||
}
|
||||
}
|
||||
|
||||
private void SkipPadding()
|
||||
{
|
||||
var bytes = (int)(_reader.BaseStream.Position - StreamStartPosition) % 4;
|
||||
@@ -77,9 +122,32 @@ public class XZIndex
|
||||
}
|
||||
}
|
||||
|
||||
private async Task SkipPaddingAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
var bytes = (int)(_reader.BaseStream.Position - StreamStartPosition) % 4;
|
||||
if (bytes > 0)
|
||||
{
|
||||
var paddingBytes = await _reader
|
||||
.ReadBytesAsync(4 - bytes, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
if (paddingBytes.Any(b => b != 0))
|
||||
{
|
||||
throw new InvalidFormatException("Padding bytes were non-null");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void VerifyCrc32()
|
||||
{
|
||||
var crc = _reader.ReadLittleEndianUInt32();
|
||||
// TODO verify this matches
|
||||
}
|
||||
|
||||
private async Task VerifyCrc32Async(CancellationToken cancellationToken = default)
|
||||
{
|
||||
var crc = await _reader
|
||||
.BaseStream.ReadLittleEndianUInt32Async(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
// TODO verify this matches
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace SharpCompress.Compressors.Xz;
|
||||
|
||||
@@ -18,4 +20,16 @@ public class XZIndexRecord
|
||||
record.UncompressedSize = br.ReadXZInteger();
|
||||
return record;
|
||||
}
|
||||
|
||||
public static async Task<XZIndexRecord> FromBinaryReaderAsync(
|
||||
BinaryReader br,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var record = new XZIndexRecord();
|
||||
record.UnpaddedSize = await br.ReadXZIntegerAsync(cancellationToken).ConfigureAwait(false);
|
||||
record.UncompressedSize = await br.ReadXZIntegerAsync(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
return record;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.IO;
|
||||
|
||||
@@ -104,6 +106,35 @@ public sealed class XZStream : XZReadOnlyStream, IStreamStack
|
||||
return bytesRead;
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Async read across the whole stream: parses the stream header on first
    /// call, then pulls decompressed bytes from the blocks; a short read marks
    /// end of stream and triggers parsing of the index and footer.
    /// </summary>
    public override async Task<int> ReadAsync(
        byte[] buffer,
        int offset,
        int count,
        CancellationToken cancellationToken = default
    )
    {
        var bytesRead = 0;
        if (_endOfStream)
        {
            return bytesRead;
        }

        if (!HeaderIsRead)
        {
            await ReadHeaderAsync(cancellationToken).ConfigureAwait(false);
        }

        bytesRead = await ReadBlocksAsync(buffer, offset, count, cancellationToken)
            .ConfigureAwait(false);
        // ReadBlocksAsync only returns short at the index marker, so a short
        // read here means the compressed data is fully consumed.
        if (bytesRead < count)
        {
            _endOfStream = true;
            await ReadIndexAsync(cancellationToken).ConfigureAwait(false);
            await ReadFooterAsync(cancellationToken).ConfigureAwait(false);
        }
        return bytesRead;
    }
|
||||
|
||||
private void ReadHeader()
|
||||
{
|
||||
Header = XZHeader.FromStream(BaseStream);
|
||||
@@ -111,12 +142,31 @@ public sealed class XZStream : XZReadOnlyStream, IStreamStack
|
||||
HeaderIsRead = true;
|
||||
}
|
||||
|
||||
private async Task ReadHeaderAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
Header = await XZHeader
|
||||
.FromStreamAsync(BaseStream, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
AssertBlockCheckTypeIsSupported();
|
||||
HeaderIsRead = true;
|
||||
}
|
||||
|
||||
private void ReadIndex() => Index = XZIndex.FromStream(BaseStream, true);
|
||||
|
||||
// TODO veryfy Index
|
||||
private async Task ReadIndexAsync(CancellationToken cancellationToken = default) =>
|
||||
Index = await XZIndex
|
||||
.FromStreamAsync(BaseStream, true, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
// TODO verify Index
|
||||
private void ReadFooter() => Footer = XZFooter.FromStream(BaseStream);
|
||||
|
||||
// TODO verify footer
|
||||
private async Task ReadFooterAsync(CancellationToken cancellationToken = default) =>
|
||||
Footer = await XZFooter
|
||||
.FromStreamAsync(BaseStream, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
private int ReadBlocks(byte[] buffer, int offset, int count)
|
||||
{
|
||||
var bytesRead = 0;
|
||||
@@ -152,6 +202,48 @@ public sealed class XZStream : XZReadOnlyStream, IStreamStack
|
||||
return bytesRead;
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Async loop that fills <paramref name="buffer"/> from consecutive XZ
    /// blocks, advancing to the next block whenever the current one runs
    /// short, until the index marker ends the data.
    /// </summary>
    /// <returns>
    /// Total bytes written; fewer than <paramref name="count"/> only when the
    /// index marker was reached.
    /// </returns>
    private async Task<int> ReadBlocksAsync(
        byte[] buffer,
        int offset,
        int count,
        CancellationToken cancellationToken = default
    )
    {
        var bytesRead = 0;
        if (_currentBlock is null)
        {
            NextBlock();
        }

        for (; ; )
        {
            try
            {
                if (bytesRead >= count)
                {
                    break;
                }

                var remaining = count - bytesRead;
                var newOffset = offset + bytesRead;
                var justRead = await _currentBlock
                    .ReadAsync(buffer, newOffset, remaining, cancellationToken)
                    .ConfigureAwait(false);
                // A short block read means the block is exhausted; queue the next.
                if (justRead < remaining)
                {
                    NextBlock();
                }

                bytesRead += justRead;
            }
            catch (XZIndexMarkerReachedException)
            {
                // The "next block" turned out to be the index: normal end of data.
                break;
            }
        }
        return bytesRead;
    }
|
||||
|
||||
private void NextBlock() =>
|
||||
_currentBlock = new XZBlock(BaseStream, Header.BlockCheckType, Header.BlockCheckSize);
|
||||
}
|
||||
|
||||
@@ -15,7 +15,7 @@ public class SourceStream : Stream, IStreamStack
|
||||
#endif
|
||||
int IStreamStack.DefaultBufferSize { get; set; }
|
||||
|
||||
Stream IStreamStack.BaseStream() => _streams[_stream];
|
||||
Stream IStreamStack.BaseStream() => _streams[_streamIndex];
|
||||
|
||||
int IStreamStack.BufferSize
|
||||
{
|
||||
@@ -35,7 +35,7 @@ public class SourceStream : Stream, IStreamStack
|
||||
private readonly List<Stream> _streams;
|
||||
private readonly Func<int, FileInfo?>? _getFilePart;
|
||||
private readonly Func<int, Stream?>? _getStreamPart;
|
||||
private int _stream;
|
||||
private int _streamIndex;
|
||||
|
||||
public SourceStream(FileInfo file, Func<int, FileInfo?> getPart, ReaderOptions options)
|
||||
: this(null, null, file, getPart, options) { }
|
||||
@@ -59,7 +59,7 @@ public class SourceStream : Stream, IStreamStack
|
||||
|
||||
if (!IsFileMode)
|
||||
{
|
||||
_streams.Add(stream!);
|
||||
_streams.Add(stream.NotNull("stream is null"));
|
||||
_getStreamPart = getStreamPart;
|
||||
_getFilePart = _ => null;
|
||||
if (stream is FileStream fileStream)
|
||||
@@ -69,12 +69,12 @@ public class SourceStream : Stream, IStreamStack
|
||||
}
|
||||
else
|
||||
{
|
||||
_files.Add(file!);
|
||||
_files.Add(file.NotNull("file is null"));
|
||||
_streams.Add(_files[0].OpenRead());
|
||||
_getFilePart = getFilePart;
|
||||
_getStreamPart = _ => null;
|
||||
}
|
||||
_stream = 0;
|
||||
_streamIndex = 0;
|
||||
_prevSize = 0;
|
||||
|
||||
#if DEBUG_STREAMS
|
||||
@@ -93,10 +93,12 @@ public class SourceStream : Stream, IStreamStack
|
||||
public ReaderOptions ReaderOptions { get; }
|
||||
public bool IsFileMode { get; }
|
||||
|
||||
public IEnumerable<FileInfo> Files => _files;
|
||||
public IEnumerable<Stream> Streams => _streams;
|
||||
public IReadOnlyList<FileInfo> Files => _files;
|
||||
public IReadOnlyList<Stream> Streams => _streams;
|
||||
|
||||
private Stream Current => _streams[_stream];
|
||||
private Stream Current => _streams[_streamIndex];
|
||||
|
||||
public FileInfo CurrentFile => _files[_streamIndex];
|
||||
|
||||
public bool LoadStream(int index) //ensure all parts to id are loaded
|
||||
{
|
||||
@@ -107,7 +109,7 @@ public class SourceStream : Stream, IStreamStack
|
||||
var f = _getFilePart.NotNull("GetFilePart is null")(_streams.Count);
|
||||
if (f == null)
|
||||
{
|
||||
_stream = _streams.Count - 1;
|
||||
_streamIndex = _streams.Count - 1;
|
||||
return false;
|
||||
}
|
||||
//throw new Exception($"File part {idx} not available.");
|
||||
@@ -119,7 +121,7 @@ public class SourceStream : Stream, IStreamStack
|
||||
var s = _getStreamPart.NotNull("GetStreamPart is null")(_streams.Count);
|
||||
if (s == null)
|
||||
{
|
||||
_stream = _streams.Count - 1;
|
||||
_streamIndex = _streams.Count - 1;
|
||||
return false;
|
||||
}
|
||||
//throw new Exception($"Stream part {idx} not available.");
|
||||
@@ -137,10 +139,10 @@ public class SourceStream : Stream, IStreamStack
|
||||
{
|
||||
if (LoadStream(idx))
|
||||
{
|
||||
_stream = idx;
|
||||
_streamIndex = idx;
|
||||
}
|
||||
|
||||
return _stream == idx;
|
||||
return _streamIndex == idx;
|
||||
}
|
||||
|
||||
public override bool CanRead => true;
|
||||
@@ -184,7 +186,7 @@ public class SourceStream : Stream, IStreamStack
|
||||
var length = Current.Length;
|
||||
|
||||
// Load next file if present
|
||||
if (!SetStream(_stream + 1))
|
||||
if (!SetStream(_streamIndex + 1))
|
||||
{
|
||||
break;
|
||||
}
|
||||
@@ -223,7 +225,7 @@ public class SourceStream : Stream, IStreamStack
|
||||
while (_prevSize + Current.Length < pos)
|
||||
{
|
||||
_prevSize += Current.Length;
|
||||
SetStream(_stream + 1);
|
||||
SetStream(_streamIndex + 1);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -273,7 +275,7 @@ public class SourceStream : Stream, IStreamStack
|
||||
var length = Current.Length;
|
||||
|
||||
// Load next file if present
|
||||
if (!SetStream(_stream + 1))
|
||||
if (!SetStream(_streamIndex + 1))
|
||||
{
|
||||
break;
|
||||
}
|
||||
@@ -322,7 +324,7 @@ public class SourceStream : Stream, IStreamStack
|
||||
var length = Current.Length;
|
||||
|
||||
// Load next file if present
|
||||
if (!SetStream(_stream + 1))
|
||||
if (!SetStream(_streamIndex + 1))
|
||||
{
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -82,7 +82,7 @@ public static class IReaderExtensions
|
||||
reader.Entry,
|
||||
destinationDirectory,
|
||||
options,
|
||||
(fileName, opts) => reader.WriteEntryToFileAsync(fileName, opts, cancellationToken),
|
||||
reader.WriteEntryToFileAsync,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
@@ -101,10 +101,10 @@ public static class IReaderExtensions
|
||||
reader.Entry,
|
||||
destinationFileName,
|
||||
options,
|
||||
async (x, fm) =>
|
||||
async (x, fm, ct) =>
|
||||
{
|
||||
using var fs = File.Open(destinationFileName, fm);
|
||||
await reader.WriteEntryToAsync(fs, cancellationToken).ConfigureAwait(false);
|
||||
await reader.WriteEntryToAsync(fs, ct).ConfigureAwait(false);
|
||||
},
|
||||
cancellationToken
|
||||
)
|
||||
|
||||
@@ -447,6 +447,31 @@ internal static class Utility
|
||||
}
|
||||
#endif
|
||||
|
||||
public static async Task<bool> ReadFullyAsync(
|
||||
this Stream stream,
|
||||
byte[] buffer,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var total = 0;
|
||||
int read;
|
||||
while (
|
||||
(
|
||||
read = await stream
|
||||
.ReadAsync(buffer, total, buffer.Length - total, cancellationToken)
|
||||
.ConfigureAwait(false)
|
||||
) > 0
|
||||
)
|
||||
{
|
||||
total += read;
|
||||
if (total >= buffer.Length)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return (total >= buffer.Length);
|
||||
}
|
||||
|
||||
public static string TrimNulls(this string source) => source.Replace('\0', ' ').Trim();
|
||||
|
||||
/// <summary>
|
||||
|
||||
@@ -48,7 +48,29 @@ internal class ZipCentralDirectoryEntry
|
||||
var decompressedvalue = zip64 ? uint.MaxValue : (uint)Decompressed;
|
||||
var headeroffsetvalue = zip64 ? uint.MaxValue : (uint)HeaderOffset;
|
||||
var extralength = zip64 ? (2 + 2 + 8 + 8 + 8 + 4) : 0;
|
||||
var version = (byte)(zip64 ? 45 : 20); // Version 20 required for deflate/encryption
|
||||
|
||||
// Determine version needed to extract:
|
||||
// - Version 63 for LZMA, PPMd, BZip2, ZStandard (advanced compression methods)
|
||||
// - Version 45 for Zip64 extensions (when Zip64HeaderOffset != 0 or actual sizes require it)
|
||||
// - Version 20 for standard Deflate/None compression
|
||||
byte version;
|
||||
if (
|
||||
compression == ZipCompressionMethod.LZMA
|
||||
|| compression == ZipCompressionMethod.PPMd
|
||||
|| compression == ZipCompressionMethod.BZip2
|
||||
|| compression == ZipCompressionMethod.ZStandard
|
||||
)
|
||||
{
|
||||
version = 63;
|
||||
}
|
||||
else if (zip64 || Zip64HeaderOffset != 0)
|
||||
{
|
||||
version = 45;
|
||||
}
|
||||
else
|
||||
{
|
||||
version = 20;
|
||||
}
|
||||
|
||||
var flags = Equals(archiveEncoding.GetEncoding(), Encoding.UTF8)
|
||||
? HeaderFlags.Efs
|
||||
|
||||
@@ -27,5 +27,22 @@ namespace SharpCompress.Test.Arc
|
||||
|
||||
[Fact]
|
||||
public void Arc_Crunched_Read() => Read("Arc.crunched.arc");
|
||||
|
||||
[Theory]
|
||||
[InlineData("Arc.crunched.largefile.arc", CompressionType.Crunched)]
|
||||
public void Arc_LargeFile_ShouldThrow(string fileName, CompressionType compressionType)
|
||||
{
|
||||
var exception = Assert.Throws<NotSupportedException>(() =>
|
||||
ReadForBufferBoundaryCheck(fileName, compressionType)
|
||||
);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("Arc.uncompressed.largefile.arc", CompressionType.None)]
|
||||
[InlineData("Arc.squeezed.largefile.arc", CompressionType.Squeezed)]
|
||||
public void Arc_LargeFileTest_Read(string fileName, CompressionType compressionType)
|
||||
{
|
||||
ReadForBufferBoundaryCheck(fileName, compressionType);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -134,6 +134,7 @@ public class ArchiveTests : ReaderTests
|
||||
{
|
||||
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
|
||||
{
|
||||
Assert.False(entry.SupportsMultiThreading);
|
||||
entry.WriteToDirectory(
|
||||
SCRATCH_FILES_PATH,
|
||||
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
|
||||
@@ -266,6 +267,31 @@ public class ArchiveTests : ReaderTests
|
||||
VerifyFiles();
|
||||
}
|
||||
|
||||
protected async Task ArchiveFileRead_Multithreaded(
|
||||
IArchiveFactory archiveFactory,
|
||||
string testArchive,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
|
||||
var tasks = new List<Task>();
|
||||
using (var archive = archiveFactory.Open(new FileInfo(testArchive), readerOptions))
|
||||
{
|
||||
Assert.True(archive.SupportsMultiThreading);
|
||||
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
|
||||
{
|
||||
Assert.True(entry.SupportsMultiThreading);
|
||||
var t = entry.WriteToDirectoryAsync(
|
||||
SCRATCH_FILES_PATH,
|
||||
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
|
||||
);
|
||||
tasks.Add(t);
|
||||
}
|
||||
}
|
||||
await Task.WhenAll(tasks);
|
||||
VerifyFiles();
|
||||
}
|
||||
|
||||
protected void ArchiveFileRead(
|
||||
IArchiveFactory archiveFactory,
|
||||
string testArchive,
|
||||
@@ -289,6 +315,11 @@ public class ArchiveTests : ReaderTests
|
||||
protected void ArchiveFileRead(string testArchive, ReaderOptions? readerOptions = null) =>
|
||||
ArchiveFileRead(ArchiveFactory.AutoFactory, testArchive, readerOptions);
|
||||
|
||||
protected Task ArchiveFileRead_Multithreaded(
|
||||
string testArchive,
|
||||
ReaderOptions? readerOptions = null
|
||||
) => ArchiveFileRead_Multithreaded(ArchiveFactory.AutoFactory, testArchive, readerOptions);
|
||||
|
||||
protected void ArchiveFileSkip(
|
||||
string testArchive,
|
||||
string fileOrder,
|
||||
|
||||
@@ -8,6 +8,7 @@ using SharpCompress.Common;
|
||||
using SharpCompress.Readers;
|
||||
using SharpCompress.Readers.Arj;
|
||||
using Xunit;
|
||||
using Xunit.Sdk;
|
||||
|
||||
namespace SharpCompress.Test.Arj
|
||||
{
|
||||
@@ -22,6 +23,15 @@ namespace SharpCompress.Test.Arj
|
||||
[Fact]
|
||||
public void Arj_Uncompressed_Read() => Read("Arj.store.arj", CompressionType.None);
|
||||
|
||||
[Fact]
|
||||
public void Arj_Method1_Read() => Read("Arj.method1.arj");
|
||||
|
||||
[Fact]
|
||||
public void Arj_Method2_Read() => Read("Arj.method2.arj");
|
||||
|
||||
[Fact]
|
||||
public void Arj_Method3_Read() => Read("Arj.method3.arj");
|
||||
|
||||
[Fact]
|
||||
public void Arj_Method4_Read() => Read("Arj.method4.arj");
|
||||
|
||||
@@ -35,19 +45,36 @@ namespace SharpCompress.Test.Arj
|
||||
public void Arj_Multi_Reader()
|
||||
{
|
||||
var exception = Assert.Throws<MultiVolumeExtractionException>(() =>
|
||||
DoArj_Multi_Reader(
|
||||
[
|
||||
"Arj.store.split.arj",
|
||||
"Arj.store.split.a01",
|
||||
"Arj.store.split.a02",
|
||||
"Arj.store.split.a03",
|
||||
"Arj.store.split.a04",
|
||||
"Arj.store.split.a05",
|
||||
]
|
||||
)
|
||||
DoArj_Multi_Reader([
|
||||
"Arj.store.split.arj",
|
||||
"Arj.store.split.a01",
|
||||
"Arj.store.split.a02",
|
||||
"Arj.store.split.a03",
|
||||
"Arj.store.split.a04",
|
||||
"Arj.store.split.a05",
|
||||
])
|
||||
);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("Arj.method1.largefile.arj", CompressionType.ArjLZ77)]
|
||||
[InlineData("Arj.method2.largefile.arj", CompressionType.ArjLZ77)]
|
||||
[InlineData("Arj.method3.largefile.arj", CompressionType.ArjLZ77)]
|
||||
public void Arj_LargeFile_ShouldThrow(string fileName, CompressionType compressionType)
|
||||
{
|
||||
var exception = Assert.Throws<NotSupportedException>(() =>
|
||||
ReadForBufferBoundaryCheck(fileName, compressionType)
|
||||
);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("Arj.store.largefile.arj", CompressionType.None)]
|
||||
[InlineData("Arj.method4.largefile.arj", CompressionType.ArjLZ77)]
|
||||
public void Arj_LargeFileTest_Read(string fileName, CompressionType compressionType)
|
||||
{
|
||||
ReadForBufferBoundaryCheck(fileName, compressionType);
|
||||
}
|
||||
|
||||
private void DoArj_Multi_Reader(string[] archives)
|
||||
{
|
||||
using (
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Archives;
|
||||
using SharpCompress.Archives.Rar;
|
||||
using SharpCompress.Common;
|
||||
@@ -292,9 +293,15 @@ public class RarArchiveTests : ArchiveTests
|
||||
[Fact]
|
||||
public void Rar_ArchiveFileRead() => ArchiveFileRead("Rar.rar");
|
||||
|
||||
[Fact]
|
||||
public Task Rar_ArchiveFileRead_Multithreaded() => ArchiveFileRead_Multithreaded("Rar.rar");
|
||||
|
||||
[Fact]
|
||||
public void Rar5_ArchiveFileRead() => ArchiveFileRead("Rar5.rar");
|
||||
|
||||
[Fact]
|
||||
public Task Rar5_ArchiveFileRead_Multithreaded() => ArchiveFileRead_Multithreaded("Rar5.rar");
|
||||
|
||||
[Fact]
|
||||
public void Rar_ArchiveFileRead_HasDirectories() =>
|
||||
DoRar_ArchiveFileRead_HasDirectories("Rar.rar");
|
||||
@@ -359,6 +366,9 @@ public class RarArchiveTests : ArchiveTests
|
||||
[Fact]
|
||||
public void Rar2_ArchiveFileRead() => ArchiveFileRead("Rar2.rar");
|
||||
|
||||
[Fact]
|
||||
public Task Rar2_ArchiveFileRead_Multithreaded() => ArchiveFileRead_Multithreaded("Rar2.rar");
|
||||
|
||||
[Fact]
|
||||
public void Rar15_ArchiveFileRead()
|
||||
{
|
||||
|
||||
@@ -15,29 +15,25 @@ public class RarReaderAsyncTests : ReaderTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task Rar_Multi_Reader_Async() =>
|
||||
await DoRar_Multi_Reader_Async(
|
||||
[
|
||||
"Rar.multi.part01.rar",
|
||||
"Rar.multi.part02.rar",
|
||||
"Rar.multi.part03.rar",
|
||||
"Rar.multi.part04.rar",
|
||||
"Rar.multi.part05.rar",
|
||||
"Rar.multi.part06.rar",
|
||||
]
|
||||
);
|
||||
await DoRar_Multi_Reader_Async([
|
||||
"Rar.multi.part01.rar",
|
||||
"Rar.multi.part02.rar",
|
||||
"Rar.multi.part03.rar",
|
||||
"Rar.multi.part04.rar",
|
||||
"Rar.multi.part05.rar",
|
||||
"Rar.multi.part06.rar",
|
||||
]);
|
||||
|
||||
[Fact]
|
||||
public async Task Rar5_Multi_Reader_Async() =>
|
||||
await DoRar_Multi_Reader_Async(
|
||||
[
|
||||
"Rar5.multi.part01.rar",
|
||||
"Rar5.multi.part02.rar",
|
||||
"Rar5.multi.part03.rar",
|
||||
"Rar5.multi.part04.rar",
|
||||
"Rar5.multi.part05.rar",
|
||||
"Rar5.multi.part06.rar",
|
||||
]
|
||||
);
|
||||
await DoRar_Multi_Reader_Async([
|
||||
"Rar5.multi.part01.rar",
|
||||
"Rar5.multi.part02.rar",
|
||||
"Rar5.multi.part03.rar",
|
||||
"Rar5.multi.part04.rar",
|
||||
"Rar5.multi.part05.rar",
|
||||
"Rar5.multi.part06.rar",
|
||||
]);
|
||||
|
||||
private async Task DoRar_Multi_Reader_Async(string[] archives)
|
||||
{
|
||||
@@ -95,29 +91,25 @@ public class RarReaderAsyncTests : ReaderTests
|
||||
|
||||
[Fact]
|
||||
public async Task Rar_Multi_Reader_Delete_Files_Async() =>
|
||||
await DoRar_Multi_Reader_Delete_Files_Async(
|
||||
[
|
||||
"Rar.multi.part01.rar",
|
||||
"Rar.multi.part02.rar",
|
||||
"Rar.multi.part03.rar",
|
||||
"Rar.multi.part04.rar",
|
||||
"Rar.multi.part05.rar",
|
||||
"Rar.multi.part06.rar",
|
||||
]
|
||||
);
|
||||
await DoRar_Multi_Reader_Delete_Files_Async([
|
||||
"Rar.multi.part01.rar",
|
||||
"Rar.multi.part02.rar",
|
||||
"Rar.multi.part03.rar",
|
||||
"Rar.multi.part04.rar",
|
||||
"Rar.multi.part05.rar",
|
||||
"Rar.multi.part06.rar",
|
||||
]);
|
||||
|
||||
[Fact]
|
||||
public async Task Rar5_Multi_Reader_Delete_Files_Async() =>
|
||||
await DoRar_Multi_Reader_Delete_Files_Async(
|
||||
[
|
||||
"Rar5.multi.part01.rar",
|
||||
"Rar5.multi.part02.rar",
|
||||
"Rar5.multi.part03.rar",
|
||||
"Rar5.multi.part04.rar",
|
||||
"Rar5.multi.part05.rar",
|
||||
"Rar5.multi.part06.rar",
|
||||
]
|
||||
);
|
||||
await DoRar_Multi_Reader_Delete_Files_Async([
|
||||
"Rar5.multi.part01.rar",
|
||||
"Rar5.multi.part02.rar",
|
||||
"Rar5.multi.part03.rar",
|
||||
"Rar5.multi.part04.rar",
|
||||
"Rar5.multi.part05.rar",
|
||||
"Rar5.multi.part06.rar",
|
||||
]);
|
||||
|
||||
private async Task DoRar_Multi_Reader_Delete_Files_Async(string[] archives)
|
||||
{
|
||||
|
||||
@@ -14,29 +14,25 @@ public class RarReaderTests : ReaderTests
|
||||
{
|
||||
[Fact]
|
||||
public void Rar_Multi_Reader() =>
|
||||
DoRar_Multi_Reader(
|
||||
[
|
||||
"Rar.multi.part01.rar",
|
||||
"Rar.multi.part02.rar",
|
||||
"Rar.multi.part03.rar",
|
||||
"Rar.multi.part04.rar",
|
||||
"Rar.multi.part05.rar",
|
||||
"Rar.multi.part06.rar",
|
||||
]
|
||||
);
|
||||
DoRar_Multi_Reader([
|
||||
"Rar.multi.part01.rar",
|
||||
"Rar.multi.part02.rar",
|
||||
"Rar.multi.part03.rar",
|
||||
"Rar.multi.part04.rar",
|
||||
"Rar.multi.part05.rar",
|
||||
"Rar.multi.part06.rar",
|
||||
]);
|
||||
|
||||
[Fact]
|
||||
public void Rar5_Multi_Reader() =>
|
||||
DoRar_Multi_Reader(
|
||||
[
|
||||
"Rar5.multi.part01.rar",
|
||||
"Rar5.multi.part02.rar",
|
||||
"Rar5.multi.part03.rar",
|
||||
"Rar5.multi.part04.rar",
|
||||
"Rar5.multi.part05.rar",
|
||||
"Rar5.multi.part06.rar",
|
||||
]
|
||||
);
|
||||
DoRar_Multi_Reader([
|
||||
"Rar5.multi.part01.rar",
|
||||
"Rar5.multi.part02.rar",
|
||||
"Rar5.multi.part03.rar",
|
||||
"Rar5.multi.part04.rar",
|
||||
"Rar5.multi.part05.rar",
|
||||
"Rar5.multi.part06.rar",
|
||||
]);
|
||||
|
||||
private void DoRar_Multi_Reader(string[] archives)
|
||||
{
|
||||
@@ -61,16 +57,14 @@ public class RarReaderTests : ReaderTests
|
||||
|
||||
[Fact]
|
||||
public void Rar_Multi_Reader_Encrypted() =>
|
||||
DoRar_Multi_Reader_Encrypted(
|
||||
[
|
||||
"Rar.EncryptedParts.part01.rar",
|
||||
"Rar.EncryptedParts.part02.rar",
|
||||
"Rar.EncryptedParts.part03.rar",
|
||||
"Rar.EncryptedParts.part04.rar",
|
||||
"Rar.EncryptedParts.part05.rar",
|
||||
"Rar.EncryptedParts.part06.rar",
|
||||
]
|
||||
);
|
||||
DoRar_Multi_Reader_Encrypted([
|
||||
"Rar.EncryptedParts.part01.rar",
|
||||
"Rar.EncryptedParts.part02.rar",
|
||||
"Rar.EncryptedParts.part03.rar",
|
||||
"Rar.EncryptedParts.part04.rar",
|
||||
"Rar.EncryptedParts.part05.rar",
|
||||
"Rar.EncryptedParts.part06.rar",
|
||||
]);
|
||||
|
||||
private void DoRar_Multi_Reader_Encrypted(string[] archives) =>
|
||||
Assert.Throws<InvalidFormatException>(() =>
|
||||
@@ -97,29 +91,25 @@ public class RarReaderTests : ReaderTests
|
||||
|
||||
[Fact]
|
||||
public void Rar_Multi_Reader_Delete_Files() =>
|
||||
DoRar_Multi_Reader_Delete_Files(
|
||||
[
|
||||
"Rar.multi.part01.rar",
|
||||
"Rar.multi.part02.rar",
|
||||
"Rar.multi.part03.rar",
|
||||
"Rar.multi.part04.rar",
|
||||
"Rar.multi.part05.rar",
|
||||
"Rar.multi.part06.rar",
|
||||
]
|
||||
);
|
||||
DoRar_Multi_Reader_Delete_Files([
|
||||
"Rar.multi.part01.rar",
|
||||
"Rar.multi.part02.rar",
|
||||
"Rar.multi.part03.rar",
|
||||
"Rar.multi.part04.rar",
|
||||
"Rar.multi.part05.rar",
|
||||
"Rar.multi.part06.rar",
|
||||
]);
|
||||
|
||||
[Fact]
|
||||
public void Rar5_Multi_Reader_Delete_Files() =>
|
||||
DoRar_Multi_Reader_Delete_Files(
|
||||
[
|
||||
"Rar5.multi.part01.rar",
|
||||
"Rar5.multi.part02.rar",
|
||||
"Rar5.multi.part03.rar",
|
||||
"Rar5.multi.part04.rar",
|
||||
"Rar5.multi.part05.rar",
|
||||
"Rar5.multi.part06.rar",
|
||||
]
|
||||
);
|
||||
DoRar_Multi_Reader_Delete_Files([
|
||||
"Rar5.multi.part01.rar",
|
||||
"Rar5.multi.part02.rar",
|
||||
"Rar5.multi.part03.rar",
|
||||
"Rar5.multi.part04.rar",
|
||||
"Rar5.multi.part05.rar",
|
||||
"Rar5.multi.part06.rar",
|
||||
]);
|
||||
|
||||
private void DoRar_Multi_Reader_Delete_Files(string[] archives)
|
||||
{
|
||||
@@ -407,16 +397,14 @@ public class RarReaderTests : ReaderTests
|
||||
Path.Combine("exe", "test.exe"),
|
||||
}
|
||||
);
|
||||
using var reader = RarReader.Open(
|
||||
[
|
||||
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part01.rar"),
|
||||
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part02.rar"),
|
||||
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part03.rar"),
|
||||
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part04.rar"),
|
||||
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part05.rar"),
|
||||
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part06.rar"),
|
||||
]
|
||||
);
|
||||
using var reader = RarReader.Open([
|
||||
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part01.rar"),
|
||||
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part02.rar"),
|
||||
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part03.rar"),
|
||||
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part04.rar"),
|
||||
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part05.rar"),
|
||||
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part06.rar"),
|
||||
]);
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
Assert.Equal(expectedOrder.Pop(), reader.Entry.Key);
|
||||
|
||||
@@ -176,6 +176,27 @@ public abstract class ReaderTests : TestBase
|
||||
}
|
||||
}
|
||||
|
||||
protected void ReadForBufferBoundaryCheck(string fileName, CompressionType compressionType)
|
||||
{
|
||||
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, fileName));
|
||||
using var reader = ReaderFactory.Open(stream, new ReaderOptions { LookForHeader = true });
|
||||
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
Assert.Equal(compressionType, reader.Entry.CompressionType);
|
||||
|
||||
reader.WriteEntryToDirectory(
|
||||
SCRATCH_FILES_PATH,
|
||||
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
|
||||
);
|
||||
}
|
||||
|
||||
CompareFilesByPath(
|
||||
Path.Combine(SCRATCH_FILES_PATH, "alice29.txt"),
|
||||
Path.Combine(MISC_TEST_FILES_PATH, "alice29.txt")
|
||||
);
|
||||
}
|
||||
|
||||
protected void Iterate(
|
||||
string testArchive,
|
||||
string fileOrder,
|
||||
|
||||
@@ -254,4 +254,58 @@ public class TarReaderTests : ReaderTests
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
[Fact]
|
||||
public void Tar_Malformed_LongName_Excessive_Size()
|
||||
{
|
||||
// Create a malformed TAR header with an excessively large LongName size
|
||||
// This simulates what happens during auto-detection of compressed files
|
||||
var buffer = new byte[512];
|
||||
|
||||
// Set up a basic TAR header structure
|
||||
// Name field (offset 0, 100 bytes) - set to "././@LongLink" which is typical for LongName
|
||||
var nameBytes = System.Text.Encoding.ASCII.GetBytes("././@LongLink");
|
||||
Array.Copy(nameBytes, 0, buffer, 0, nameBytes.Length);
|
||||
|
||||
// Set entry type to LongName (offset 156)
|
||||
buffer[156] = (byte)'L'; // EntryType.LongName
|
||||
|
||||
// Set an excessively large size (offset 124, 12 bytes, octal format)
|
||||
// This simulates a corrupted/misinterpreted size field
|
||||
// Using "77777777777" (octal) = 8589934591 bytes (~8GB)
|
||||
var sizeBytes = System.Text.Encoding.ASCII.GetBytes("77777777777 ");
|
||||
Array.Copy(sizeBytes, 0, buffer, 124, sizeBytes.Length);
|
||||
|
||||
// Calculate and set checksum (offset 148, 8 bytes)
|
||||
// Set checksum field to spaces first
|
||||
for (var i = 148; i < 156; i++)
|
||||
{
|
||||
buffer[i] = (byte)' ';
|
||||
}
|
||||
|
||||
// Calculate checksum
|
||||
var checksum = 0;
|
||||
foreach (var b in buffer)
|
||||
{
|
||||
checksum += b;
|
||||
}
|
||||
|
||||
var checksumStr = Convert.ToString(checksum, 8).PadLeft(6, '0') + "\0 ";
|
||||
var checksumBytes = System.Text.Encoding.ASCII.GetBytes(checksumStr);
|
||||
Array.Copy(checksumBytes, 0, buffer, 148, checksumBytes.Length);
|
||||
|
||||
// Create a stream with this malformed header
|
||||
using var stream = new MemoryStream();
|
||||
stream.Write(buffer, 0, buffer.Length);
|
||||
stream.Position = 0;
|
||||
|
||||
// Attempt to read this malformed archive
|
||||
// The InvalidFormatException from the validation gets caught and converted to IncompleteArchiveException
|
||||
// The important thing is it doesn't cause OutOfMemoryException
|
||||
Assert.Throws<IncompleteArchiveException>(() =>
|
||||
{
|
||||
using var reader = TarReader.Open(stream);
|
||||
reader.MoveToNextEntry();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
125
tests/SharpCompress.Test/Xz/XZBlockAsyncTests.cs
Normal file
125
tests/SharpCompress.Test/Xz/XZBlockAsyncTests.cs
Normal file
@@ -0,0 +1,125 @@
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Compressors.Xz;
|
||||
using Xunit;
|
||||
|
||||
namespace SharpCompress.Test.Xz;
|
||||
|
||||
public class XzBlockAsyncTests : XzTestsBase
|
||||
{
|
||||
protected override void Rewind(Stream stream) => stream.Position = 12;
|
||||
|
||||
protected override void RewindIndexed(Stream stream) => stream.Position = 12;
|
||||
|
||||
private static async Task<byte[]> ReadBytesAsync(XZBlock block, int bytesToRead)
|
||||
{
|
||||
var buffer = new byte[bytesToRead];
|
||||
var read = await block.ReadAsync(buffer, 0, bytesToRead).ConfigureAwait(false);
|
||||
if (read != bytesToRead)
|
||||
{
|
||||
throw new EndOfStreamException();
|
||||
}
|
||||
|
||||
return buffer;
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task OnFindIndexBlockThrowAsync()
|
||||
{
|
||||
var bytes = new byte[] { 0 };
|
||||
using Stream indexBlockStream = new MemoryStream(bytes);
|
||||
var xzBlock = new XZBlock(indexBlockStream, CheckType.CRC64, 8);
|
||||
await Assert.ThrowsAsync<XZIndexMarkerReachedException>(async () =>
|
||||
{
|
||||
await ReadBytesAsync(xzBlock, 1).ConfigureAwait(false);
|
||||
});
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CrcIncorrectThrowsAsync()
|
||||
{
|
||||
var bytes = (byte[])Compressed.Clone();
|
||||
bytes[20]++;
|
||||
using Stream badCrcStream = new MemoryStream(bytes);
|
||||
Rewind(badCrcStream);
|
||||
var xzBlock = new XZBlock(badCrcStream, CheckType.CRC64, 8);
|
||||
var ex = await Assert.ThrowsAsync<InvalidFormatException>(async () =>
|
||||
{
|
||||
await ReadBytesAsync(xzBlock, 1).ConfigureAwait(false);
|
||||
});
|
||||
Assert.Equal("Block header corrupt", ex.Message);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CanReadMAsync()
|
||||
{
|
||||
var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8);
|
||||
Assert.Equal(
|
||||
Encoding.ASCII.GetBytes("M"),
|
||||
await ReadBytesAsync(xzBlock, 1).ConfigureAwait(false)
|
||||
);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CanReadMaryAsync()
|
||||
{
|
||||
var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8);
|
||||
Assert.Equal(
|
||||
Encoding.ASCII.GetBytes("M"),
|
||||
await ReadBytesAsync(xzBlock, 1).ConfigureAwait(false)
|
||||
);
|
||||
Assert.Equal(
|
||||
Encoding.ASCII.GetBytes("a"),
|
||||
await ReadBytesAsync(xzBlock, 1).ConfigureAwait(false)
|
||||
);
|
||||
Assert.Equal(
|
||||
Encoding.ASCII.GetBytes("ry"),
|
||||
await ReadBytesAsync(xzBlock, 2).ConfigureAwait(false)
|
||||
);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CanReadPoemWithStreamReaderAsync()
|
||||
{
|
||||
var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8);
|
||||
var sr = new StreamReader(xzBlock);
|
||||
Assert.Equal(await sr.ReadToEndAsync().ConfigureAwait(false), Original);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task NoopWhenNoPaddingAsync()
|
||||
{
|
||||
// CompressedStream's only block has no padding.
|
||||
var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8);
|
||||
var sr = new StreamReader(xzBlock);
|
||||
await sr.ReadToEndAsync().ConfigureAwait(false);
|
||||
Assert.Equal(0L, CompressedStream.Position % 4L);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SkipsPaddingWhenPresentAsync()
|
||||
{
|
||||
// CompressedIndexedStream's first block has 1-byte padding.
|
||||
var xzBlock = new XZBlock(CompressedIndexedStream, CheckType.CRC64, 8);
|
||||
var sr = new StreamReader(xzBlock);
|
||||
await sr.ReadToEndAsync().ConfigureAwait(false);
|
||||
Assert.Equal(0L, CompressedIndexedStream.Position % 4L);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task HandlesPaddingInUnalignedBlockAsync()
|
||||
{
|
||||
var compressedUnaligned = new byte[Compressed.Length + 1];
|
||||
Compressed.CopyTo(compressedUnaligned, 1);
|
||||
var compressedUnalignedStream = new MemoryStream(compressedUnaligned);
|
||||
compressedUnalignedStream.Position = 13;
|
||||
|
||||
// Compressed's only block has no padding.
|
||||
var xzBlock = new XZBlock(compressedUnalignedStream, CheckType.CRC64, 8);
|
||||
var sr = new StreamReader(xzBlock);
|
||||
await sr.ReadToEndAsync().ConfigureAwait(false);
|
||||
Assert.Equal(1L, compressedUnalignedStream.Position % 4L);
|
||||
}
|
||||
}
|
||||
83
tests/SharpCompress.Test/Xz/XZHeaderAsyncTests.cs
Normal file
83
tests/SharpCompress.Test/Xz/XZHeaderAsyncTests.cs
Normal file
@@ -0,0 +1,83 @@
|
||||
using System.IO;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Compressors.Xz;
|
||||
using Xunit;
|
||||
|
||||
namespace SharpCompress.Test.Xz;
|
||||
|
||||
public class XzHeaderAsyncTests : XzTestsBase
|
||||
{
|
||||
[Fact]
|
||||
public async Task ChecksMagicNumberAsync()
|
||||
{
|
||||
var bytes = (byte[])Compressed.Clone();
|
||||
bytes[3]++;
|
||||
using Stream badMagicNumberStream = new MemoryStream(bytes);
|
||||
var br = new BinaryReader(badMagicNumberStream);
|
||||
var header = new XZHeader(br);
|
||||
var ex = await Assert.ThrowsAsync<InvalidFormatException>(async () =>
|
||||
{
|
||||
await header.ProcessAsync().ConfigureAwait(false);
|
||||
});
|
||||
Assert.Equal("Invalid XZ Stream", ex.Message);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CorruptHeaderThrowsAsync()
|
||||
{
|
||||
var bytes = (byte[])Compressed.Clone();
|
||||
bytes[8]++;
|
||||
using Stream badCrcStream = new MemoryStream(bytes);
|
||||
var br = new BinaryReader(badCrcStream);
|
||||
var header = new XZHeader(br);
|
||||
var ex = await Assert.ThrowsAsync<InvalidFormatException>(async () =>
|
||||
{
|
||||
await header.ProcessAsync().ConfigureAwait(false);
|
||||
});
|
||||
Assert.Equal("Stream header corrupt", ex.Message);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task BadVersionIfCrcOkButStreamFlagUnknownAsync()
|
||||
{
|
||||
var bytes = (byte[])Compressed.Clone();
|
||||
byte[] streamFlags = [0x00, 0xF4];
|
||||
var crc = Crc32.Compute(streamFlags).ToLittleEndianBytes();
|
||||
streamFlags.CopyTo(bytes, 6);
|
||||
crc.CopyTo(bytes, 8);
|
||||
using Stream badFlagStream = new MemoryStream(bytes);
|
||||
var br = new BinaryReader(badFlagStream);
|
||||
var header = new XZHeader(br);
|
||||
var ex = await Assert.ThrowsAsync<InvalidFormatException>(async () =>
|
||||
{
|
||||
await header.ProcessAsync().ConfigureAwait(false);
|
||||
});
|
||||
Assert.Equal("Unknown XZ Stream Version", ex.Message);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ProcessesBlockCheckTypeAsync()
|
||||
{
|
||||
var br = new BinaryReader(CompressedStream);
|
||||
var header = new XZHeader(br);
|
||||
await header.ProcessAsync().ConfigureAwait(false);
|
||||
Assert.Equal(CheckType.CRC64, header.BlockCheckType);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CanCalculateBlockCheckSizeAsync()
|
||||
{
|
||||
var br = new BinaryReader(CompressedStream);
|
||||
var header = new XZHeader(br);
|
||||
await header.ProcessAsync().ConfigureAwait(false);
|
||||
Assert.Equal(8, header.BlockCheckSize);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ProcessesStreamHeaderFromFactoryAsync()
|
||||
{
|
||||
var header = await XZHeader.FromStreamAsync(CompressedStream).ConfigureAwait(false);
|
||||
Assert.Equal(CheckType.CRC64, header.BlockCheckType);
|
||||
}
|
||||
}
|
||||
97
tests/SharpCompress.Test/Xz/XZIndexAsyncTests.cs
Normal file
97
tests/SharpCompress.Test/Xz/XZIndexAsyncTests.cs
Normal file
@@ -0,0 +1,97 @@
|
||||
using System.IO;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Compressors.Xz;
|
||||
using Xunit;
|
||||
|
||||
namespace SharpCompress.Test.Xz;
|
||||
|
||||
public class XzIndexAsyncTests : XzTestsBase
|
||||
{
|
||||
protected override void RewindEmpty(Stream stream) => stream.Position = 12;
|
||||
|
||||
protected override void Rewind(Stream stream) => stream.Position = 356;
|
||||
|
||||
protected override void RewindIndexed(Stream stream) => stream.Position = 612;
|
||||
|
||||
[Fact]
|
||||
public void RecordsStreamStartOnInit()
|
||||
{
|
||||
using Stream badStream = new MemoryStream([1, 2, 3, 4, 5]);
|
||||
var br = new BinaryReader(badStream);
|
||||
var index = new XZIndex(br, false);
|
||||
Assert.Equal(0, index.StreamStartPosition);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ThrowsIfHasNoIndexMarkerAsync()
|
||||
{
|
||||
using Stream badStream = new MemoryStream([1, 2, 3, 4, 5]);
|
||||
var br = new BinaryReader(badStream);
|
||||
var index = new XZIndex(br, false);
|
||||
await Assert.ThrowsAsync<InvalidFormatException>(async () =>
|
||||
await index.ProcessAsync().ConfigureAwait(false)
|
||||
);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ReadsNoRecordAsync()
|
||||
{
|
||||
var br = new BinaryReader(CompressedEmptyStream);
|
||||
var index = new XZIndex(br, false);
|
||||
await index.ProcessAsync().ConfigureAwait(false);
|
||||
Assert.Equal((ulong)0, index.NumberOfRecords);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ReadsOneRecordAsync()
|
||||
{
|
||||
var br = new BinaryReader(CompressedStream);
|
||||
var index = new XZIndex(br, false);
|
||||
await index.ProcessAsync().ConfigureAwait(false);
|
||||
Assert.Equal((ulong)1, index.NumberOfRecords);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ReadsMultipleRecordsAsync()
|
||||
{
|
||||
var br = new BinaryReader(CompressedIndexedStream);
|
||||
var index = new XZIndex(br, false);
|
||||
await index.ProcessAsync().ConfigureAwait(false);
|
||||
Assert.Equal((ulong)2, index.NumberOfRecords);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ReadsFirstRecordAsync()
|
||||
{
|
||||
var br = new BinaryReader(CompressedStream);
|
||||
var index = new XZIndex(br, false);
|
||||
await index.ProcessAsync().ConfigureAwait(false);
|
||||
Assert.Equal((ulong)OriginalBytes.Length, index.Records[0].UncompressedSize);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SkipsPaddingAsync()
|
||||
{
|
||||
// Index with 3-byte padding.
|
||||
using Stream badStream = new MemoryStream([
|
||||
0x00,
|
||||
0x01,
|
||||
0x10,
|
||||
0x80,
|
||||
0x01,
|
||||
0x00,
|
||||
0x00,
|
||||
0x00,
|
||||
0xB1,
|
||||
0x01,
|
||||
0xD9,
|
||||
0xC9,
|
||||
0xFF,
|
||||
]);
|
||||
var br = new BinaryReader(badStream);
|
||||
var index = new XZIndex(br, false);
|
||||
await index.ProcessAsync().ConfigureAwait(false);
|
||||
Assert.Equal(0L, badStream.Position % 4L);
|
||||
}
|
||||
}
|
||||
@@ -71,9 +71,21 @@ public class XzIndexTests : XzTestsBase
|
||||
public void SkipsPadding()
|
||||
{
|
||||
// Index with 3-byte padding.
|
||||
using Stream badStream = new MemoryStream(
|
||||
[0x00, 0x01, 0x10, 0x80, 0x01, 0x00, 0x00, 0x00, 0xB1, 0x01, 0xD9, 0xC9, 0xFF]
|
||||
);
|
||||
using Stream badStream = new MemoryStream([
|
||||
0x00,
|
||||
0x01,
|
||||
0x10,
|
||||
0x80,
|
||||
0x01,
|
||||
0x00,
|
||||
0x00,
|
||||
0x00,
|
||||
0xB1,
|
||||
0x01,
|
||||
0xD9,
|
||||
0xC9,
|
||||
0xFF,
|
||||
]);
|
||||
var br = new BinaryReader(badStream);
|
||||
var index = new XZIndex(br, false);
|
||||
index.Process();
|
||||
|
||||
36
tests/SharpCompress.Test/Xz/XZStreamAsyncTests.cs
Normal file
36
tests/SharpCompress.Test/Xz/XZStreamAsyncTests.cs
Normal file
@@ -0,0 +1,36 @@
|
||||
using System.IO;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Compressors.Xz;
|
||||
using Xunit;
|
||||
|
||||
namespace SharpCompress.Test.Xz;
|
||||
|
||||
public class XzStreamAsyncTests : XzTestsBase
|
||||
{
|
||||
[Fact]
|
||||
public async Task CanReadEmptyStreamAsync()
|
||||
{
|
||||
var xz = new XZStream(CompressedEmptyStream);
|
||||
using var sr = new StreamReader(xz);
|
||||
var uncompressed = await sr.ReadToEndAsync().ConfigureAwait(false);
|
||||
Assert.Equal(OriginalEmpty, uncompressed);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CanReadStreamAsync()
|
||||
{
|
||||
var xz = new XZStream(CompressedStream);
|
||||
using var sr = new StreamReader(xz);
|
||||
var uncompressed = await sr.ReadToEndAsync().ConfigureAwait(false);
|
||||
Assert.Equal(Original, uncompressed);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CanReadIndexedStreamAsync()
|
||||
{
|
||||
var xz = new XZStream(CompressedIndexedStream);
|
||||
using var sr = new StreamReader(xz);
|
||||
var uncompressed = await sr.ReadToEndAsync().ConfigureAwait(false);
|
||||
Assert.Equal(OriginalIndexed, uncompressed);
|
||||
}
|
||||
}
|
||||
441
tests/SharpCompress.Test/Zip/Zip64VersionConsistencyTests.cs
Normal file
441
tests/SharpCompress.Test/Zip/Zip64VersionConsistencyTests.cs
Normal file
@@ -0,0 +1,441 @@
|
||||
using System;
|
||||
using System.Buffers.Binary;
|
||||
using System.IO;
|
||||
using SharpCompress.Archives;
|
||||
using SharpCompress.Archives.Zip;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Writers;
|
||||
using SharpCompress.Writers.Zip;
|
||||
using Xunit;
|
||||
|
||||
namespace SharpCompress.Test.Zip;
|
||||
|
||||
/// <summary>
|
||||
/// Tests for verifying version consistency between Local File Header (LFH)
|
||||
/// and Central Directory File Header (CDFH) when using Zip64.
|
||||
/// </summary>
|
||||
public class Zip64VersionConsistencyTests : WriterTests
|
||||
{
|
||||
public Zip64VersionConsistencyTests()
|
||||
: base(ArchiveType.Zip) { }
|
||||
|
||||
[Fact]
|
||||
public void Zip64_Small_File_With_UseZip64_Should_Have_Matching_Versions()
|
||||
{
|
||||
// Create a zip with UseZip64=true but with a small file
|
||||
var filename = Path.Combine(SCRATCH2_FILES_PATH, "zip64_version_test.zip");
|
||||
|
||||
if (File.Exists(filename))
|
||||
{
|
||||
File.Delete(filename);
|
||||
}
|
||||
|
||||
// Create archive with UseZip64=true
|
||||
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.Deflate)
|
||||
{
|
||||
LeaveStreamOpen = false,
|
||||
UseZip64 = true,
|
||||
};
|
||||
|
||||
ZipArchive zipArchive = ZipArchive.Create();
|
||||
zipArchive.AddEntry("empty", new MemoryStream());
|
||||
zipArchive.SaveTo(filename, writerOptions);
|
||||
|
||||
// Now read the raw bytes to verify version consistency
|
||||
using var fs = File.OpenRead(filename);
|
||||
using var br = new BinaryReader(fs);
|
||||
|
||||
// Read Local File Header
|
||||
var lfhSignature = br.ReadUInt32();
|
||||
Assert.Equal(0x04034b50u, lfhSignature); // Local file header signature
|
||||
|
||||
var lfhVersion = br.ReadUInt16();
|
||||
|
||||
// Skip to Central Directory
|
||||
// Find Central Directory by searching from the end
|
||||
fs.Seek(-22, SeekOrigin.End); // Min EOCD size
|
||||
var eocdSignature = br.ReadUInt32();
|
||||
|
||||
if (eocdSignature != 0x06054b50u)
|
||||
{
|
||||
// Might have Zip64 EOCD, search backwards
|
||||
fs.Seek(-100, SeekOrigin.End);
|
||||
var buffer = new byte[100];
|
||||
fs.Read(buffer, 0, 100);
|
||||
|
||||
// Find EOCD signature
|
||||
for (int i = buffer.Length - 4; i >= 0; i--)
|
||||
{
|
||||
if (BinaryPrimitives.ReadUInt32LittleEndian(buffer.AsSpan(i)) == 0x06054b50u)
|
||||
{
|
||||
fs.Seek(-100 + i, SeekOrigin.End);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Read EOCD
|
||||
fs.Seek(-22, SeekOrigin.End);
|
||||
br.ReadUInt32(); // EOCD signature
|
||||
br.ReadUInt16(); // disk number
|
||||
br.ReadUInt16(); // disk with central dir
|
||||
br.ReadUInt16(); // entries on this disk
|
||||
br.ReadUInt16(); // total entries
|
||||
br.ReadUInt32(); // central directory size (unused)
|
||||
var cdOffset = br.ReadUInt32();
|
||||
|
||||
// If Zip64, need to read from Zip64 EOCD
|
||||
if (cdOffset == 0xFFFFFFFF)
|
||||
{
|
||||
// Find Zip64 EOCD Locator
|
||||
fs.Seek(-22 - 20, SeekOrigin.End);
|
||||
var z64eocdlSig = br.ReadUInt32();
|
||||
if (z64eocdlSig == 0x07064b50u)
|
||||
{
|
||||
br.ReadUInt32(); // disk number
|
||||
var z64eocdOffset = br.ReadUInt64();
|
||||
br.ReadUInt32(); // total disks
|
||||
|
||||
// Read Zip64 EOCD
|
||||
fs.Seek((long)z64eocdOffset, SeekOrigin.Begin);
|
||||
br.ReadUInt32(); // signature
|
||||
br.ReadUInt64(); // size of EOCD64
|
||||
br.ReadUInt16(); // version made by
|
||||
br.ReadUInt16(); // version needed
|
||||
br.ReadUInt32(); // disk number
|
||||
br.ReadUInt32(); // disk with CD
|
||||
br.ReadUInt64(); // entries on disk
|
||||
br.ReadUInt64(); // total entries
|
||||
br.ReadUInt64(); // CD size
|
||||
cdOffset = (uint)br.ReadUInt64(); // CD offset
|
||||
}
|
||||
}
|
||||
|
||||
// Read Central Directory Header
|
||||
fs.Seek(cdOffset, SeekOrigin.Begin);
|
||||
var cdhSignature = br.ReadUInt32();
|
||||
Assert.Equal(0x02014b50u, cdhSignature); // Central directory header signature
|
||||
|
||||
br.ReadUInt16(); // version made by
|
||||
var cdhVersionNeeded = br.ReadUInt16();
|
||||
|
||||
// The versions should match when UseZip64 is true
|
||||
Assert.Equal(lfhVersion, cdhVersionNeeded);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Zip64_Small_File_Without_UseZip64_Should_Have_Version_20()
|
||||
{
|
||||
// Create a zip without UseZip64
|
||||
var filename = Path.Combine(SCRATCH2_FILES_PATH, "no_zip64_version_test.zip");
|
||||
|
||||
if (File.Exists(filename))
|
||||
{
|
||||
File.Delete(filename);
|
||||
}
|
||||
|
||||
// Create archive without UseZip64
|
||||
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.Deflate)
|
||||
{
|
||||
LeaveStreamOpen = false,
|
||||
UseZip64 = false,
|
||||
};
|
||||
|
||||
ZipArchive zipArchive = ZipArchive.Create();
|
||||
zipArchive.AddEntry("empty", new MemoryStream());
|
||||
zipArchive.SaveTo(filename, writerOptions);
|
||||
|
||||
// Read the raw bytes
|
||||
using var fs = File.OpenRead(filename);
|
||||
using var br = new BinaryReader(fs);
|
||||
|
||||
// Read Local File Header version
|
||||
var lfhSignature = br.ReadUInt32();
|
||||
Assert.Equal(0x04034b50u, lfhSignature);
|
||||
var lfhVersion = br.ReadUInt16();
|
||||
|
||||
// Read Central Directory Header version
|
||||
fs.Seek(-22, SeekOrigin.End);
|
||||
br.ReadUInt32(); // EOCD signature
|
||||
br.ReadUInt16(); // disk number
|
||||
br.ReadUInt16(); // disk with central dir
|
||||
br.ReadUInt16(); // entries on this disk
|
||||
br.ReadUInt16(); // total entries
|
||||
br.ReadUInt32(); // CD size
|
||||
var cdOffset = br.ReadUInt32();
|
||||
|
||||
fs.Seek(cdOffset, SeekOrigin.Begin);
|
||||
var cdhSignature = br.ReadUInt32();
|
||||
Assert.Equal(0x02014b50u, cdhSignature);
|
||||
br.ReadUInt16(); // version made by
|
||||
var cdhVersionNeeded = br.ReadUInt16();
|
||||
|
||||
// Both should be version 20 (or less)
|
||||
Assert.True(lfhVersion <= 20);
|
||||
Assert.Equal(lfhVersion, cdhVersionNeeded);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void LZMA_Compression_Should_Use_Version_63()
|
||||
{
|
||||
// Create a zip with LZMA compression
|
||||
var filename = Path.Combine(SCRATCH2_FILES_PATH, "lzma_version_test.zip");
|
||||
|
||||
if (File.Exists(filename))
|
||||
{
|
||||
File.Delete(filename);
|
||||
}
|
||||
|
||||
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.LZMA)
|
||||
{
|
||||
LeaveStreamOpen = false,
|
||||
UseZip64 = false,
|
||||
};
|
||||
|
||||
ZipArchive zipArchive = ZipArchive.Create();
|
||||
var data = new byte[100];
|
||||
new Random(42).NextBytes(data);
|
||||
zipArchive.AddEntry("test.bin", new MemoryStream(data));
|
||||
zipArchive.SaveTo(filename, writerOptions);
|
||||
|
||||
// Read the raw bytes
|
||||
using var fs = File.OpenRead(filename);
|
||||
using var br = new BinaryReader(fs);
|
||||
|
||||
// Read Local File Header version
|
||||
var lfhSignature = br.ReadUInt32();
|
||||
Assert.Equal(0x04034b50u, lfhSignature);
|
||||
var lfhVersion = br.ReadUInt16();
|
||||
|
||||
// Read Central Directory Header version
|
||||
fs.Seek(-22, SeekOrigin.End);
|
||||
br.ReadUInt32(); // EOCD signature
|
||||
br.ReadUInt16(); // disk number
|
||||
br.ReadUInt16(); // disk with central dir
|
||||
br.ReadUInt16(); // entries on this disk
|
||||
br.ReadUInt16(); // total entries
|
||||
br.ReadUInt32(); // CD size
|
||||
var cdOffset = br.ReadUInt32();
|
||||
|
||||
fs.Seek(cdOffset, SeekOrigin.Begin);
|
||||
var cdhSignature = br.ReadUInt32();
|
||||
Assert.Equal(0x02014b50u, cdhSignature);
|
||||
br.ReadUInt16(); // version made by
|
||||
var cdhVersionNeeded = br.ReadUInt16();
|
||||
|
||||
// Both should be version 63 for LZMA
|
||||
Assert.Equal(63, lfhVersion);
|
||||
Assert.Equal(lfhVersion, cdhVersionNeeded);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void PPMd_Compression_Should_Use_Version_63()
|
||||
{
|
||||
// Create a zip with PPMd compression
|
||||
var filename = Path.Combine(SCRATCH2_FILES_PATH, "ppmd_version_test.zip");
|
||||
|
||||
if (File.Exists(filename))
|
||||
{
|
||||
File.Delete(filename);
|
||||
}
|
||||
|
||||
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.PPMd)
|
||||
{
|
||||
LeaveStreamOpen = false,
|
||||
UseZip64 = false,
|
||||
};
|
||||
|
||||
ZipArchive zipArchive = ZipArchive.Create();
|
||||
var data = new byte[100];
|
||||
new Random(42).NextBytes(data);
|
||||
zipArchive.AddEntry("test.bin", new MemoryStream(data));
|
||||
zipArchive.SaveTo(filename, writerOptions);
|
||||
|
||||
// Read the raw bytes
|
||||
using var fs = File.OpenRead(filename);
|
||||
using var br = new BinaryReader(fs);
|
||||
|
||||
// Read Local File Header version
|
||||
var lfhSignature = br.ReadUInt32();
|
||||
Assert.Equal(0x04034b50u, lfhSignature);
|
||||
var lfhVersion = br.ReadUInt16();
|
||||
|
||||
// Read Central Directory Header version
|
||||
fs.Seek(-22, SeekOrigin.End);
|
||||
br.ReadUInt32(); // EOCD signature
|
||||
br.ReadUInt16(); // disk number
|
||||
br.ReadUInt16(); // disk with central dir
|
||||
br.ReadUInt16(); // entries on this disk
|
||||
br.ReadUInt16(); // total entries
|
||||
br.ReadUInt32(); // CD size
|
||||
var cdOffset = br.ReadUInt32();
|
||||
|
||||
fs.Seek(cdOffset, SeekOrigin.Begin);
|
||||
var cdhSignature = br.ReadUInt32();
|
||||
Assert.Equal(0x02014b50u, cdhSignature);
|
||||
br.ReadUInt16(); // version made by
|
||||
var cdhVersionNeeded = br.ReadUInt16();
|
||||
|
||||
// Both should be version 63 for PPMd
|
||||
Assert.Equal(63, lfhVersion);
|
||||
Assert.Equal(lfhVersion, cdhVersionNeeded);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Zip64_Multiple_Small_Files_With_UseZip64_Should_Have_Matching_Versions()
|
||||
{
|
||||
// Create a zip with UseZip64=true but with multiple small files
|
||||
var filename = Path.Combine(SCRATCH2_FILES_PATH, "zip64_version_multiple_test.zip");
|
||||
|
||||
if (File.Exists(filename))
|
||||
{
|
||||
File.Delete(filename);
|
||||
}
|
||||
|
||||
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.Deflate)
|
||||
{
|
||||
LeaveStreamOpen = false,
|
||||
UseZip64 = true,
|
||||
};
|
||||
|
||||
ZipArchive zipArchive = ZipArchive.Create();
|
||||
for (int i = 0; i < 5; i++)
|
||||
{
|
||||
var data = new byte[100];
|
||||
new Random(i).NextBytes(data);
|
||||
zipArchive.AddEntry($"file{i}.bin", new MemoryStream(data));
|
||||
}
|
||||
zipArchive.SaveTo(filename, writerOptions);
|
||||
|
||||
// Verify that all entries have matching versions
|
||||
using var fs = File.OpenRead(filename);
|
||||
using var br = new BinaryReader(fs);
|
||||
|
||||
// Read all LFH versions
|
||||
var lfhVersions = new System.Collections.Generic.List<ushort>();
|
||||
while (true)
|
||||
{
|
||||
var sig = br.ReadUInt32();
|
||||
if (sig == 0x04034b50u) // LFH signature
|
||||
{
|
||||
var version = br.ReadUInt16();
|
||||
lfhVersions.Add(version);
|
||||
|
||||
// Skip rest of LFH
|
||||
br.ReadUInt16(); // flags
|
||||
br.ReadUInt16(); // compression
|
||||
br.ReadUInt32(); // mod time
|
||||
br.ReadUInt32(); // crc
|
||||
br.ReadUInt32(); // compressed size
|
||||
br.ReadUInt32(); // uncompressed size
|
||||
var fnLen = br.ReadUInt16();
|
||||
var extraLen = br.ReadUInt16();
|
||||
fs.Seek(fnLen + extraLen, SeekOrigin.Current);
|
||||
|
||||
// Skip compressed data by reading compressed size from extra field if zip64
|
||||
// For simplicity in this test, we'll just find the next signature
|
||||
var found = false;
|
||||
|
||||
while (fs.Position < fs.Length - 4)
|
||||
{
|
||||
var b = br.ReadByte();
|
||||
if (b == 0x50)
|
||||
{
|
||||
var nextBytes = br.ReadBytes(3);
|
||||
if (
|
||||
(nextBytes[0] == 0x4b && nextBytes[1] == 0x03 && nextBytes[2] == 0x04)
|
||||
|| // LFH
|
||||
(nextBytes[0] == 0x4b && nextBytes[1] == 0x01 && nextBytes[2] == 0x02)
|
||||
) // CDH
|
||||
{
|
||||
fs.Seek(-4, SeekOrigin.Current);
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!found)
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
else if (sig == 0x02014b50u) // CDH signature
|
||||
{
|
||||
break; // Reached central directory
|
||||
}
|
||||
else
|
||||
{
|
||||
break; // Unknown signature
|
||||
}
|
||||
}
|
||||
|
||||
// Find Central Directory
|
||||
fs.Seek(-22, SeekOrigin.End);
|
||||
br.ReadUInt32(); // EOCD signature
|
||||
br.ReadUInt16(); // disk number
|
||||
br.ReadUInt16(); // disk with central dir
|
||||
br.ReadUInt16(); // entries on this disk
|
||||
var totalEntries = br.ReadUInt16();
|
||||
br.ReadUInt32(); // CD size
|
||||
var cdOffset = br.ReadUInt32();
|
||||
|
||||
// Check if we need Zip64 EOCD
|
||||
if (cdOffset == 0xFFFFFFFF)
|
||||
{
|
||||
fs.Seek(-22 - 20, SeekOrigin.End);
|
||||
var z64eocdlSig = br.ReadUInt32();
|
||||
if (z64eocdlSig == 0x07064b50u)
|
||||
{
|
||||
br.ReadUInt32(); // disk number
|
||||
var z64eocdOffset = br.ReadUInt64();
|
||||
fs.Seek((long)z64eocdOffset, SeekOrigin.Begin);
|
||||
br.ReadUInt32(); // signature
|
||||
br.ReadUInt64(); // size
|
||||
br.ReadUInt16(); // version made by
|
||||
br.ReadUInt16(); // version needed
|
||||
br.ReadUInt32(); // disk number
|
||||
br.ReadUInt32(); // disk with CD
|
||||
br.ReadUInt64(); // entries on disk
|
||||
totalEntries = (ushort)br.ReadUInt64(); // total entries
|
||||
br.ReadUInt64(); // CD size
|
||||
cdOffset = (uint)br.ReadUInt64(); // CD offset
|
||||
}
|
||||
}
|
||||
|
||||
// Read CDH versions
|
||||
fs.Seek(cdOffset, SeekOrigin.Begin);
|
||||
var cdhVersions = new System.Collections.Generic.List<ushort>();
|
||||
for (int i = 0; i < totalEntries; i++)
|
||||
{
|
||||
var sig = br.ReadUInt32();
|
||||
Assert.Equal(0x02014b50u, sig);
|
||||
br.ReadUInt16(); // version made by
|
||||
var version = br.ReadUInt16();
|
||||
cdhVersions.Add(version);
|
||||
|
||||
// Skip rest of CDH
|
||||
br.ReadUInt16(); // flags
|
||||
br.ReadUInt16(); // compression
|
||||
br.ReadUInt32(); // mod time
|
||||
br.ReadUInt32(); // crc
|
||||
br.ReadUInt32(); // compressed size
|
||||
br.ReadUInt32(); // uncompressed size
|
||||
var fnLen = br.ReadUInt16();
|
||||
var extraLen = br.ReadUInt16();
|
||||
var commentLen = br.ReadUInt16();
|
||||
br.ReadUInt16(); // disk number start
|
||||
br.ReadUInt16(); // internal attributes
|
||||
br.ReadUInt32(); // external attributes
|
||||
br.ReadUInt32(); // LFH offset
|
||||
fs.Seek(fnLen + extraLen + commentLen, SeekOrigin.Current);
|
||||
}
|
||||
|
||||
// Verify all versions match
|
||||
Assert.Equal(lfhVersions.Count, cdhVersions.Count);
|
||||
for (int i = 0; i < lfhVersions.Count; i++)
|
||||
{
|
||||
Assert.Equal(lfhVersions[i], cdhVersions[i]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2,6 +2,7 @@ using System;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Archives;
|
||||
using SharpCompress.Archives.Zip;
|
||||
using SharpCompress.Common;
|
||||
@@ -89,31 +90,13 @@ public class ZipArchiveTests : ArchiveTests
|
||||
public void Zip_Deflate_ArchiveFileRead() => ArchiveFileRead("Zip.deflate.zip");
|
||||
|
||||
[Fact]
|
||||
public void Zip_Deflate_ArchiveExtractToDirectory() =>
|
||||
ArchiveExtractToDirectory("Zip.deflate.zip");
|
||||
public Task Zip_Deflate_ArchiveFileRead_Multithreaded() =>
|
||||
ArchiveFileRead_Multithreaded("Zip.deflate.zip");
|
||||
|
||||
[Fact]
|
||||
public void Zip_Deflate_ArchiveExtractToDirectory_Overwrite()
|
||||
{
|
||||
// First extraction
|
||||
public void Zip_Deflate_ArchiveExtractToDirectory() =>
|
||||
ArchiveExtractToDirectory("Zip.deflate.zip");
|
||||
|
||||
// Corrupt one of the extracted files by making it longer
|
||||
var testFile = Path.Combine(SCRATCH_FILES_PATH, "Tar.tar");
|
||||
if (File.Exists(testFile))
|
||||
{
|
||||
var originalSize = new FileInfo(testFile).Length;
|
||||
File.WriteAllText(testFile, new string('X', (int)originalSize + 1000));
|
||||
Assert.True(new FileInfo(testFile).Length > originalSize);
|
||||
}
|
||||
|
||||
// Second extraction should overwrite and produce correct file sizes
|
||||
ArchiveExtractToDirectory("Zip.deflate.zip");
|
||||
|
||||
// Verify files are correct size (not corrupted with leftover data)
|
||||
VerifyFiles();
|
||||
}
|
||||
|
||||
//will detect and load other files
|
||||
[Fact]
|
||||
public void Zip_Deflate_Multi_ArchiveFirstFileRead() =>
|
||||
|
||||
@@ -4,9 +4,9 @@
|
||||
".NETFramework,Version=v4.8": {
|
||||
"AwesomeAssertions": {
|
||||
"type": "Direct",
|
||||
"requested": "[9.2.1, )",
|
||||
"resolved": "9.2.1",
|
||||
"contentHash": "lbwhyQNXxxEGx4oCbFqNfFy2DsywsvNhN6qoOjY4wwhMgI2L9+YrxjyF/M0io99yrvWV1Cjj12LP2QGcC43Uhw==",
|
||||
"requested": "[9.3.0, )",
|
||||
"resolved": "9.3.0",
|
||||
"contentHash": "8lGLYap2ec2gNLgjf2xKZaKLpQ7j36oJvrYzBVVpNAumqnxRdevqqhEF66qxE92f8y2+zsbQ061DeHG61ZhzaQ==",
|
||||
"dependencies": {
|
||||
"System.Threading.Tasks.Extensions": "4.5.4"
|
||||
}
|
||||
@@ -199,9 +199,9 @@
|
||||
"net8.0": {
|
||||
"AwesomeAssertions": {
|
||||
"type": "Direct",
|
||||
"requested": "[9.2.1, )",
|
||||
"resolved": "9.2.1",
|
||||
"contentHash": "lbwhyQNXxxEGx4oCbFqNfFy2DsywsvNhN6qoOjY4wwhMgI2L9+YrxjyF/M0io99yrvWV1Cjj12LP2QGcC43Uhw=="
|
||||
"requested": "[9.3.0, )",
|
||||
"resolved": "9.3.0",
|
||||
"contentHash": "8lGLYap2ec2gNLgjf2xKZaKLpQ7j36oJvrYzBVVpNAumqnxRdevqqhEF66qxE92f8y2+zsbQ061DeHG61ZhzaQ=="
|
||||
},
|
||||
"Microsoft.NET.Test.Sdk": {
|
||||
"type": "Direct",
|
||||
|
||||
BIN
tests/TestArchives/Archives/Arc.crunched.largefile.arc
Normal file
BIN
tests/TestArchives/Archives/Arc.crunched.largefile.arc
Normal file
Binary file not shown.
BIN
tests/TestArchives/Archives/Arc.squashed.largefile.arc
Normal file
BIN
tests/TestArchives/Archives/Arc.squashed.largefile.arc
Normal file
Binary file not shown.
BIN
tests/TestArchives/Archives/Arc.squeezed.largefile.arc
Normal file
BIN
tests/TestArchives/Archives/Arc.squeezed.largefile.arc
Normal file
Binary file not shown.
BIN
tests/TestArchives/Archives/Arc.uncompressed.largefile.arc
Normal file
BIN
tests/TestArchives/Archives/Arc.uncompressed.largefile.arc
Normal file
Binary file not shown.
BIN
tests/TestArchives/Archives/Arj.method1.arj
Normal file
BIN
tests/TestArchives/Archives/Arj.method1.arj
Normal file
Binary file not shown.
BIN
tests/TestArchives/Archives/Arj.method1.largefile.arj
Normal file
BIN
tests/TestArchives/Archives/Arj.method1.largefile.arj
Normal file
Binary file not shown.
BIN
tests/TestArchives/Archives/Arj.method2.arj
Normal file
BIN
tests/TestArchives/Archives/Arj.method2.arj
Normal file
Binary file not shown.
BIN
tests/TestArchives/Archives/Arj.method2.largefile.arj
Normal file
BIN
tests/TestArchives/Archives/Arj.method2.largefile.arj
Normal file
Binary file not shown.
BIN
tests/TestArchives/Archives/Arj.method3.arj
Normal file
BIN
tests/TestArchives/Archives/Arj.method3.arj
Normal file
Binary file not shown.
BIN
tests/TestArchives/Archives/Arj.method3.largefile.arj
Normal file
BIN
tests/TestArchives/Archives/Arj.method3.largefile.arj
Normal file
Binary file not shown.
BIN
tests/TestArchives/Archives/Arj.method4.largefile.arj
Normal file
BIN
tests/TestArchives/Archives/Arj.method4.largefile.arj
Normal file
Binary file not shown.
BIN
tests/TestArchives/Archives/Arj.store.largefile.arj
Normal file
BIN
tests/TestArchives/Archives/Arj.store.largefile.arj
Normal file
Binary file not shown.
3609
tests/TestArchives/MiscTest/alice29.txt
Normal file
3609
tests/TestArchives/MiscTest/alice29.txt
Normal file
File diff suppressed because it is too large
Load Diff
10059
tests/TestArchives/MiscTest/news.txt
Normal file
10059
tests/TestArchives/MiscTest/news.txt
Normal file
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user