Compare commits

..

4 Commits

Author SHA1 Message Date
Adam Hathcock
73e9b61fcc try a new agent config 2025-10-25 16:17:42 +01:00
Adam Hathcock
699bc5f34b chore: add Copilot coding agent config and CI workflow 2025-10-25 16:05:09 +01:00
Adam Hathcock
9eed8e842c Merge pull request #972 from TwanVanDongen/master
Handle vendor-specific and malformed ZIP extra fields safely
2025-10-25 13:53:10 +01:00
Twan van Dongen
6d652a12ee And again forgot to apply CSharpier. Adds bounds checks to prevent exceptions when extra fields are truncated or non-standard (e.g., 0x4341 "AC"/ARC0). Stops parsing gracefully, allowing other fields to be processed. 2025-10-24 17:18:37 +02:00
48 changed files with 1952 additions and 2789 deletions

13
.github/COPILOT_AGENT_README.md vendored Normal file
View File

@@ -0,0 +1,13 @@
# Copilot Coding Agent Configuration
This repository includes a minimal opt-in configuration and CI workflow to allow the GitHub Copilot coding agent to open and validate PRs.
- .copilot-agent.yml: opt-in config for automated agents
- .github/workflows/dotnetcore.yml: CI runs on PRs touching the solution, source, or tests to validate changes
- AGENTS.md: general information for this project
Maintainers can adjust the allowed paths or disable the agent by editing or removing .copilot-agent.yml.
Notes:
- Do not change any other files in the repository.
- If build/test paths are different, update the workflow accordingly; this workflow targets SharpCompress.sln and the SharpCompress.Tests test project.

7
.github/agents/copilot-agent.yml vendored Normal file
View File

@@ -0,0 +1,7 @@
enabled: true
agent:
name: copilot-coding-agent
allow:
- paths: ["src/**/*", "tests/**/*", "README.md", "AGENTS.md"]
actions: ["create", "modify"]
require_review_before_merge: true

View File

@@ -2,7 +2,6 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Factories;
using SharpCompress.IO;
@@ -117,14 +116,14 @@ public static class ArchiveFactory
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static async Task WriteToDirectoryAsync(
public static void WriteToDirectory(
string sourceArchive,
string destinationDirectory,
ExtractionOptions? options = null
)
{
using var archive = Open(sourceArchive);
await archive.WriteToDirectoryAsync(destinationDirectory, options);
archive.WriteToDirectory(destinationDirectory, options);
}
private static T FindFactory<T>(FileInfo finfo)

View File

@@ -1,6 +1,5 @@
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common.GZip;
namespace SharpCompress.Archives.GZip;
@@ -21,9 +20,6 @@ public class GZipArchiveEntry : GZipEntry, IArchiveEntry
return Parts.Single().GetCompressedStream().NotNull();
}
public virtual async Task<Stream> OpenEntryStreamAsync() =>
await Task.FromResult(OpenEntryStream());
#region IArchiveEntry Members
public IArchive Archive { get; }

View File

@@ -1,5 +1,4 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Archives;
@@ -12,12 +11,6 @@ public interface IArchiveEntry : IEntry
/// </summary>
Stream OpenEntryStream();
/// <summary>
/// Opens the current entry as a stream that will decompress as it is read.
/// Read the entire stream or use SkipEntry on EntryStream.
/// </summary>
Task<Stream> OpenEntryStreamAsync();
/// <summary>
/// The archive can find all the parts of the archive needed to extract this entry.
/// </summary>

View File

@@ -1,5 +1,4 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
@@ -7,7 +6,7 @@ namespace SharpCompress.Archives;
public static class IArchiveEntryExtensions
{
public static async Task WriteToAsync(this IArchiveEntry archiveEntry, Stream streamToWriteTo)
public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWriteTo)
{
if (archiveEntry.IsDirectory)
{
@@ -22,11 +21,11 @@ public static class IArchiveEntryExtensions
archiveEntry.Size,
archiveEntry.CompressedSize
);
var entryStream = await archiveEntry.OpenEntryStreamAsync();
var entryStream = archiveEntry.OpenEntryStream();
using (entryStream)
{
using Stream s = new ListeningStream(streamListener, entryStream);
await s.CopyToAsync(streamToWriteTo);
s.CopyTo(streamToWriteTo);
}
streamListener.FireEntryExtractionEnd(archiveEntry);
}
@@ -34,34 +33,34 @@ public static class IArchiveEntryExtensions
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static async Task WriteEntryToDirectoryAsync(
public static void WriteToDirectory(
this IArchiveEntry entry,
string destinationDirectory,
ExtractionOptions? options = null
) =>
await ExtractionMethods.WriteEntryToDirectoryAsync(
ExtractionMethods.WriteEntryToDirectory(
entry,
destinationDirectory,
options,
entry.WriteToFileAsync
entry.WriteToFile
);
/// <summary>
/// Extract to specific file
/// </summary>
public static Task WriteToFileAsync(
public static void WriteToFile(
this IArchiveEntry entry,
string destinationFileName,
ExtractionOptions? options = null
) =>
ExtractionMethods.WriteEntryToFileAsync(
ExtractionMethods.WriteEntryToFile(
entry,
destinationFileName,
options,
async (x, fm) =>
(x, fm) =>
{
using var fs = File.Open(destinationFileName, fm);
await entry.WriteToAsync(fs);
entry.WriteTo(fs);
}
);
}

View File

@@ -1,8 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
@@ -13,14 +13,14 @@ public static class IArchiveExtensions
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static async Task WriteToDirectoryAsync(
public static void WriteToDirectory(
this IArchive archive,
string destinationDirectory,
ExtractionOptions? options = null
)
{
using var reader = archive.ExtractAllEntries();
await reader.WriteAllToDirectoryAsync(destinationDirectory, options);
reader.WriteAllToDirectory(destinationDirectory, options);
}
/// <summary>
@@ -30,7 +30,7 @@ public static class IArchiveExtensions
/// <param name="destination">The folder to extract into.</param>
/// <param name="progressReport">Optional progress report callback.</param>
/// <param name="cancellationToken">Optional cancellation token.</param>
public static async Task ExtractToDirectoryAsync(
public static void ExtractToDirectory(
this IArchive archive,
string destination,
Action<double>? progressReport = null,
@@ -75,7 +75,7 @@ public static class IArchiveExtensions
// Write file
using var fs = File.OpenWrite(path);
await entry.WriteToAsync(fs);
entry.WriteTo(fs);
// Update progress
bytesRead += entry.Size;

View File

@@ -2,7 +2,6 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
@@ -67,23 +66,18 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
}
}
public Stream OpenEntryStream() =>
throw new NotSupportedException(
"Synchronous extraction is not supported. Use OpenEntryStreamAsync instead."
);
public async Task<Stream> OpenEntryStreamAsync()
public Stream OpenEntryStream()
{
if (IsRarV3)
{
return await RarStream.Create(
return new RarStream(
archive.UnpackV1.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
);
}
return await RarStream.Create(
return new RarStream(
archive.UnpackV2017.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)

View File

@@ -2,7 +2,6 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.SevenZip;
using SharpCompress.Compressors.LZMA.Utilites;
@@ -254,8 +253,8 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
}
}
protected override Task<EntryStream> GetEntryStreamAsync() =>
CreateEntryStreamAsync(
protected override EntryStream GetEntryStream() =>
CreateEntryStream(
new ReadOnlySubStream(
_currentStream.NotNull("currentStream is not null"),
_currentItem?.Size ?? 0

View File

@@ -1,5 +1,4 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common.SevenZip;
namespace SharpCompress.Archives.SevenZip;
@@ -11,9 +10,6 @@ public class SevenZipArchiveEntry : SevenZipEntry, IArchiveEntry
public Stream OpenEntryStream() => FilePart.GetCompressedStream();
public virtual async Task<Stream> OpenEntryStreamAsync() =>
await Task.FromResult(OpenEntryStream());
public IArchive Archive { get; }
public bool IsComplete => true;

View File

@@ -1,6 +1,5 @@
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar;
@@ -13,9 +12,6 @@ public class TarArchiveEntry : TarEntry, IArchiveEntry
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();
public virtual async Task<Stream> OpenEntryStreamAsync() =>
await Task.FromResult(OpenEntryStream());
#region IArchiveEntry Members
public IArchive Archive { get; }

View File

@@ -1,6 +1,5 @@
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common.Zip;
namespace SharpCompress.Archives.Zip;
@@ -12,9 +11,6 @@ public class ZipArchiveEntry : ZipEntry, IArchiveEntry
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();
public virtual async Task<Stream> OpenEntryStreamAsync() =>
await Task.FromResult(OpenEntryStream());
#region IArchiveEntry Members
public IArchive Archive { get; }

View File

@@ -1,7 +1,6 @@
using System;
using System.IO;
using System.IO.Compression;
using System.Threading.Tasks;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -52,15 +51,6 @@ public class EntryStream : Stream, IStreamStack
_completed = true;
}
/// <summary>
/// When reading a stream from OpenEntryStream, the stream must be completed so use this to finish reading the entire entry.
/// </summary>
public async Task SkipEntryAsync()
{
await this.SkipAsync();
_completed = true;
}
protected override void Dispose(bool disposing)
{
if (!(_completed || _reader.Cancelled))

View File

@@ -1,6 +1,5 @@
using System;
using System.IO;
using System.Threading.Tasks;
namespace SharpCompress.Common;
@@ -9,11 +8,11 @@ internal static class ExtractionMethods
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static async Task WriteEntryToDirectoryAsync(
public static void WriteEntryToDirectory(
IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Func<string, ExtractionOptions?, Task> write
Action<string, ExtractionOptions?> write
)
{
string destinationFileName;
@@ -78,7 +77,7 @@ internal static class ExtractionMethods
"Entry is trying to write a file outside of the destination directory."
);
}
await write(destinationFileName, options);
write(destinationFileName, options);
}
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
{
@@ -86,11 +85,11 @@ internal static class ExtractionMethods
}
}
public static async Task WriteEntryToFileAsync(
public static void WriteEntryToFile(
IEntry entry,
string destinationFileName,
ExtractionOptions? options,
Func<string, FileMode, Task> openAndWrite
Action<string, FileMode> openAndWrite
)
{
if (entry.LinkTarget != null)
@@ -113,7 +112,7 @@ internal static class ExtractionMethods
fm = FileMode.CreateNew;
}
await openAndWrite(destinationFileName, fm);
openAndWrite(destinationFileName, fm);
entry.PreserveExtractionOptions(destinationFileName, options);
}
}

View File

@@ -91,8 +91,15 @@ internal abstract class ZipFileEntry : ZipHeader
protected void LoadExtra(byte[] extra)
{
for (var i = 0; i < extra.Length - 4; )
for (var i = 0; i < extra.Length; )
{
// Ensure we have at least a header (2-byte ID + 2-byte length)
if (i + 4 > extra.Length)
{
// Incomplete header — stop parsing extras
break;
}
var type = (ExtraDataType)BinaryPrimitives.ReadUInt16LittleEndian(extra.AsSpan(i));
if (!Enum.IsDefined(typeof(ExtraDataType), type))
{
@@ -106,7 +113,17 @@ internal abstract class ZipFileEntry : ZipHeader
if (length > extra.Length)
{
// bad extras block
return;
break; // allow processing optional other blocks
}
// Some ZIP files contain vendor-specific or malformed extra fields where the declared
// data length extends beyond the remaining buffer. This adjustment ensures that
// we only read data within bounds (i + 4 + length <= extra.Length)
// The example here is: 41 43 18 00 41 52 43 30 46 EB FF FF 51 29 03 C6 03 00 00 00 00 00 00 00 00
// No existing zip utility uses 0x4341 ('AC')
if (i + 4 + length > extra.Length)
{
// incomplete or corrupt field
break; // allow processing optional other blocks
}
var data = new byte[length];

View File

@@ -1,18 +1,12 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Compressors.Rar;
internal interface IRarUnpack
{
#if NETSTANDARD2_0 || NETFRAMEWORK
void DoUnpack(FileHeader fileHeader, Stream readStream, Stream writeStream);
void DoUnpack();
#else
ValueTask DoUnpackAsync(FileHeader fileHeader, Stream readStream, Stream writeStream);
ValueTask DoUnpackAsync();
#endif
// eg u/i pause/resume button
bool Suspended { get; set; }

View File

@@ -1,7 +1,6 @@
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
@@ -10,17 +9,6 @@ namespace SharpCompress.Compressors.Rar;
internal class RarBLAKE2spStream : RarStream, IStreamStack
{
public static async ValueTask<RarBLAKE2spStream> Create(
IRarUnpack unpack,
FileHeader fileHeader,
MultiVolumeReadOnlyStream readStream
)
{
var rs = new RarBLAKE2spStream(unpack, fileHeader, readStream);
await RarStream.Initialize(rs, unpack, fileHeader, readStream);
return rs;
}
#if DEBUG_STREAMS
long IStreamStack.InstanceId { get; set; }
#endif
@@ -115,7 +103,7 @@ internal class RarBLAKE2spStream : RarStream, IStreamStack
byte[] _hash = { };
protected RarBLAKE2spStream(
public RarBLAKE2spStream(
IRarUnpack unpack,
FileHeader fileHeader,
MultiVolumeReadOnlyStream readStream

View File

@@ -1,8 +1,8 @@
#nullable disable
using System;
using System.Buffers;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
@@ -36,47 +36,18 @@ internal class RarStream : Stream, IStreamStack
private bool fetch;
private byte[]? tmpBuffer = ArrayPool<byte>.Shared.Rent(65536);
private byte[] tmpBuffer = ArrayPool<byte>.Shared.Rent(65536);
private int tmpOffset;
private int tmpCount;
private byte[]? outBuffer;
private byte[] outBuffer;
private int outOffset;
private int outCount;
private int outTotal;
private bool isDisposed;
private long _position;
public static async ValueTask<RarStream> Create(
IRarUnpack unpack,
FileHeader fileHeader,
Stream readStream
)
{
var rs = new RarStream(unpack, fileHeader, readStream);
await Initialize(rs, unpack, fileHeader, readStream);
return rs;
}
internal static async ValueTask Initialize(
RarStream rs,
IRarUnpack unpack,
FileHeader fileHeader,
Stream readStream
)
{
rs.fetch = true;
#if !NETSTANDARD2_0 && !NETFRAMEWORK
await unpack.DoUnpackAsync(fileHeader, readStream, rs);
#else
unpack.DoUnpack(fileHeader, readStream, rs);
await Task.CompletedTask;
#endif
rs.fetch = false;
rs._position = 0;
}
protected RarStream(IRarUnpack unpack, FileHeader fileHeader, Stream readStream)
public RarStream(IRarUnpack unpack, FileHeader fileHeader, Stream readStream)
{
this.unpack = unpack;
this.fileHeader = fileHeader;
@@ -85,6 +56,11 @@ internal class RarStream : Stream, IStreamStack
#if DEBUG_STREAMS
this.DebugConstruct(typeof(RarStream));
#endif
fetch = true;
unpack.DoUnpack(fileHeader, readStream, this);
fetch = false;
_position = 0;
}
protected override void Dispose(bool disposing)
@@ -96,11 +72,8 @@ internal class RarStream : Stream, IStreamStack
#if DEBUG_STREAMS
this.DebugDispose(typeof(RarStream));
#endif
if (tmpBuffer != null)
{
ArrayPool<byte>.Shared.Return(this.tmpBuffer);
this.tmpBuffer = null;
}
ArrayPool<byte>.Shared.Return(this.tmpBuffer);
this.tmpBuffer = null;
}
isDisposed = true;
base.Dispose(disposing);
@@ -108,26 +81,6 @@ internal class RarStream : Stream, IStreamStack
}
}
#if !NETSTANDARD2_0 && !NETFRAMEWORK
public override async ValueTask DisposeAsync()
{
if (!isDisposed)
{
#if DEBUG_STREAMS
this.DebugDispose(typeof(RarStream));
#endif
if (tmpBuffer != null)
{
ArrayPool<byte>.Shared.Return(this.tmpBuffer);
this.tmpBuffer = null;
}
isDisposed = true;
await readStream.DisposeAsync().ConfigureAwait(false);
}
await base.DisposeAsync().ConfigureAwait(false);
}
#endif
public override bool CanRead => true;
public override bool CanSeek => false;
@@ -136,8 +89,6 @@ internal class RarStream : Stream, IStreamStack
public override void Flush() { }
public override Task FlushAsync(CancellationToken cancellationToken) => Task.CompletedTask;
public override long Length => fileHeader.UncompressedSize;
//commented out code always returned the length of the file
@@ -147,96 +98,8 @@ internal class RarStream : Stream, IStreamStack
set => throw new NotSupportedException();
}
#if !NETSTANDARD2_0 && !NETFRAMEWORK
public override Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
var bytesRead = Read(buffer, offset, count);
return Task.FromResult(bytesRead);
}
catch (Exception ex)
{
return Task.FromException<int>(ex);
}
}
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
outTotal = 0;
var count = buffer.Length;
var offset = 0;
if (tmpCount > 0)
{
var toCopy = tmpCount < count ? tmpCount : count;
tmpBuffer.AsSpan(tmpOffset, toCopy).CopyTo(buffer.Span.Slice(offset, toCopy));
tmpOffset += toCopy;
tmpCount -= toCopy;
offset += toCopy;
count -= toCopy;
outTotal += toCopy;
}
if (count > 0 && unpack.DestSize > 0)
{
// Create a temporary array for the unpack operation
var tempArray = ArrayPool<byte>.Shared.Rent(count);
try
{
outBuffer = tempArray;
outOffset = 0;
outCount = count;
fetch = true;
await unpack.DoUnpackAsync();
fetch = false;
// Copy the unpacked data to the memory buffer
var unpacked = outTotal - (tmpCount > 0 ? offset : 0);
if (unpacked > 0)
{
tempArray.AsSpan(0, unpacked).CopyTo(buffer.Span.Slice(offset, unpacked));
}
}
finally
{
ArrayPool<byte>.Shared.Return(tempArray);
outBuffer = null;
}
}
_position += outTotal;
if (count > 0 && outTotal == 0 && _position != Length)
{
// sanity check, eg if we try to decompress a redir entry
throw new InvalidOperationException(
$"unpacked file size does not match header: expected {Length} found {_position}"
);
}
return outTotal;
}
public override int Read(byte[] buffer, int offset, int count) =>
throw new NotSupportedException("Use ReadAsync or ReadAsync(Memory<byte>) instead.");
#else
public override int Read(byte[] buffer, int offset, int count)
{
if (tmpBuffer == null)
{
throw new ObjectDisposedException(nameof(RarStream));
}
outTotal = 0;
if (tmpCount > 0)
{
@@ -267,7 +130,6 @@ internal class RarStream : Stream, IStreamStack
}
return outTotal;
}
#endif
public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
@@ -275,14 +137,6 @@ internal class RarStream : Stream, IStreamStack
public override void Write(byte[] buffer, int offset, int count)
{
if (tmpBuffer == null)
{
throw new ObjectDisposedException(nameof(RarStream));
}
if (outBuffer == null)
{
throw new ObjectDisposedException(nameof(RarStream));
}
if (!fetch)
{
throw new NotSupportedException();
@@ -311,81 +165,8 @@ internal class RarStream : Stream, IStreamStack
}
}
public override Task WriteAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
Write(buffer, offset, count);
return Task.CompletedTask;
}
catch (Exception ex)
{
return Task.FromException(ex);
}
}
#if !NETSTANDARD2_0 && !NETFRAMEWORK
public override ValueTask WriteAsync(
ReadOnlyMemory<byte> buffer,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
if (!fetch)
{
throw new NotSupportedException();
}
var count = buffer.Length;
var offset = 0;
if (outCount > 0)
{
var toCopy = outCount < count ? outCount : count;
buffer.Span.Slice(offset, toCopy).CopyTo(outBuffer.AsSpan(outOffset, toCopy));
outOffset += toCopy;
outCount -= toCopy;
offset += toCopy;
count -= toCopy;
outTotal += toCopy;
}
if (count > 0)
{
EnsureBufferCapacity(count);
buffer.Span.Slice(offset, count).CopyTo(tmpBuffer.AsSpan(tmpCount, count));
tmpCount += count;
tmpOffset = 0;
unpack.Suspended = true;
}
else
{
unpack.Suspended = false;
}
return ValueTask.CompletedTask;
}
catch (Exception ex)
{
return new ValueTask(Task.FromException(ex));
}
}
#endif
private void EnsureBufferCapacity(int count)
{
if (tmpBuffer == null)
{
throw new ObjectDisposedException(nameof(RarStream));
}
if (this.tmpBuffer.Length < this.tmpCount + count)
{
var newLength =

View File

@@ -4,7 +4,6 @@ using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressors.PPMd.H;
@@ -156,20 +155,6 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
DoUnpack();
}
#if !NETSTANDARD2_0 && !NETFRAMEWORK
public ValueTask DoUnpackAsync()
{
DoUnpack();
return ValueTask.CompletedTask;
}
public ValueTask DoUnpackAsync(FileHeader fileHeader, Stream readStream, Stream writeStream)
{
DoUnpack(fileHeader, readStream, writeStream);
return ValueTask.CompletedTask;
}
#endif
public void DoUnpack()
{
if (fileHeader.CompressionMethod == 0)

View File

@@ -1,4 +1,3 @@
#if NETSTANDARD2_0 || NETFRAMEWORK
using System;
using System.IO;
using SharpCompress.Common.Rar.Headers;
@@ -107,4 +106,3 @@ internal partial class Unpack : IRarUnpack
public static byte[] EnsureCapacity(byte[] array, int length) =>
array.Length < length ? new byte[length] : array;
}
#endif

View File

@@ -1,4 +1,3 @@
#if NETSTANDARD2_0 || NETFRAMEWORK
#nullable disable
using System;
@@ -30,12 +29,12 @@ internal sealed partial class Unpack : BitInput
Suspended = false;
UnpAllBuf = false;
UnpSomeRead = false;
// #if RarV2017_RAR_SMP
// MaxUserThreads = 1;
// UnpThreadPool = CreateThreadPool();
// ReadBufMT = null;
// UnpThreadData = null;
// #endif
/*#if RarV2017_RAR_SMP
MaxUserThreads = 1;
UnpThreadPool = CreateThreadPool();
ReadBufMT = null;
UnpThreadData = null;
#endif*/
MaxWinSize = 0;
MaxWinMask = 0;
@@ -198,21 +197,21 @@ internal sealed partial class Unpack : BitInput
break;
#endif
case 50: // RAR 5.0 compression algorithm.
// #if RarV2017_RAR_SMP
// if (MaxUserThreads > 1)
// {
// // We do not use the multithreaded unpack routine to repack RAR archives
// // in 'suspended' mode, because unlike the single threaded code it can
// // write more than one dictionary for same loop pass. So we would need
// // larger buffers of unknown size. Also we do not support multithreading
// // in fragmented window mode.
// if (!Fragmented)
// {
// Unpack5MT(Solid);
// break;
// }
// }
// #endif
/*#if RarV2017_RAR_SMP
if (MaxUserThreads > 1)
{
// We do not use the multithreaded unpack routine to repack RAR archives
// in 'suspended' mode, because unlike the single threaded code it can
// write more than one dictionary for same loop pass. So we would need
// larger buffers of unknown size. Also we do not support multithreading
// in fragmented window mode.
if (!Fragmented)
{
Unpack5MT(Solid);
break;
}
}
#endif*/
Unpack5(Solid);
break;
#if !Rar2017_NOSTRICT
@@ -408,4 +407,3 @@ internal sealed partial class Unpack : BitInput
}
}
}
#endif

View File

@@ -1,411 +0,0 @@
#if !NETSTANDARD2_0 && !NETFRAMEWORK
#nullable disable
using System;
using SharpCompress.Common;
using static SharpCompress.Compressors.Rar.UnpackV2017.PackDef;
using static SharpCompress.Compressors.Rar.UnpackV2017.UnpackGlobal;
#if !Rar2017_64bit
using size_t = System.UInt32;
#else
using nint = System.Int64;
using nuint = System.UInt64;
using size_t = System.UInt64;
#endif
namespace SharpCompress.Compressors.Rar.UnpackV2017;
internal sealed partial class Unpack : BitInput
{
/// <summary>
/// Constructs the RAR 5.0 unpacker. Ported from unrar's Unpack::Unpack; the
/// commented-out fragments preserve the original C++ for reference.
/// Initializes all window/decoder state so that a first DoUnpack call with a
/// wrong Solid value cannot crash.
/// </summary>
public Unpack( /* ComprDataIO *DataIO */
)
//:Inp(true),VMCodeInp(true)
: base(true)
{
_UnpackCtor();
//UnpIO=DataIO;
// No sliding window allocated yet; Init() allocates it on first use.
Window = null;
Fragmented = false;
Suspended = false;
UnpAllBuf = false;
UnpSomeRead = false;
// #if RarV2017_RAR_SMP
// MaxUserThreads = 1;
// UnpThreadPool = CreateThreadPool();
// ReadBufMT = null;
// UnpThreadData = null;
// #endif
MaxWinSize = 0;
MaxWinMask = 0;
// Perform initialization, which should be done only once for all files.
// It prevents crash if first DoUnpack call is later made with wrong
// (true) 'Solid' value.
UnpInitData(false);
#if !RarV2017_SFX_MODULE
// RAR 1.5 decompression initialization
UnpInitData15(false);
InitHuff();
#endif
}
// later: may need Dispose() if we support thread pool
//Unpack::~Unpack()
//{
// InitFilters30(false);
//
// if (Window!=null)
// free(Window);
//#if RarV2017_RAR_SMP
// DestroyThreadPool(UnpThreadPool);
// delete[] ReadBufMT;
// delete[] UnpThreadData;
//#endif
//}
/// <summary>
/// Allocates (or grows) the sliding dictionary window for decompression.
/// </summary>
/// <param name="WinSize">Requested window size in bytes; clamped up to a safe
/// minimum and rejected above 4 GB. Note: size_t is aliased to UInt32 on
/// 32-bit builds, so a 4 GB dictionary overflows to 0 (handled below).</param>
/// <param name="Solid">True when continuing a solid stream; forces copying of
/// existing window contents into the grown window.</param>
/// <exception cref="InvalidFormatException">Window size overflowed to zero, or
/// an unsupported grow/fragmented combination was requested.</exception>
private void Init(size_t WinSize, bool Solid)
{
// If 32-bit RAR unpacks an archive with 4 GB dictionary, the window size
// will be 0 because of size_t overflow. Let's issue the memory error.
if (WinSize == 0)
//ErrHandler.MemoryError();
{
throw new InvalidFormatException(
"invalid window size (possibly due to a rar file with a 4GB being unpacked on a 32-bit platform)"
);
}
// Minimum window size must be at least twice more than maximum possible
// size of filter block, which is 0x10000 in RAR now. If window size is
// smaller, we can have a block with never cleared flt->NextWindow flag
// in UnpWriteBuf(). Minimum window size 0x20000 would be enough, but let's
// use 0x40000 for extra safety and possible filter area size expansion.
const size_t MinAllocSize = 0x40000;
if (WinSize < MinAllocSize)
{
WinSize = MinAllocSize;
}
if (WinSize <= MaxWinSize) // Use the already allocated window.
{
return;
}
if ((WinSize >> 16) > 0x10000) // Window size must not exceed 4 GB.
{
return;
}
// Archiving code guarantees that window size does not grow in the same
// solid stream. So if we are here, we are either creating a new window
// or increasing the size of non-solid window. So we could safely reject
// current window data without copying them to a new window, though being
// extra cautious, we still handle the solid window grow case below.
var Grow = Solid && (Window != null || Fragmented);
// We do not handle growth for existing fragmented window.
if (Grow && Fragmented)
//throw std::bad_alloc();
{
throw new InvalidFormatException("Grow && Fragmented");
}
// NOTE(review): in this C# port the allocation cannot fail with null the way
// malloc could in the original; the null branch below is reachable only when
// Fragmented is already true.
var NewWindow = Fragmented ? null : new byte[WinSize];
if (NewWindow == null)
{
if (Grow || WinSize < 0x1000000)
{
// We do not support growth for new fragmented window.
// Also exclude RAR4 and small dictionaries.
//throw std::bad_alloc();
throw new InvalidFormatException("Grow || WinSize<0x1000000");
}
else
{
if (Window != null) // If allocated by preceding files.
{
//free(Window);
Window = null;
}
FragWindow.Init(WinSize);
Fragmented = true;
}
}
if (!Fragmented)
{
// Clean the window to generate the same output when unpacking corrupt
// RAR files, which may access unused areas of sliding dictionary.
// sharpcompress: don't need this, freshly allocated above
//memset(NewWindow,0,WinSize);
// If Window is not NULL, it means that window size has grown.
// In solid streams we need to copy data to a new window in such case.
// RAR archiving code does not allow it in solid streams now,
// but let's implement it anyway just in case we'll change it sometimes.
if (Grow)
{
// Copy the most recent MaxWinSize bytes, preserving their position
// relative to UnpPtr under the new (larger) window mask.
for (size_t I = 1; I <= MaxWinSize; I++)
{
NewWindow[(UnpPtr - I) & (WinSize - 1)] = Window[
(UnpPtr - I) & (MaxWinSize - 1)
];
}
}
//if (Window!=null)
// free(Window);
Window = NewWindow;
}
// Publish the new window geometry; mask is valid because WinSize is a power
// of two at this point (callers pass powers of two — TODO confirm).
MaxWinSize = WinSize;
MaxWinMask = MaxWinSize - 1;
}
/// <summary>
/// Dispatches to the decompressor matching the archive's compression method
/// version (15 = RAR 1.5, 20/26 = RAR 2.x, 29 = RAR 3.x, 50 = RAR 5.0).
/// </summary>
/// <param name="Method">Compression method version number from the file header.</param>
/// <param name="Solid">True to continue decoding a solid stream with carried-over state.</param>
/// <exception cref="NotImplementedException">RAR 3.x (method 29) is not handled by this path.</exception>
/// <exception cref="InvalidFormatException">Unknown compression method (unless Rar2017_NOSTRICT).</exception>
private void DoUnpack(uint Method, bool Solid)
{
// Methods <50 will crash in Fragmented mode when accessing NULL Window.
// They cannot be called in such mode now, but we check it below anyway
// just for extra safety.
switch (Method)
{
#if !RarV2017_SFX_MODULE
case 15: // rar 1.5 compression
if (!Fragmented)
{
Unpack15(Solid);
}
break;
case 20: // rar 2.x compression
case 26: // files larger than 2GB
if (!Fragmented)
{
Unpack20(Solid);
}
break;
#endif
#if !RarV2017_RAR5ONLY
case 29: // rar 3.x compression
if (!Fragmented)
{
throw new NotImplementedException();
}
break;
#endif
case 50: // RAR 5.0 compression algorithm.
// #if RarV2017_RAR_SMP
// if (MaxUserThreads > 1)
// {
// // We do not use the multithreaded unpack routine to repack RAR archives
// // in 'suspended' mode, because unlike the single threaded code it can
// // write more than one dictionary for same loop pass. So we would need
// // larger buffers of unknown size. Also we do not support multithreading
// // in fragmented window mode.
// if (!Fragmented)
// {
// Unpack5MT(Solid);
// break;
// }
// }
// #endif
Unpack5(Solid);
break;
#if !Rar2017_NOSTRICT
default:
throw new InvalidFormatException("unknown compression method " + Method);
#endif
}
}
/// <summary>
/// Resets decoder state before unpacking a file. For non-solid files the full
/// sliding-window bookkeeping (distances, block tables, pointers) is cleared;
/// for solid files that state is carried over from the previous file, and only
/// per-file state (filters, bit input, counters, block header) is reset.
/// </summary>
/// <param name="Solid">True when this file continues a solid stream.</param>
private void UnpInitData(bool Solid)
{
if (!Solid)
{
// Clear the match-distance history used by LZ back-references.
new Span<uint>(OldDist).Clear();
OldDistPtr = 0;
LastDist = LastLength = 0;
// memset(Window,0,MaxWinSize);
//memset(&BlockTables,0,sizeof(BlockTables));
BlockTables = new UnpackBlockTables();
// sharpcompress: no default ctor for struct
BlockTables.Init();
UnpPtr = WrPtr = 0;
// Flush boundary within the window; masked so it stays inside the window.
WriteBorder = Math.Min(MaxWinSize, UNPACK_MAX_WRITE) & MaxWinMask;
}
// Filters never share several solid files, so we can safely reset them
// even in solid archive.
InitFilters();
Inp.InitBitInput();
WrittenFileSize = 0;
ReadTop = 0;
ReadBorder = 0;
//memset(&BlockHeader,0,sizeof(BlockHeader));
BlockHeader = new UnpackBlockHeader();
BlockHeader.BlockSize = -1; // '-1' means not defined yet.
#if !RarV2017_SFX_MODULE
// Version-specific resets: RAR 2.x state (when compiled in) and RAR 5.0 state.
UnpInitData20(Solid);
#endif
//UnpInitData30(Solid);
UnpInitData50(Solid);
}
// LengthTable contains the length in bits for every element of alphabet.
// Dec is the structure to decode Huffman code/
// Size is size of length table and DecodeNum field in Dec structure,
/// <summary>
/// Builds the canonical-Huffman decode tables for one alphabet from a table of
/// per-symbol code lengths (ported from unrar). Fills <paramref name="Dec"/> with:
/// DecodeLen (left-aligned upper-limit codes per bit length), DecodePos (start
/// positions in the code list per bit length), DecodeNum (code-list position to
/// symbol mapping), and the QuickLen/QuickNum tables used for fast decoding of
/// short codes.
/// </summary>
/// <param name="LengthTable">Bit lengths (low nibble of each byte) for every symbol of the alphabet.</param>
/// <param name="offset">Start index of this alphabet's lengths within <paramref name="LengthTable"/>.</param>
/// <param name="Dec">Decode table structure to populate.</param>
/// <param name="Size">Size of the alphabet (number of entries in the length table and DecodeNum).</param>
private void MakeDecodeTables(Span<byte> LengthTable, int offset, DecodeTable Dec, uint Size)
{
// Size of alphabet and DecodePos array.
Dec.MaxNum = Size;
// Calculate how many entries for every bit length in LengthTable we have.
var LengthCount = new uint[16];
//memset(LengthCount,0,sizeof(LengthCount));
for (size_t I = 0; I < Size; I++)
{
LengthCount[LengthTable[checked((int)(offset + I))] & 0xf]++;
}
// We must not calculate the number of zero length codes.
LengthCount[0] = 0;
// Set the entire DecodeNum to zero.
//memset(Dec->DecodeNum,0,Size*sizeof(*Dec->DecodeNum));
new Span<ushort>(Dec.DecodeNum).Clear();
// Initialize not really used entry for zero length code.
Dec.DecodePos[0] = 0;
// Start code for bit length 1 is 0.
Dec.DecodeLen[0] = 0;
// Right aligned upper limit code for current bit length.
uint UpperLimit = 0;
for (var I = 1; I < 16; I++)
{
// Adjust the upper limit code.
UpperLimit += LengthCount[I];
// Left aligned upper limit code.
var LeftAligned = UpperLimit << (16 - I);
// Prepare the upper limit code for next bit length.
UpperLimit *= 2;
// Store the left aligned upper limit code.
Dec.DecodeLen[I] = LeftAligned;
// Every item of this array contains the sum of all preceding items.
// So it contains the start position in code list for every bit length.
Dec.DecodePos[I] = Dec.DecodePos[I - 1] + LengthCount[I - 1];
}
// Prepare the copy of DecodePos. We'll modify this copy below,
// so we cannot use the original DecodePos.
var CopyDecodePos = new uint[Dec.DecodePos.Length];
//memcpy(CopyDecodePos,Dec->DecodePos,sizeof(CopyDecodePos));
Array.Copy(Dec.DecodePos, CopyDecodePos, CopyDecodePos.Length);
// For every bit length in the bit length table and so for every item
// of alphabet.
for (uint I = 0; I < Size; I++)
{
// Get the current bit length.
var _CurBitLength = (byte)(LengthTable[checked((int)(offset + I))] & 0xf);
if (_CurBitLength != 0)
{
// Last position in code list for current bit length.
var LastPos = CopyDecodePos[_CurBitLength];
// Prepare the decode table, so this position in code list will be
// decoded to current alphabet item number.
Dec.DecodeNum[LastPos] = (ushort)I;
// We'll use next position number for this bit length next time.
// So we pass through the entire range of positions available
// for every bit length.
CopyDecodePos[_CurBitLength]++;
}
}
// Define the number of bits to process in quick mode. We use more bits
// for larger alphabets. More bits means that more codes will be processed
// in quick mode, but also that more time will be spent to preparation
// of tables for quick decode.
switch (Size)
{
case NC:
case NC20:
case NC30:
Dec.QuickBits = MAX_QUICK_DECODE_BITS;
break;
default:
Dec.QuickBits = MAX_QUICK_DECODE_BITS - 3;
break;
}
// Size of tables for quick mode.
var QuickDataSize = 1U << (int)Dec.QuickBits;
// Bit length for current code, start from 1 bit codes. It is important
// to use 1 bit instead of 0 for minimum code length, so we are moving
// forward even when processing a corrupt archive.
//uint CurBitLength=1;
byte CurBitLength = 1;
// For every right aligned bit string which supports the quick decoding.
for (uint Code = 0; Code < QuickDataSize; Code++)
{
// Left align the current code, so it will be in usual bit field format.
var BitField = Code << (int)(16 - Dec.QuickBits);
// Prepare the table for quick decoding of bit lengths.
// Find the upper limit for current bit field and adjust the bit length
// accordingly if necessary.
while (CurBitLength < Dec.DecodeLen.Length && BitField >= Dec.DecodeLen[CurBitLength])
{
CurBitLength++;
}
// Translation of right aligned bit string to bit length.
Dec.QuickLen[Code] = CurBitLength;
// Prepare the table for quick translation of position in code list
// to position in alphabet.
// Calculate the distance from the start code for current bit length.
var Dist = BitField - Dec.DecodeLen[CurBitLength - 1];
// Right align the distance.
Dist >>= (16 - CurBitLength);
// Now we can calculate the position in the code list. It is the sum
// of first position for current bit length and right aligned distance
// between our bit field and start code for current bit length.
uint Pos;
if (
CurBitLength < Dec.DecodePos.Length
&& (Pos = Dec.DecodePos[CurBitLength] + Dist) < Size
)
{
// Define the code to alphabet number translation.
Dec.QuickNum[Code] = Dec.DecodeNum[Pos];
}
else
{
// Can be here for length table filled with zeroes only (empty).
Dec.QuickNum[Code] = 0;
}
}
}
}
#endif

View File

@@ -1,115 +0,0 @@
#if !NETSTANDARD2_0 && !NETFRAMEWORK
using System;
using System.Buffers;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common.Rar.Headers;
#if !Rar2017_64bit
using size_t = System.UInt32;
#else
using nint = System.Int64;
using nuint = System.UInt64;
using size_t = System.UInt64;
#endif
namespace SharpCompress.Compressors.Rar.UnpackV2017;
/// <summary>
/// Async entry points and stream I/O plumbing for the RAR 2017 unpacker.
/// The compression state itself lives in the other parts of this partial class.
/// </summary>
internal partial class Unpack : IRarUnpack
{
    private FileHeader fileHeader;
    private Stream readStream;
    private Stream writeStream;

    // Initializes the per-channel audio variables used by the RAR 2.0 audio filter.
    private void _UnpackCtor()
    {
        for (var i = 0; i < AudV.Length; i++)
        {
            AudV[i] = new AudioVariables();
        }
    }

    private int UnpIO_UnpRead(byte[] buf, int offset, int count) =>
        // NOTE: caller has logic to check for -1 for error; we throw instead.
        readStream.Read(buf, offset, count);

    private void UnpIO_UnpWrite(byte[] buf, size_t offset, uint count) =>
        writeStream.Write(buf, checked((int)offset), checked((int)count));

    /// <summary>
    /// Sets up state for unpacking <paramref name="fileHeader"/> from
    /// <paramref name="readStream"/> into <paramref name="writeStream"/> and
    /// starts decompression.
    /// </summary>
    public ValueTask DoUnpackAsync(FileHeader fileHeader, Stream readStream, Stream writeStream)
    {
        // as of 12/2017 .NET limits array indexing to using a signed integer
        // MaxWinSize causes unpack to use a fragmented window when the file
        // window size exceeds MaxWinSize
        // uggh, that's not how this variable is used, it's the size of the currently allocated window buffer
        //x MaxWinSize = ((uint)int.MaxValue) + 1;
        // may be long.MaxValue which could indicate unknown size (not present in header)
        DestUnpSize = fileHeader.UncompressedSize;
        this.fileHeader = fileHeader;
        this.readStream = readStream;
        this.writeStream = writeStream;
        if (!fileHeader.IsStored)
        {
            Init(fileHeader.WindowSize, fileHeader.IsSolid);
        }
        Suspended = false;
        return DoUnpackAsync();
    }

    /// <summary>
    /// Continues (or starts) unpacking the current file header: stored entries
    /// are streamed through directly, compressed entries go to the decoder.
    /// </summary>
    public ValueTask DoUnpackAsync()
    {
        if (fileHeader.IsStored)
        {
            return UnstoreFileAsync();
        }
        DoUnpack(fileHeader.CompressionAlgorithm, fileHeader.IsSolid);
        return new ValueTask();
    }

    // Copies a stored (uncompressed) entry from readStream to writeStream in
    // chunks, until EOF or the unpacker is suspended.
    private async ValueTask UnstoreFileAsync()
    {
        var length = (int)Math.Min(0x10000, DestUnpSize);
        var buffer = ArrayPool<byte>.Shared.Rent(length);
        try
        {
            do
            {
                var memory = new Memory<byte>(buffer, 0, length);
                var n = await readStream.ReadAsync(memory);
                if (n == 0)
                {
                    break;
                }
                await writeStream.WriteAsync(memory.Slice(0, n));
                DestUnpSize -= n;
            } while (!Suspended);
        }
        finally
        {
            // BUGFIX: the rented buffer was previously never returned,
            // leaking pool memory on every stored-file extraction.
            ArrayPool<byte>.Shared.Return(buffer);
        }
    }

    public bool Suspended { get; set; }

    public long DestSize => DestUnpSize;

    public int Char
    {
        get
        {
            // TODO: coderb: not sure where the "MAXSIZE-30" comes from, ported from V1 code
            if (InAddr > MAX_SIZE - 30)
            {
                UnpReadBuf();
            }
            return InBuf[InAddr++];
        }
    }

    public int PpmEscChar
    {
        get => PPMEscChar;
        set => PPMEscChar = value;
    }

    // Returns the array unchanged if it is already large enough; otherwise a
    // new array of the requested length (contents are NOT copied over).
    public static byte[] EnsureCapacity(byte[] array, int length) =>
        array.Length < length ? new byte[length] : array;
}
#endif

View File

@@ -2,7 +2,6 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Readers;
@@ -68,7 +67,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
}
}
public async Task<bool> MoveToNextEntryAsync()
public bool MoveToNextEntry()
{
if (_completed)
{
@@ -84,7 +83,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
}
if (!_wroteCurrentEntry)
{
await SkipEntryAsync();
SkipEntry();
}
_wroteCurrentEntry = false;
if (NextEntryForCurrentStream())
@@ -120,15 +119,15 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
#region Entry Skip/Write
private async Task SkipEntryAsync()
private void SkipEntry()
{
if (!Entry.IsDirectory)
{
await SkipAsync();
Skip();
}
}
private async Task SkipAsync()
private void Skip()
{
var part = Entry.Parts.First();
@@ -146,11 +145,11 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
}
}
//don't know the size so we have to try to decompress to skip
using var s = await OpenEntryStreamAsync();
await s.SkipEntryAsync();
using var s = OpenEntryStream();
s.SkipEntry();
}
public async Task WriteEntryToAsync(Stream writableStream)
public void WriteEntryTo(Stream writableStream)
{
if (_wroteCurrentEntry)
{
@@ -168,24 +167,24 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
);
}
await WriteAsync(writableStream);
Write(writableStream);
_wroteCurrentEntry = true;
}
internal async Task WriteAsync(Stream writeStream)
internal void Write(Stream writeStream)
{
var streamListener = this as IReaderExtractionListener;
using Stream s = await OpenEntryStreamAsync();
using Stream s = OpenEntryStream();
s.TransferTo(writeStream, Entry, streamListener);
}
public async Task<EntryStream> OpenEntryStreamAsync()
public EntryStream OpenEntryStream()
{
if (_wroteCurrentEntry)
{
throw new ArgumentException("WriteEntryTo or OpenEntryStream can only be called once.");
}
var stream = await GetEntryStreamAsync();
var stream = GetEntryStream();
_wroteCurrentEntry = true;
return stream;
}
@@ -193,11 +192,11 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
/// <summary>
/// Retains a reference to the entry stream, so we can check whether it completed later.
/// </summary>
protected Task<EntryStream> CreateEntryStreamAsync(Stream? decompressed) =>
Task.FromResult(new EntryStream(this, decompressed.NotNull()));
protected EntryStream CreateEntryStream(Stream? decompressed) =>
new(this, decompressed.NotNull());
protected virtual Task<EntryStream> GetEntryStreamAsync() =>
CreateEntryStreamAsync(Entry.Parts.First().GetCompressedStream());
protected virtual EntryStream GetEntryStream() =>
CreateEntryStream(Entry.Parts.First().GetCompressedStream());
#endregion

View File

@@ -1,6 +1,5 @@
using System;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Readers;
@@ -20,7 +19,7 @@ public interface IReader : IDisposable
/// Decompresses the current entry to the stream. This cannot be called twice for the current entry.
/// </summary>
/// <param name="writableStream"></param>
Task WriteEntryToAsync(Stream writableStream);
void WriteEntryTo(Stream writableStream);
bool Cancelled { get; }
void Cancel();
@@ -29,11 +28,11 @@ public interface IReader : IDisposable
/// Moves to the next entry by reading more data from the underlying stream. This skips if data has not been read.
/// </summary>
/// <returns></returns>
Task<bool> MoveToNextEntryAsync();
bool MoveToNextEntry();
/// <summary>
/// Opens the current entry as a stream that will decompress as it is read.
/// Read the entire stream or use SkipEntry on EntryStream.
/// </summary>
Task<EntryStream> OpenEntryStreamAsync();
EntryStream OpenEntryStream();
}

View File

@@ -1,69 +1,68 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Readers;
public static class IReaderExtensions
{
public static async Task WriteEntryToAsync(this IReader reader, string filePath)
public static void WriteEntryTo(this IReader reader, string filePath)
{
using Stream stream = File.Open(filePath, FileMode.Create, FileAccess.Write);
await reader.WriteEntryToAsync(stream);
reader.WriteEntryTo(stream);
}
public static async Task WriteEntryToAsync(this IReader reader, FileInfo filePath)
public static void WriteEntryTo(this IReader reader, FileInfo filePath)
{
using Stream stream = filePath.Open(FileMode.Create);
await reader.WriteEntryToAsync(stream);
reader.WriteEntryTo(stream);
}
/// <summary>
/// Extract all remaining unread entries to specific directory, retaining filename
/// </summary>
public static async Task WriteAllToDirectoryAsync(
public static void WriteAllToDirectory(
this IReader reader,
string destinationDirectory,
ExtractionOptions? options = null
)
{
while (await reader.MoveToNextEntryAsync())
while (reader.MoveToNextEntry())
{
await reader.WriteEntryToDirectoryAsync(destinationDirectory, options);
reader.WriteEntryToDirectory(destinationDirectory, options);
}
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static async Task WriteEntryToDirectoryAsync(
public static void WriteEntryToDirectory(
this IReader reader,
string destinationDirectory,
ExtractionOptions? options = null
) =>
await ExtractionMethods.WriteEntryToDirectoryAsync(
ExtractionMethods.WriteEntryToDirectory(
reader.Entry,
destinationDirectory,
options,
reader.WriteEntryToFileAsync
reader.WriteEntryToFile
);
/// <summary>
/// Extract to specific file
/// </summary>
public static async Task WriteEntryToFileAsync(
public static void WriteEntryToFile(
this IReader reader,
string destinationFileName,
ExtractionOptions? options = null
) =>
await ExtractionMethods.WriteEntryToFileAsync(
ExtractionMethods.WriteEntryToFile(
reader.Entry,
destinationFileName,
options,
async (x, fm) =>
(x, fm) =>
{
using var fs = File.Open(destinationFileName, fm);
await reader.WriteEntryToAsync(fs);
reader.WriteEntryTo(fs);
}
);
}

View File

@@ -2,7 +2,6 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Compressors.Rar;
@@ -101,7 +100,7 @@ public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
protected virtual IEnumerable<FilePart> CreateFilePartEnumerableForCurrentEntry() =>
Entry.Parts;
protected override async Task<EntryStream> GetEntryStreamAsync()
protected override EntryStream GetEntryStream()
{
if (Entry.IsRedir)
{
@@ -114,19 +113,16 @@ public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
);
if (Entry.IsRarV3)
{
return await CreateEntryStreamAsync(
new RarCrcStream(UnpackV1.Value, Entry.FileHeader, stream)
);
return CreateEntryStream(new RarCrcStream(UnpackV1.Value, Entry.FileHeader, stream));
}
if (Entry.FileHeader.FileCrc?.Length > 5)
{
var s = await RarBLAKE2spStream.Create(UnpackV2017.Value, Entry.FileHeader, stream);
return await CreateEntryStreamAsync(s);
return CreateEntryStream(
new RarBLAKE2spStream(UnpackV2017.Value, Entry.FileHeader, stream)
);
}
return await CreateEntryStreamAsync(
new RarCrcStream(UnpackV2017.Value, Entry.FileHeader, stream)
);
return CreateEntryStream(new RarCrcStream(UnpackV2017.Value, Entry.FileHeader, stream));
}
}

View File

@@ -4,7 +4,6 @@ using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Readers;
namespace SharpCompress;
@@ -86,28 +85,8 @@ internal static class Utility
public static void Skip(this Stream source)
{
var buffer = ArrayPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
try
{
do { } while (source.Read(buffer, 0, buffer.Length) == buffer.Length);
}
finally
{
ArrayPool<byte>.Shared.Return(buffer);
}
}
public static async Task SkipAsync(this Stream source)
{
var buffer = ArrayPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
try
{
do { } while (await source.ReadAsync(buffer, 0, buffer.Length) == buffer.Length);
}
finally
{
ArrayPool<byte>.Shared.Return(buffer);
}
using var buffer = MemoryPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
while (source.Read(buffer.Memory.Span) > 0) { }
}
public static DateTime DosDateToDateTime(ushort iDate, ushort iTime)

View File

@@ -335,9 +335,9 @@
"net8.0": {
"Microsoft.NET.ILLink.Tasks": {
"type": "Direct",
"requested": "[8.0.17, )",
"resolved": "8.0.17",
"contentHash": "x5/y4l8AtshpBOrCZdlE4txw8K3e3s9meBFeZeR3l8hbbku2V7kK6ojhXvrbjg1rk3G+JqL1BI26gtgc1ZrdUw=="
"requested": "[8.0.21, )",
"resolved": "8.0.21",
"contentHash": "s8H5PZQs50OcNkaB6Si54+v3GWM7vzs6vxFRMlD3aXsbM+aPCtod62gmK0BYWou9diGzmo56j8cIf/PziijDqQ=="
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",

View File

@@ -1,4 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
@@ -16,32 +20,31 @@ namespace SharpCompress.Test.Arc
}
[Fact]
public Task Arc_Uncompressed_Read() =>
ReadAsync("Arc.uncompressed.arc", CompressionType.None);
public void Arc_Uncompressed_Read() => Read("Arc.uncompressed.arc", CompressionType.None);
[Fact]
public async Task Arc_Squeezed_Read()
public void Arc_Squeezed_Read()
{
await ProcessArchive("Arc.squeezed.arc");
ProcessArchive("Arc.squeezed.arc");
}
[Fact]
public async Task Arc_Crunched_Read()
public void Arc_Crunched_Read()
{
await ProcessArchive("Arc.crunched.arc");
ProcessArchive("Arc.crunched.arc");
}
private async Task ProcessArchive(string archiveName)
private void ProcessArchive(string archiveName)
{
// Process a given archive by its name
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, archiveName)))
using (IReader reader = ArcReader.Open(stream))
{
while (await reader.MoveToNextEntryAsync())
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
await reader.WriteEntryToDirectoryAsync(
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);

View File

@@ -2,7 +2,6 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.Compressors.Xz;
@@ -28,16 +27,13 @@ public class ArchiveTests : ReaderTests
}
}
protected async Task ArchiveStreamReadExtractAllAsync(
string testArchive,
CompressionType compression
)
protected void ArchiveStreamReadExtractAll(string testArchive, CompressionType compression)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
await ArchiveStreamReadExtractAllAsync(new[] { testArchive }, compression);
ArchiveStreamReadExtractAll(new[] { testArchive }, compression);
}
protected async Task ArchiveStreamReadExtractAllAsync(
protected void ArchiveStreamReadExtractAll(
IEnumerable<string> testArchives,
CompressionType compression
)
@@ -58,7 +54,7 @@ public class ArchiveTests : ReaderTests
Assert.True(archive.IsSolid);
using (var reader = archive.ExtractAllEntries())
{
await UseReaderAsync(reader, compression);
UseReader(reader, compression);
}
VerifyFiles();
@@ -69,7 +65,7 @@ public class ArchiveTests : ReaderTests
}
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteEntryToDirectoryAsync(
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -87,38 +83,36 @@ public class ArchiveTests : ReaderTests
}
}
protected Task ArchiveStreamReadAsync(
string testArchive,
ReaderOptions? readerOptions = null
) => ArchiveStreamReadAsync(ArchiveFactory.AutoFactory, testArchive, readerOptions);
protected void ArchiveStreamRead(string testArchive, ReaderOptions? readerOptions = null) =>
ArchiveStreamRead(ArchiveFactory.AutoFactory, testArchive, readerOptions);
protected Task ArchiveStreamReadAsync(
protected void ArchiveStreamRead(
IArchiveFactory archiveFactory,
string testArchive,
ReaderOptions? readerOptions = null
)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
return ArchiveStreamReadAsync(archiveFactory, readerOptions, testArchive);
ArchiveStreamRead(archiveFactory, readerOptions, testArchive);
}
protected Task ArchiveStreamReadAsync(
protected void ArchiveStreamRead(
ReaderOptions? readerOptions = null,
params string[] testArchives
) => ArchiveStreamReadAsync(ArchiveFactory.AutoFactory, readerOptions, testArchives);
) => ArchiveStreamRead(ArchiveFactory.AutoFactory, readerOptions, testArchives);
protected Task ArchiveStreamReadAsync(
protected void ArchiveStreamRead(
IArchiveFactory archiveFactory,
ReaderOptions? readerOptions = null,
params string[] testArchives
) =>
ArchiveStreamReadAsync(
ArchiveStreamRead(
archiveFactory,
readerOptions,
testArchives.Select(x => Path.Combine(TEST_ARCHIVES_PATH, x))
);
protected async Task ArchiveStreamReadAsync(
protected void ArchiveStreamRead(
IArchiveFactory archiveFactory,
ReaderOptions? readerOptions,
IEnumerable<string> testArchives
@@ -139,7 +133,7 @@ public class ArchiveTests : ReaderTests
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteEntryToDirectoryAsync(
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -157,16 +151,16 @@ public class ArchiveTests : ReaderTests
}
}
protected Task ArchiveStreamMultiReadAsync(
protected void ArchiveStreamMultiRead(
ReaderOptions? readerOptions = null,
params string[] testArchives
) =>
ArchiveStreamMultiReadAsync(
ArchiveStreamMultiRead(
readerOptions,
testArchives.Select(x => Path.Combine(TEST_ARCHIVES_PATH, x))
);
protected async Task ArchiveStreamMultiReadAsync(
protected void ArchiveStreamMultiRead(
ReaderOptions? readerOptions,
IEnumerable<string> testArchives
)
@@ -180,7 +174,7 @@ public class ArchiveTests : ReaderTests
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteEntryToDirectoryAsync(
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -189,16 +183,16 @@ public class ArchiveTests : ReaderTests
VerifyFiles();
}
protected Task ArchiveOpenStreamReadAsync(
protected void ArchiveOpenStreamRead(
ReaderOptions? readerOptions = null,
params string[] testArchives
) =>
ArchiveOpenStreamReadAsync(
ArchiveOpenStreamRead(
readerOptions,
testArchives.Select(x => Path.Combine(TEST_ARCHIVES_PATH, x))
);
protected async Task ArchiveOpenStreamReadAsync(
protected void ArchiveOpenStreamRead(
ReaderOptions? readerOptions,
IEnumerable<string> testArchives
)
@@ -212,7 +206,7 @@ public class ArchiveTests : ReaderTests
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteEntryToDirectoryAsync(
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -258,7 +252,7 @@ public class ArchiveTests : ReaderTests
}
}
protected async Task ArchiveExtractToDirectoryAsync(
protected void ArchiveExtractToDirectory(
string testArchive,
ReaderOptions? readerOptions = null
)
@@ -266,12 +260,12 @@ public class ArchiveTests : ReaderTests
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using (var archive = ArchiveFactory.Open(new FileInfo(testArchive), readerOptions))
{
await archive.ExtractToDirectoryAsync(SCRATCH_FILES_PATH);
archive.ExtractToDirectory(SCRATCH_FILES_PATH);
}
VerifyFiles();
}
protected async Task ArchiveFileReadAsync(
protected void ArchiveFileRead(
IArchiveFactory archiveFactory,
string testArchive,
ReaderOptions? readerOptions = null
@@ -282,7 +276,7 @@ public class ArchiveTests : ReaderTests
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteEntryToDirectoryAsync(
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -291,8 +285,8 @@ public class ArchiveTests : ReaderTests
VerifyFiles();
}
protected Task ArchiveFileReadAsync(string testArchive, ReaderOptions? readerOptions = null) =>
ArchiveFileReadAsync(ArchiveFactory.AutoFactory, testArchive, readerOptions);
protected void ArchiveFileRead(string testArchive, ReaderOptions? readerOptions = null) =>
ArchiveFileRead(ArchiveFactory.AutoFactory, testArchive, readerOptions);
protected void ArchiveFileSkip(
string testArchive,
@@ -316,14 +310,14 @@ public class ArchiveTests : ReaderTests
/// <summary>
/// Demonstrate the ExtractionOptions.PreserveFileTime and ExtractionOptions.PreserveAttributes extract options
/// </summary>
protected async Task ArchiveFileReadExAsync(string testArchive)
protected void ArchiveFileReadEx(string testArchive)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using (var archive = ArchiveFactory.Open(testArchive))
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteEntryToDirectoryAsync(
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions
{
@@ -338,7 +332,7 @@ public class ArchiveTests : ReaderTests
VerifyFilesEx();
}
protected async Task ArchiveDeltaDistanceReadAsync(string testArchive)
protected void ArchiveDeltaDistanceRead(string testArchive)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using var archive = ArchiveFactory.Open(testArchive);
@@ -347,7 +341,7 @@ public class ArchiveTests : ReaderTests
if (!entry.IsDirectory)
{
var memory = new MemoryStream();
await entry.WriteToAsync(memory);
entry.WriteTo(memory);
memory.Position = 0;

View File

@@ -1,6 +1,5 @@
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Tar;
@@ -14,13 +13,13 @@ public class GZipArchiveTests : ArchiveTests
public GZipArchiveTests() => UseExtensionInsteadOfNameToVerify = true;
[Fact]
public async Task GZip_Archive_Generic()
public void GZip_Archive_Generic()
{
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")))
using (var archive = ArchiveFactory.Open(stream))
{
var entry = archive.Entries.First();
await entry.WriteToFileAsync(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
var size = entry.Size;
var scratch = new FileInfo(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar"));
@@ -29,20 +28,20 @@ public class GZipArchiveTests : ArchiveTests
Assert.Equal(size, scratch.Length);
Assert.Equal(size, test.Length);
}
await CompareArchivesByPathAsync(
CompareArchivesByPath(
Path.Combine(SCRATCH_FILES_PATH, "Tar.tar"),
Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar")
);
}
[Fact]
public async Task GZip_Archive()
public void GZip_Archive()
{
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")))
using (var archive = GZipArchive.Open(stream))
{
var entry = archive.Entries.First();
await entry.WriteToFileAsync(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
var size = entry.Size;
var scratch = new FileInfo(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar"));
@@ -51,7 +50,7 @@ public class GZipArchiveTests : ArchiveTests
Assert.Equal(size, scratch.Length);
Assert.Equal(size, test.Length);
}
await CompareArchivesByPathAsync(
CompareArchivesByPath(
Path.Combine(SCRATCH_FILES_PATH, "Tar.tar"),
Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar")
);

View File

@@ -1,5 +1,4 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers.GZip;
@@ -12,15 +11,15 @@ public class GZipReaderTests : ReaderTests
public GZipReaderTests() => UseExtensionInsteadOfNameToVerify = true;
[Fact]
public Task GZip_Reader_Generic() => ReadAsync("Tar.tar.gz", CompressionType.GZip);
public void GZip_Reader_Generic() => Read("Tar.tar.gz", CompressionType.GZip);
[Fact]
public async Task GZip_Reader_Generic2()
public void GZip_Reader_Generic2()
{
//read only as GZip item
using Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"));
using var reader = GZipReader.Open(new SharpCompressStream(stream));
while (await reader.MoveToNextEntryAsync()) // Crash here
while (reader.MoveToNextEntry()) // Crash here
{
Assert.NotEqual(0, reader.Entry.Size);
Assert.NotEqual(0, reader.Entry.Crc);

View File

@@ -1,5 +1,4 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Writers;
using SharpCompress.Writers.GZip;
@@ -13,7 +12,7 @@ public class GZipWriterTests : WriterTests
: base(ArchiveType.GZip) => UseExtensionInsteadOfNameToVerify = true;
[Fact]
public async Task GZip_Writer_Generic()
public void GZip_Writer_Generic()
{
using (
Stream stream = File.Open(
@@ -26,14 +25,14 @@ public class GZipWriterTests : WriterTests
{
writer.Write("Tar.tar", Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar"));
}
await CompareArchivesByPathAsync(
CompareArchivesByPath(
Path.Combine(SCRATCH_FILES_PATH, "Tar.tar.gz"),
Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")
);
}
[Fact]
public async Task GZip_Writer()
public void GZip_Writer()
{
using (
Stream stream = File.Open(
@@ -46,7 +45,7 @@ public class GZipWriterTests : WriterTests
{
writer.Write("Tar.tar", Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar"));
}
await CompareArchivesByPathAsync(
CompareArchivesByPath(
Path.Combine(SCRATCH_FILES_PATH, "Tar.tar.gz"),
Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")
);
@@ -61,7 +60,7 @@ public class GZipWriterTests : WriterTests
});
[Fact]
public async Task GZip_Writer_Entry_Path_With_Dir()
public void GZip_Writer_Entry_Path_With_Dir()
{
using (
Stream stream = File.Open(
@@ -75,7 +74,7 @@ public class GZipWriterTests : WriterTests
var path = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar");
writer.Write(path, path); //covers issue #532
}
await CompareArchivesByPathAsync(
CompareArchivesByPath(
Path.Combine(SCRATCH_FILES_PATH, "Tar.tar.gz"),
Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")
);

View File

@@ -1,6 +1,5 @@
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Rar;
using SharpCompress.Common;
@@ -13,57 +12,57 @@ namespace SharpCompress.Test.Rar;
public class RarArchiveTests : ArchiveTests
{
[Fact]
public Task Rar_EncryptedFileAndHeader_Archive() =>
ReadRarPasswordAsync("Rar.encrypted_filesAndHeader.rar", "test");
public void Rar_EncryptedFileAndHeader_Archive() =>
ReadRarPassword("Rar.encrypted_filesAndHeader.rar", "test");
[Fact]
public Task Rar_EncryptedFileAndHeader_NoPasswordExceptionTest() =>
Assert.ThrowsAsync(
public void Rar_EncryptedFileAndHeader_NoPasswordExceptionTest() =>
Assert.Throws(
typeof(CryptographicException),
async () => await ReadRarPasswordAsync("Rar.encrypted_filesAndHeader.rar", null)
() => ReadRarPassword("Rar.encrypted_filesAndHeader.rar", null)
);
[Fact]
public Task Rar5_EncryptedFileAndHeader_Archive() =>
ReadRarPasswordAsync("Rar5.encrypted_filesAndHeader.rar", "test");
public void Rar5_EncryptedFileAndHeader_Archive() =>
ReadRarPassword("Rar5.encrypted_filesAndHeader.rar", "test");
[Fact]
public Task Rar5_EncryptedFileAndHeader_Archive_Err() =>
Assert.ThrowsAsync(
public void Rar5_EncryptedFileAndHeader_Archive_Err() =>
Assert.Throws(
typeof(CryptographicException),
async () => await ReadRarPasswordAsync("Rar5.encrypted_filesAndHeader.rar", "failed")
() => ReadRarPassword("Rar5.encrypted_filesAndHeader.rar", "failed")
);
[Fact]
public Task Rar5_EncryptedFileAndHeader_NoPasswordExceptionTest() =>
Assert.ThrowsAsync(
public void Rar5_EncryptedFileAndHeader_NoPasswordExceptionTest() =>
Assert.Throws(
typeof(CryptographicException),
async () => await ReadRarPasswordAsync("Rar5.encrypted_filesAndHeader.rar", null)
() => ReadRarPassword("Rar5.encrypted_filesAndHeader.rar", null)
);
[Fact]
public Task Rar_EncryptedFileOnly_Archive() =>
ReadRarPasswordAsync("Rar.encrypted_filesOnly.rar", "test");
public void Rar_EncryptedFileOnly_Archive() =>
ReadRarPassword("Rar.encrypted_filesOnly.rar", "test");
[Fact]
public Task Rar_EncryptedFileOnly_Archive_Err() =>
Assert.ThrowsAsync(
public void Rar_EncryptedFileOnly_Archive_Err() =>
Assert.Throws(
typeof(CryptographicException),
async () => await ReadRarPasswordAsync("Rar5.encrypted_filesOnly.rar", "failed")
() => ReadRarPassword("Rar5.encrypted_filesOnly.rar", "failed")
);
[Fact]
public Task Rar5_EncryptedFileOnly_Archive() =>
ReadRarPasswordAsync("Rar5.encrypted_filesOnly.rar", "test");
public void Rar5_EncryptedFileOnly_Archive() =>
ReadRarPassword("Rar5.encrypted_filesOnly.rar", "test");
[Fact]
public Task Rar_Encrypted_Archive() => ReadRarPasswordAsync("Rar.Encrypted.rar", "test");
public void Rar_Encrypted_Archive() => ReadRarPassword("Rar.Encrypted.rar", "test");
[Fact]
public Task Rar5_Encrypted_Archive() =>
ReadRarPasswordAsync("Rar5.encrypted_filesAndHeader.rar", "test");
public void Rar5_Encrypted_Archive() =>
ReadRarPassword("Rar5.encrypted_filesAndHeader.rar", "test");
private async Task ReadRarPasswordAsync(string testArchive, string? password)
private void ReadRarPassword(string testArchive, string? password)
{
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, testArchive)))
using (
@@ -78,7 +77,7 @@ public class RarArchiveTests : ArchiveTests
if (!entry.IsDirectory)
{
Assert.Equal(CompressionType.Rar, entry.CompressionType);
await entry.WriteEntryToDirectoryAsync(
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -89,12 +88,12 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public Task Rar_Multi_Archive_Encrypted() =>
Assert.ThrowsAsync<InvalidFormatException>(async () =>
await ArchiveFileReadPasswordAsync("Rar.EncryptedParts.part01.rar", "test")
public void Rar_Multi_Archive_Encrypted() =>
Assert.Throws<InvalidFormatException>(() =>
ArchiveFileReadPassword("Rar.EncryptedParts.part01.rar", "test")
);
protected async Task ArchiveFileReadPasswordAsync(string archiveName, string password)
protected void ArchiveFileReadPassword(string archiveName, string password)
{
using (
var archive = RarArchive.Open(
@@ -105,7 +104,7 @@ public class RarArchiveTests : ArchiveTests
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteEntryToDirectoryAsync(
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -115,28 +114,28 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public Task Rar_None_ArchiveStreamRead() => ArchiveStreamReadAsync("Rar.none.rar");
public void Rar_None_ArchiveStreamRead() => ArchiveStreamRead("Rar.none.rar");
[Fact]
public Task Rar5_None_ArchiveStreamRead() => ArchiveStreamReadAsync("Rar5.none.rar");
public void Rar5_None_ArchiveStreamRead() => ArchiveStreamRead("Rar5.none.rar");
[Fact]
public Task Rar_ArchiveStreamRead() => ArchiveStreamReadAsync("Rar.rar");
public void Rar_ArchiveStreamRead() => ArchiveStreamRead("Rar.rar");
[Fact]
public Task Rar5_ArchiveStreamRead() => ArchiveStreamReadAsync("Rar5.rar");
public void Rar5_ArchiveStreamRead() => ArchiveStreamRead("Rar5.rar");
[Fact]
public Task Rar_test_invalid_exttime_ArchiveStreamRead() =>
public void Rar_test_invalid_exttime_ArchiveStreamRead() =>
DoRar_test_invalid_exttime_ArchiveStreamRead("Rar.test_invalid_exttime.rar");
private async Task DoRar_test_invalid_exttime_ArchiveStreamRead(string filename)
private void DoRar_test_invalid_exttime_ArchiveStreamRead(string filename)
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename));
using var archive = ArchiveFactory.Open(stream);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteEntryToDirectoryAsync(
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -144,14 +143,14 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public async Task Rar_Jpg_ArchiveStreamRead()
public void Rar_Jpg_ArchiveStreamRead()
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Rar.jpeg.jpg"));
using (var archive = RarArchive.Open(stream, new ReaderOptions { LookForHeader = true }))
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteEntryToDirectoryAsync(
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -161,12 +160,12 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public Task Rar_IsSolidArchiveCheck() => DoRar_IsSolidArchiveCheck("Rar.rar");
public void Rar_IsSolidArchiveCheck() => DoRar_IsSolidArchiveCheck("Rar.rar");
[Fact]
public Task Rar5_IsSolidArchiveCheck() => DoRar_IsSolidArchiveCheck("Rar5.rar");
public void Rar5_IsSolidArchiveCheck() => DoRar_IsSolidArchiveCheck("Rar5.rar");
private async Task DoRar_IsSolidArchiveCheck(string filename)
private void DoRar_IsSolidArchiveCheck(string filename)
{
using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename)))
{
@@ -174,7 +173,7 @@ public class RarArchiveTests : ArchiveTests
Assert.False(archive.IsSolid);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteEntryToDirectoryAsync(
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -214,22 +213,22 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public Task Rar_Solid_ArchiveStreamRead() => ArchiveStreamReadAsync("Rar.solid.rar");
public void Rar_Solid_ArchiveStreamRead() => ArchiveStreamRead("Rar.solid.rar");
[Fact]
public Task Rar5_Solid_ArchiveStreamRead() => ArchiveStreamReadAsync("Rar5.solid.rar");
public void Rar5_Solid_ArchiveStreamRead() => ArchiveStreamRead("Rar5.solid.rar");
[Fact]
public Task Rar_Solid_StreamRead_Extract_All() =>
ArchiveStreamReadExtractAllAsync("Rar.solid.rar", CompressionType.Rar);
public void Rar_Solid_StreamRead_Extract_All() =>
ArchiveStreamReadExtractAll("Rar.solid.rar", CompressionType.Rar);
[Fact]
public Task Rar5_Solid_StreamRead_Extract_All() =>
ArchiveStreamReadExtractAllAsync("Rar5.solid.rar", CompressionType.Rar);
public void Rar5_Solid_StreamRead_Extract_All() =>
ArchiveStreamReadExtractAll("Rar5.solid.rar", CompressionType.Rar);
[Fact]
public Task Rar_Multi_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamReadAsync(
public void Rar_Multi_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamRead(
[
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
@@ -242,8 +241,8 @@ public class RarArchiveTests : ArchiveTests
);
[Fact]
public Task Rar5_Multi_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamReadAsync(
public void Rar5_Multi_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamRead(
[
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
@@ -255,7 +254,7 @@ public class RarArchiveTests : ArchiveTests
false
);
private async Task DoRar_Multi_ArchiveStreamReadAsync(string[] archives, bool isSolid)
private void DoRar_Multi_ArchiveStreamRead(string[] archives, bool isSolid)
{
using var archive = RarArchive.Open(
archives.Select(s => Path.Combine(TEST_ARCHIVES_PATH, s)).Select(File.OpenRead)
@@ -263,7 +262,7 @@ public class RarArchiveTests : ArchiveTests
Assert.Equal(archive.IsSolid, isSolid);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteEntryToDirectoryAsync(
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -271,8 +270,8 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public Task Rar5_MultiSolid_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamReadAsync(
public void Rar5_MultiSolid_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamRead(
[
"Rar.multi.solid.part01.rar",
"Rar.multi.solid.part02.rar",
@@ -285,16 +284,16 @@ public class RarArchiveTests : ArchiveTests
);
[Fact]
public Task RarNoneArchiveFileRead() => ArchiveFileReadAsync("Rar.none.rar");
public void RarNoneArchiveFileRead() => ArchiveFileRead("Rar.none.rar");
[Fact]
public Task Rar5NoneArchiveFileRead() => ArchiveFileReadAsync("Rar5.none.rar");
public void Rar5NoneArchiveFileRead() => ArchiveFileRead("Rar5.none.rar");
[Fact]
public Task Rar_ArchiveFileRead() => ArchiveFileReadAsync("Rar.rar");
public void Rar_ArchiveFileRead() => ArchiveFileRead("Rar.rar");
[Fact]
public Task Rar5_ArchiveFileRead() => ArchiveFileReadAsync("Rar5.rar");
public void Rar5_ArchiveFileRead() => ArchiveFileRead("Rar5.rar");
[Fact]
public void Rar_ArchiveFileRead_HasDirectories() =>
@@ -313,7 +312,7 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public async Task Rar_Jpg_ArchiveFileRead()
public void Rar_Jpg_ArchiveFileRead()
{
using (
var archive = RarArchive.Open(
@@ -324,7 +323,7 @@ public class RarArchiveTests : ArchiveTests
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteEntryToDirectoryAsync(
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -334,14 +333,14 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public Task Rar_Solid_ArchiveFileRead() => ArchiveFileReadAsync("Rar.solid.rar");
public void Rar_Solid_ArchiveFileRead() => ArchiveFileRead("Rar.solid.rar");
[Fact]
public Task Rar5_Solid_ArchiveFileRead() => ArchiveFileReadAsync("Rar5.solid.rar");
public void Rar5_Solid_ArchiveFileRead() => ArchiveFileRead("Rar5.solid.rar");
[Fact]
public Task Rar2_Multi_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamReadAsync(
public void Rar2_Multi_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamRead(
[
"Rar2.multi.rar",
"Rar2.multi.r00",
@@ -355,17 +354,17 @@ public class RarArchiveTests : ArchiveTests
);
[Fact]
public Task Rar2_Multi_ArchiveFileRead() => ArchiveFileReadAsync("Rar2.multi.rar"); //r00, r01...
public void Rar2_Multi_ArchiveFileRead() => ArchiveFileRead("Rar2.multi.rar"); //r00, r01...
[Fact]
public Task Rar2_ArchiveFileRead() => ArchiveFileReadAsync("Rar2.rar");
public void Rar2_ArchiveFileRead() => ArchiveFileRead("Rar2.rar");
[Fact]
public async Task Rar15_ArchiveFileRead()
public void Rar15_ArchiveFileRead()
{
UseExtensionInsteadOfNameToVerify = true;
UseCaseInsensitiveToVerify = true;
await ArchiveFileReadAsync("Rar15.rar");
ArchiveFileRead("Rar15.rar");
}
[Fact]
@@ -409,10 +408,10 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public Task Rar4_Multi_ArchiveFileRead() => ArchiveFileReadAsync("Rar4.multi.part01.rar");
public void Rar4_Multi_ArchiveFileRead() => ArchiveFileRead("Rar4.multi.part01.rar");
[Fact]
public Task Rar4_ArchiveFileRead() => ArchiveFileReadAsync("Rar4.rar");
public void Rar4_ArchiveFileRead() => ArchiveFileRead("Rar4.rar");
[Fact]
public void Rar_GetPartsSplit() =>
@@ -462,8 +461,8 @@ public class RarArchiveTests : ArchiveTests
);
[Fact]
public Task Rar4_Multi_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamReadAsync(
public void Rar4_Multi_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamRead(
[
"Rar4.multi.part01.rar",
"Rar4.multi.part02.rar",
@@ -478,8 +477,8 @@ public class RarArchiveTests : ArchiveTests
//no extension to test the lib identifies the archive by content not ext
[Fact]
public Task Rar4_Split_ArchiveStreamRead() =>
ArchiveStreamMultiReadAsync(
public void Rar4_Split_ArchiveStreamRead() =>
ArchiveStreamMultiRead(
null,
[
"Rar4.split.001",
@@ -493,7 +492,7 @@ public class RarArchiveTests : ArchiveTests
//will detect and load other files
[Fact]
public Task Rar4_Multi_ArchiveFirstFileRead() => ArchiveFileReadAsync("Rar4.multi.part01.rar");
public void Rar4_Multi_ArchiveFirstFileRead() => ArchiveFileRead("Rar4.multi.part01.rar");
//"Rar4.multi.part02.rar",
//"Rar4.multi.part03.rar",
@@ -503,7 +502,7 @@ public class RarArchiveTests : ArchiveTests
//"Rar4.multi.part07.rar"
//will detect and load other files
[Fact]
public Task Rar4_Split_ArchiveFirstFileRead() => ArchiveFileReadAsync("Rar4.split.001");
public void Rar4_Split_ArchiveFirstFileRead() => ArchiveFileRead("Rar4.split.001");
//"Rar4.split.002",
//"Rar4.split.003",
@@ -512,8 +511,8 @@ public class RarArchiveTests : ArchiveTests
//"Rar4.split.006"
//will detect and load other files
[Fact]
public Task Rar4_Split_ArchiveStreamFirstFileRead() =>
ArchiveStreamMultiReadAsync(
public void Rar4_Split_ArchiveStreamFirstFileRead() =>
ArchiveStreamMultiRead(
null,
[
"Rar4.split.001",
@@ -527,8 +526,8 @@ public class RarArchiveTests : ArchiveTests
//open with ArchiveFactory.Open and stream
[Fact]
public Task Rar4_Split_ArchiveOpen() =>
ArchiveOpenStreamReadAsync(
public void Rar4_Split_ArchiveOpen() =>
ArchiveOpenStreamRead(
null,
"Rar4.split.001",
"Rar4.split.002",
@@ -540,8 +539,8 @@ public class RarArchiveTests : ArchiveTests
//open with ArchiveFactory.Open and stream
[Fact]
public Task Rar4_Multi_ArchiveOpen() =>
ArchiveOpenStreamReadAsync(
public void Rar4_Multi_ArchiveOpen() =>
ArchiveOpenStreamRead(
null,
"Rar4.multi.part01.rar",
"Rar4.multi.part02.rar",
@@ -571,10 +570,10 @@ public class RarArchiveTests : ArchiveTests
);
[Fact]
public Task Rar_Multi_ArchiveFileRead() => ArchiveFileReadAsync("Rar.multi.part01.rar");
public void Rar_Multi_ArchiveFileRead() => ArchiveFileRead("Rar.multi.part01.rar");
[Fact]
public Task Rar5_Multi_ArchiveFileRead() => ArchiveFileReadAsync("Rar5.multi.part01.rar");
public void Rar5_Multi_ArchiveFileRead() => ArchiveFileRead("Rar5.multi.part01.rar");
[Fact]
public void Rar_IsFirstVolume_True() => DoRar_IsFirstVolume_True("Rar.multi.part01.rar");
@@ -603,7 +602,7 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public Task Rar5_CRC_Blake2_Archive() => ArchiveFileReadAsync("Rar5.crc_blake2.rar");
public void Rar5_CRC_Blake2_Archive() => ArchiveFileRead("Rar5.crc_blake2.rar");
[Fact]
void Rar_Iterate_Archive() =>

View File

@@ -1,4 +1,4 @@
/*using System;
using System;
using System.Collections;
using System.IO;
using System.Linq;
@@ -422,4 +422,4 @@ public class RarReaderTests : ReaderTests
Assert.Equal(expectedOrder.Pop(), reader.Entry.Key);
}
}
}*/
}

View File

@@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -12,7 +11,7 @@ namespace SharpCompress.Test;
public abstract class ReaderTests : TestBase
{
protected async Task ReadAsync(
protected void Read(
string testArchive,
CompressionType expectedCompression,
ReaderOptions? options = null
@@ -23,14 +22,14 @@ public abstract class ReaderTests : TestBase
options ??= new ReaderOptions() { BufferSize = 0x20000 }; //test larger buffer size (need test rather than eyeballing debug logs :P)
options.LeaveStreamOpen = true;
await ReadAsyncImpl(testArchive, expectedCompression, options);
ReadImpl(testArchive, expectedCompression, options);
options.LeaveStreamOpen = false;
await ReadAsyncImpl(testArchive, expectedCompression, options);
ReadImpl(testArchive, expectedCompression, options);
VerifyFiles();
}
private async Task ReadAsyncImpl(
private void ReadImpl(
string testArchive,
CompressionType expectedCompression,
ReaderOptions options
@@ -46,7 +45,7 @@ public abstract class ReaderTests : TestBase
using var testStream = new TestStream(protectedStream);
using (var reader = ReaderFactory.Open(testStream, options))
{
await UseReaderAsync(reader, expectedCompression);
UseReader(reader, expectedCompression);
protectedStream.ThrowOnDispose = false;
Assert.False(testStream.IsDisposed, $"{nameof(testStream)} prematurely closed");
}
@@ -58,18 +57,42 @@ public abstract class ReaderTests : TestBase
Assert.True(options.LeaveStreamOpen != testStream.IsDisposed, message);
}
public async Task UseReaderAsync(IReader reader, CompressionType expectedCompression)
public void UseReader(IReader reader, CompressionType expectedCompression)
{
while (await reader.MoveToNextEntryAsync())
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(expectedCompression, reader.Entry.CompressionType);
await reader.WriteEntryToDirectoryAsync(
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
}
protected void Iterate(
string testArchive,
string fileOrder,
CompressionType expectedCompression,
ReaderOptions? options = null
)
{
if (!Environment.OSVersion.IsWindows())
{
fileOrder = fileOrder.Replace('\\', '/');
}
var expected = new Stack<string>(fileOrder.Split(' '));
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using var file = File.OpenRead(testArchive);
using var forward = new ForwardOnlyStream(file);
using var reader = ReaderFactory.Open(forward, options);
while (reader.MoveToNextEntry())
{
Assert.Equal(expectedCompression, reader.Entry.CompressionType);
Assert.Equal(expected.Pop(), reader.Entry.Key);
}
}
}

View File

@@ -1,7 +1,6 @@
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.SevenZip;
using SharpCompress.Common;
@@ -14,93 +13,78 @@ namespace SharpCompress.Test.SevenZip;
public class SevenZipArchiveTests : ArchiveTests
{
[Fact]
public Task SevenZipArchive_Solid_StreamRead() => ArchiveStreamReadAsync("7Zip.solid.7z");
public void SevenZipArchive_Solid_StreamRead() => ArchiveStreamRead("7Zip.solid.7z");
[Fact]
public Task SevenZipArchive_NonSolid_StreamRead() => ArchiveStreamReadAsync("7Zip.nonsolid.7z");
public void SevenZipArchive_NonSolid_StreamRead() => ArchiveStreamRead("7Zip.nonsolid.7z");
[Fact]
public Task SevenZipArchive_LZMA_StreamRead() => ArchiveStreamReadAsync("7Zip.LZMA.7z");
public void SevenZipArchive_LZMA_StreamRead() => ArchiveStreamRead("7Zip.LZMA.7z");
[Fact]
public Task SevenZipArchive_LZMA_PathRead() => ArchiveFileReadAsync("7Zip.LZMA.7z");
public void SevenZipArchive_LZMA_PathRead() => ArchiveFileRead("7Zip.LZMA.7z");
[Fact]
public Task SevenZipArchive_LZMAAES_StreamRead() =>
ArchiveStreamReadAsync("7Zip.LZMA.Aes.7z", new ReaderOptions { Password = "testpassword" });
public void SevenZipArchive_LZMAAES_StreamRead() =>
ArchiveStreamRead("7Zip.LZMA.Aes.7z", new ReaderOptions { Password = "testpassword" });
[Fact]
public Task SevenZipArchive_LZMAAES_PathRead() =>
ArchiveFileReadAsync("7Zip.LZMA.Aes.7z", new ReaderOptions { Password = "testpassword" });
public void SevenZipArchive_LZMAAES_PathRead() =>
ArchiveFileRead("7Zip.LZMA.Aes.7z", new ReaderOptions { Password = "testpassword" });
[Fact]
public Task SevenZipArchive_LZMAAES_NoPasswordExceptionTest() =>
Assert.ThrowsAsync(
public void SevenZipArchive_LZMAAES_NoPasswordExceptionTest() =>
Assert.Throws(
typeof(CryptographicException),
async () =>
await ArchiveFileReadAsync(
"7Zip.LZMA.Aes.7z",
new ReaderOptions { Password = null }
)
() => ArchiveFileRead("7Zip.LZMA.Aes.7z", new ReaderOptions { Password = null })
); //was failing with ArgumentNullException not CryptographicException like rar
[Fact]
public Task SevenZipArchive_PPMd_StreamRead() => ArchiveStreamReadAsync("7Zip.PPMd.7z");
public void SevenZipArchive_PPMd_StreamRead() => ArchiveStreamRead("7Zip.PPMd.7z");
[Fact]
public Task SevenZipArchive_PPMd_StreamRead_Extract_All() =>
ArchiveStreamReadExtractAllAsync("7Zip.PPMd.7z", CompressionType.PPMd);
public void SevenZipArchive_PPMd_StreamRead_Extract_All() =>
ArchiveStreamReadExtractAll("7Zip.PPMd.7z", CompressionType.PPMd);
[Fact]
public Task SevenZipArchive_PPMd_PathRead() => ArchiveFileReadAsync("7Zip.PPMd.7z");
public void SevenZipArchive_PPMd_PathRead() => ArchiveFileRead("7Zip.PPMd.7z");
[Fact]
public Task SevenZipArchive_LZMA2_StreamRead() => ArchiveStreamReadAsync("7Zip.LZMA2.7z");
public void SevenZipArchive_LZMA2_StreamRead() => ArchiveStreamRead("7Zip.LZMA2.7z");
[Fact]
public Task SevenZipArchive_LZMA2_PathRead() => ArchiveFileReadAsync("7Zip.LZMA2.7z");
public void SevenZipArchive_LZMA2_PathRead() => ArchiveFileRead("7Zip.LZMA2.7z");
[Fact]
public Task SevenZipArchive_LZMA2_EXE_StreamRead() =>
ArchiveStreamReadAsync(
new SevenZipFactory(),
"7Zip.LZMA2.exe",
new() { LookForHeader = true }
);
public void SevenZipArchive_LZMA2_EXE_StreamRead() =>
ArchiveStreamRead(new SevenZipFactory(), "7Zip.LZMA2.exe", new() { LookForHeader = true });
[Fact]
public Task SevenZipArchive_LZMA2_EXE_PathRead() =>
ArchiveFileReadAsync(
new SevenZipFactory(),
"7Zip.LZMA2.exe",
new() { LookForHeader = true }
);
public void SevenZipArchive_LZMA2_EXE_PathRead() =>
ArchiveFileRead(new SevenZipFactory(), "7Zip.LZMA2.exe", new() { LookForHeader = true });
[Fact]
public Task SevenZipArchive_LZMA2AES_StreamRead() =>
ArchiveStreamReadAsync(
"7Zip.LZMA2.Aes.7z",
new ReaderOptions { Password = "testpassword" }
);
public void SevenZipArchive_LZMA2AES_StreamRead() =>
ArchiveStreamRead("7Zip.LZMA2.Aes.7z", new ReaderOptions { Password = "testpassword" });
[Fact]
public Task SevenZipArchive_LZMA2AES_PathRead() =>
ArchiveFileReadAsync("7Zip.LZMA2.Aes.7z", new ReaderOptions { Password = "testpassword" });
public void SevenZipArchive_LZMA2AES_PathRead() =>
ArchiveFileRead("7Zip.LZMA2.Aes.7z", new ReaderOptions { Password = "testpassword" });
[Fact]
public Task SevenZipArchive_BZip2_StreamRead() => ArchiveStreamReadAsync("7Zip.BZip2.7z");
public void SevenZipArchive_BZip2_StreamRead() => ArchiveStreamRead("7Zip.BZip2.7z");
[Fact]
public Task SevenZipArchive_BZip2_PathRead() => ArchiveFileReadAsync("7Zip.BZip2.7z");
public void SevenZipArchive_BZip2_PathRead() => ArchiveFileRead("7Zip.BZip2.7z");
[Fact]
public Task SevenZipArchive_LZMA_Time_Attributes_PathRead() =>
ArchiveFileReadExAsync("7Zip.LZMA.7z");
public void SevenZipArchive_LZMA_Time_Attributes_PathRead() =>
ArchiveFileReadEx("7Zip.LZMA.7z");
[Fact]
public Task SevenZipArchive_BZip2_Split() =>
Assert.ThrowsAsync<InvalidOperationException>(async () =>
await ArchiveStreamReadAsync(
public void SevenZipArchive_BZip2_Split() =>
Assert.Throws<InvalidOperationException>(() =>
ArchiveStreamRead(
null,
"Original.7z.001",
"Original.7z.002",
@@ -114,8 +98,8 @@ public class SevenZipArchiveTests : ArchiveTests
//Same as archive as Original.7z.001 ... 007 files without the root directory 'Original\' in the archive - this caused the verify to fail
[Fact]
public Task SevenZipArchive_BZip2_Split_Working() =>
ArchiveStreamMultiReadAsync(
public void SevenZipArchive_BZip2_Split_Working() =>
ArchiveStreamMultiRead(
null,
"7Zip.BZip2.split.001",
"7Zip.BZip2.split.002",
@@ -128,8 +112,8 @@ public class SevenZipArchiveTests : ArchiveTests
//will detect and load other files
[Fact]
public Task SevenZipArchive_BZip2_Split_FirstFileRead() =>
ArchiveFileReadAsync("7Zip.BZip2.split.001");
public void SevenZipArchive_BZip2_Split_FirstFileRead() =>
ArchiveFileRead("7Zip.BZip2.split.001");
//"7Zip.BZip2.split.002",
//"7Zip.BZip2.split.003",
@@ -139,15 +123,15 @@ public class SevenZipArchiveTests : ArchiveTests
//"7Zip.BZip2.split.007"
[Fact]
public Task SevenZipArchive_ZSTD_StreamRead() => ArchiveStreamReadAsync("7Zip.ZSTD.7z");
public void SevenZipArchive_ZSTD_StreamRead() => ArchiveStreamRead("7Zip.ZSTD.7z");
[Fact]
public Task SevenZipArchive_ZSTD_PathRead() => ArchiveFileReadAsync("7Zip.ZSTD.7z");
public void SevenZipArchive_ZSTD_PathRead() => ArchiveFileRead("7Zip.ZSTD.7z");
[Fact]
public Task SevenZipArchive_ZSTD_Split() =>
Assert.ThrowsAsync<InvalidOperationException>(async () =>
await ArchiveStreamReadAsync(
public void SevenZipArchive_ZSTD_Split() =>
Assert.Throws<InvalidOperationException>(() =>
ArchiveStreamRead(
null,
"7Zip.ZSTD.Split.7z.001",
"7Zip.ZSTD.Split.7z.002",
@@ -159,53 +143,53 @@ public class SevenZipArchiveTests : ArchiveTests
);
[Fact]
public Task SevenZipArchive_EOS_FileRead() => ArchiveFileReadAsync("7Zip.eos.7z");
public void SevenZipArchive_EOS_FileRead() => ArchiveFileRead("7Zip.eos.7z");
[Fact]
public Task SevenZipArchive_Delta_FileRead() => ArchiveFileReadAsync("7Zip.delta.7z");
public void SevenZipArchive_Delta_FileRead() => ArchiveFileRead("7Zip.delta.7z");
[Fact]
public Task SevenZipArchive_ARM_FileRead() => ArchiveFileReadAsync("7Zip.ARM.7z");
public void SevenZipArchive_ARM_FileRead() => ArchiveFileRead("7Zip.ARM.7z");
[Fact]
public Task SevenZipArchive_ARMT_FileRead() => ArchiveFileReadAsync("7Zip.ARMT.7z");
public void SevenZipArchive_ARMT_FileRead() => ArchiveFileRead("7Zip.ARMT.7z");
[Fact]
public Task SevenZipArchive_BCJ_FileRead() => ArchiveFileReadAsync("7Zip.BCJ.7z");
public void SevenZipArchive_BCJ_FileRead() => ArchiveFileRead("7Zip.BCJ.7z");
[Fact]
public Task SevenZipArchive_BCJ2_FileRead() => ArchiveFileReadAsync("7Zip.BCJ2.7z");
public void SevenZipArchive_BCJ2_FileRead() => ArchiveFileRead("7Zip.BCJ2.7z");
[Fact]
public Task SevenZipArchive_IA64_FileRead() => ArchiveFileReadAsync("7Zip.IA64.7z");
public void SevenZipArchive_IA64_FileRead() => ArchiveFileRead("7Zip.IA64.7z");
[Fact]
public Task SevenZipArchive_PPC_FileRead() => ArchiveFileReadAsync("7Zip.PPC.7z");
public void SevenZipArchive_PPC_FileRead() => ArchiveFileRead("7Zip.PPC.7z");
[Fact]
public Task SevenZipArchive_SPARC_FileRead() => ArchiveFileReadAsync("7Zip.SPARC.7z");
public void SevenZipArchive_SPARC_FileRead() => ArchiveFileRead("7Zip.SPARC.7z");
[Fact]
public Task SevenZipArchive_ARM64_FileRead() => ArchiveFileReadAsync("7Zip.ARM64.7z");
public void SevenZipArchive_ARM64_FileRead() => ArchiveFileRead("7Zip.ARM64.7z");
[Fact]
public Task SevenZipArchive_RISCV_FileRead() => ArchiveFileReadAsync("7Zip.RISCV.7z");
public void SevenZipArchive_RISCV_FileRead() => ArchiveFileRead("7Zip.RISCV.7z");
[Fact]
public Task SevenZipArchive_Filters_FileRead() => ArchiveFileReadAsync("7Zip.Filters.7z");
public void SevenZipArchive_Filters_FileRead() => ArchiveFileRead("7Zip.Filters.7z");
[Fact]
public Task SevenZipArchive_Delta_Distance() =>
ArchiveDeltaDistanceReadAsync("7Zip.delta.distance.7z");
public void SevenZipArchive_Delta_Distance() =>
ArchiveDeltaDistanceRead("7Zip.delta.distance.7z");
[Fact]
public async Task SevenZipArchive_Tar_PathRead()
public void SevenZipArchive_Tar_PathRead()
{
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "7Zip.Tar.tar.7z")))
using (var archive = SevenZipArchive.Open(stream))
{
var entry = archive.Entries.First();
await entry.WriteToFileAsync(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
var size = entry.Size;
var scratch = new FileInfo(Path.Combine(SCRATCH_FILES_PATH, "7Zip.Tar.tar"));
@@ -215,7 +199,7 @@ public class SevenZipArchiveTests : ArchiveTests
Assert.Equal(size, test.Length);
}
await CompareArchivesByPathAsync(
CompareArchivesByPath(
Path.Combine(SCRATCH_FILES_PATH, "7Zip.Tar.tar"),
Path.Combine(TEST_ARCHIVES_PATH, "7Zip.Tar.tar")
);

View File

@@ -16,283 +16,283 @@ namespace SharpCompress.Test.Tar;
public class TarArchiveTests : ArchiveTests
{
public TarArchiveTests() => UseExtensionInsteadOfNameToVerify = true;
/*
[Fact]
public void TarArchiveStreamRead() => ArchiveStreamRead("Tar.tar");
[Fact]
public void TarArchivePathRead() => ArchiveFileRead("Tar.tar");
[Fact]
public void Tar_FileName_Exactly_100_Characters()
[Fact]
public void TarArchiveStreamRead() => ArchiveStreamRead("Tar.tar");
[Fact]
public void TarArchivePathRead() => ArchiveFileRead("Tar.tar");
[Fact]
public void Tar_FileName_Exactly_100_Characters()
{
var archive = "Tar_FileName_Exactly_100_Characters.tar";
// create the 100 char filename
var filename =
"filename_with_exactly_100_characters_______________________________________________________________X";
// Step 1: create a tar file containing a file with the test name
using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH2_FILES_PATH, archive)))
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, CompressionType.None))
using (Stream inputStream = new MemoryStream())
{
var archive = "Tar_FileName_Exactly_100_Characters.tar";
// create the 100 char filename
var filename =
"filename_with_exactly_100_characters_______________________________________________________________X";
// Step 1: create a tar file containing a file with the test name
using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH2_FILES_PATH, archive)))
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, CompressionType.None))
using (Stream inputStream = new MemoryStream())
{
var sw = new StreamWriter(inputStream);
sw.Write("dummy filecontent");
sw.Flush();
inputStream.Position = 0;
writer.Write(filename, inputStream, null);
}
// Step 2: check if the written tar file can be read correctly
var unmodified = Path.Combine(SCRATCH2_FILES_PATH, archive);
using (var archive2 = TarArchive.Open(unmodified))
{
Assert.Equal(1, archive2.Entries.Count);
Assert.Contains(filename, archive2.Entries.Select(entry => entry.Key));
foreach (var entry in archive2.Entries)
{
Assert.Equal(
"dummy filecontent",
new StreamReader(entry.OpenEntryStream()).ReadLine()
);
}
}
var sw = new StreamWriter(inputStream);
sw.Write("dummy filecontent");
sw.Flush();
inputStream.Position = 0;
writer.Write(filename, inputStream, null);
}
[Fact]
public void Tar_NonUstarArchiveWithLongNameDoesNotSkipEntriesAfterTheLongOne()
// Step 2: check if the written tar file can be read correctly
var unmodified = Path.Combine(SCRATCH2_FILES_PATH, archive);
using (var archive2 = TarArchive.Open(unmodified))
{
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "very long filename.tar");
using var archive = TarArchive.Open(unmodified);
Assert.Equal(5, archive.Entries.Count);
Assert.Contains("very long filename/", archive.Entries.Select(entry => entry.Key));
Assert.Contains(
"very long filename/very long filename very long filename very long filename very long filename very long filename very long filename very long filename very long filename very long filename very long filename.jpg",
archive.Entries.Select(entry => entry.Key)
);
Assert.Contains("z_file 1.txt", archive.Entries.Select(entry => entry.Key));
Assert.Contains("z_file 2.txt", archive.Entries.Select(entry => entry.Key));
Assert.Contains("z_file 3.txt", archive.Entries.Select(entry => entry.Key));
}
[Fact]
public void Tar_VeryLongFilepathReadback()
{
var archive = "Tar_VeryLongFilepathReadback.tar";
// create a very long filename
var longFilename = "";
for (var i = 0; i < 600; i = longFilename.Length)
Assert.Equal(1, archive2.Entries.Count);
Assert.Contains(filename, archive2.Entries.Select(entry => entry.Key));
foreach (var entry in archive2.Entries)
{
longFilename += i.ToString("D10") + "-";
}
longFilename += ".txt";
// Step 1: create a tar file containing a file with a long name
using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH2_FILES_PATH, archive)))
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, CompressionType.None))
using (Stream inputStream = new MemoryStream())
{
var sw = new StreamWriter(inputStream);
sw.Write("dummy filecontent");
sw.Flush();
inputStream.Position = 0;
writer.Write(longFilename, inputStream, null);
}
// Step 2: check if the written tar file can be read correctly
var unmodified = Path.Combine(SCRATCH2_FILES_PATH, archive);
using (var archive2 = TarArchive.Open(unmodified))
{
Assert.Equal(1, archive2.Entries.Count);
Assert.Contains(longFilename, archive2.Entries.Select(entry => entry.Key));
foreach (var entry in archive2.Entries)
{
Assert.Equal(
"dummy filecontent",
new StreamReader(entry.OpenEntryStream()).ReadLine()
);
}
}
}
[Fact]
public void Tar_UstarArchivePathReadLongName()
{
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "ustar with long names.tar");
using var archive = TarArchive.Open(unmodified);
Assert.Equal(6, archive.Entries.Count);
Assert.Contains("Directory/", archive.Entries.Select(entry => entry.Key));
Assert.Contains(
"Directory/Some file with veeeeeeeeeery loooooooooong name",
archive.Entries.Select(entry => entry.Key)
);
Assert.Contains(
"Directory/Directory with veeeeeeeeeery loooooooooong name/",
archive.Entries.Select(entry => entry.Key)
);
Assert.Contains(
"Directory/Directory with veeeeeeeeeery loooooooooong name/Some file with veeeeeeeeeery loooooooooong name",
archive.Entries.Select(entry => entry.Key)
);
Assert.Contains(
"Directory/Directory with veeeeeeeeeery loooooooooong name/Directory with veeeeeeeeeery loooooooooong name/",
archive.Entries.Select(entry => entry.Key)
);
Assert.Contains(
"Directory/Directory with veeeeeeeeeery loooooooooong name/Directory with veeeeeeeeeery loooooooooong name/Some file with veeeeeeeeeery loooooooooong name",
archive.Entries.Select(entry => entry.Key)
);
}
[Fact]
public void Tar_Create_New()
{
var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.tar");
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
// var aropt = new Ar
using (var archive = TarArchive.Create())
{
archive.AddAllFromDirectory(ORIGINAL_FILES_PATH);
var twopt = new TarWriterOptions(CompressionType.None, true);
twopt.ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(866) };
archive.SaveTo(scratchPath, twopt);
}
CompareArchivesByPath(unmodified, scratchPath);
}
[Fact]
public void Tar_Random_Write_Add()
{
var jpg = Path.Combine(ORIGINAL_FILES_PATH, "jpg", "test.jpg");
var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.mod.tar");
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.mod.tar");
var modified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
using (var archive = TarArchive.Open(unmodified))
{
archive.AddEntry("jpg\\test.jpg", jpg);
archive.SaveTo(scratchPath, CompressionType.None);
}
CompareArchivesByPath(modified, scratchPath);
}
[Fact]
public void Tar_Random_Write_Remove()
{
var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.mod.tar");
var modified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.mod.tar");
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
using (var archive = TarArchive.Open(unmodified))
{
var entry = archive.Entries.Single(x =>
x.Key.NotNull().EndsWith("jpg", StringComparison.OrdinalIgnoreCase)
Assert.Equal(
"dummy filecontent",
new StreamReader(entry.OpenEntryStream()).ReadLine()
);
archive.RemoveEntry(entry);
archive.SaveTo(scratchPath, CompressionType.None);
}
CompareArchivesByPath(modified, scratchPath);
}
[Fact]
public void Tar_Containing_Rar_Archive()
}
[Fact]
public void Tar_NonUstarArchiveWithLongNameDoesNotSkipEntriesAfterTheLongOne()
{
    // Entries that follow a long-name entry must still be enumerated.
    var archivePath = Path.Combine(TEST_ARCHIVES_PATH, "very long filename.tar");
    using var archive = TarArchive.Open(archivePath);
    var keys = archive.Entries.Select(entry => entry.Key).ToList();
    Assert.Equal(5, keys.Count);
    var expectedKeys = new[]
    {
        "very long filename/",
        "very long filename/very long filename very long filename very long filename very long filename very long filename very long filename very long filename very long filename very long filename very long filename.jpg",
        "z_file 1.txt",
        "z_file 2.txt",
        "z_file 3.txt",
    };
    foreach (var expected in expectedKeys)
    {
        Assert.Contains(expected, keys);
    }
}
[Fact]
public void Tar_VeryLongFilepathReadback()
{
    // Round-trips a tar entry whose name is several hundred characters long
    // and verifies both the name and the content read back intact.
    // NOTE: this region was diff-interleaved with fragments of three other
    // tests; reassembled from the surviving lines of this method only.
    var archive = "Tar_VeryLongFilepathReadback.tar";

    // Build the long filename from zero-padded counters until it passes 600 chars.
    var longFilename = "";
    for (var i = 0; i < 600; i = longFilename.Length)
    {
        longFilename += i.ToString("D10") + "-";
    }
    longFilename += ".txt";

    // Step 1: create a tar file containing a file with the long name.
    using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH2_FILES_PATH, archive)))
    using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, CompressionType.None))
    using (Stream inputStream = new MemoryStream())
    {
        var sw = new StreamWriter(inputStream);
        sw.Write("dummy filecontent");
        sw.Flush();
        inputStream.Position = 0;
        writer.Write(longFilename, inputStream, null);
    }

    // Step 2: check that the written tar file can be read back correctly.
    var unmodified = Path.Combine(SCRATCH2_FILES_PATH, archive);
    using (var archive2 = TarArchive.Open(unmodified))
    {
        Assert.Equal(1, archive2.Entries.Count);
        Assert.Contains(longFilename, archive2.Entries.Select(entry => entry.Key));
        foreach (var entry in archive2.Entries)
        {
            Assert.Equal(
                "dummy filecontent",
                new StreamReader(entry.OpenEntryStream()).ReadLine()
            );
        }
    }
}
[Fact]
public void Tar_UstarArchivePathReadLongName()
{
    // Long names stored in a ustar archive must read back intact.
    var archivePath = Path.Combine(TEST_ARCHIVES_PATH, "ustar with long names.tar");
    using var archive = TarArchive.Open(archivePath);
    var keys = archive.Entries.Select(entry => entry.Key).ToList();
    Assert.Equal(6, keys.Count);
    var expectedKeys = new[]
    {
        "Directory/",
        "Directory/Some file with veeeeeeeeeery loooooooooong name",
        "Directory/Directory with veeeeeeeeeery loooooooooong name/",
        "Directory/Directory with veeeeeeeeeery loooooooooong name/Some file with veeeeeeeeeery loooooooooong name",
        "Directory/Directory with veeeeeeeeeery loooooooooong name/Directory with veeeeeeeeeery loooooooooong name/",
        "Directory/Directory with veeeeeeeeeery loooooooooong name/Directory with veeeeeeeeeery loooooooooong name/Some file with veeeeeeeeeery loooooooooong name",
    };
    foreach (var expected in expectedKeys)
    {
        Assert.Contains(expected, keys);
    }
}
[Fact]
public void Tar_Create_New()
{
    // Creates a new tar from the original files using code page 866 as the
    // default entry-name encoding, then compares with the reference archive.
    // NOTE: this copy had lines from Tar_Japanese_Name (enc/fname/tw) spliced
    // in by the diff interleave; restored to match the clean copy above.
    var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.tar");
    var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
    using (var archive = TarArchive.Create())
    {
        archive.AddAllFromDirectory(ORIGINAL_FILES_PATH);
        var twopt = new TarWriterOptions(CompressionType.None, true);
        twopt.ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(866) };
        archive.SaveTo(scratchPath, twopt);
    }
    CompareArchivesByPath(unmodified, scratchPath);
}
// Opens Tar.mod.tar, adds a jpg file entry (entry key uses a backslash
// separator), saves uncompressed, and verifies the result matches the
// Tar.noEmptyDirs.tar reference archive.
[Fact]
public void Tar_Random_Write_Add()
{
var jpg = Path.Combine(ORIGINAL_FILES_PATH, "jpg", "test.jpg");
var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.mod.tar");
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.mod.tar");
var modified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
using (var archive = TarArchive.Open(unmodified))
{
archive.AddEntry("jpg\\test.jpg", jpg);
archive.SaveTo(scratchPath, CompressionType.None);
}
CompareArchivesByPath(modified, scratchPath);
}
// Removes the single entry whose key ends with "jpg" from an existing tar,
// saves uncompressed, and verifies the result matches the reference archive.
[Fact]
public void Tar_Random_Write_Remove()
{
var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.mod.tar");
var modified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.mod.tar");
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
using (var archive = TarArchive.Open(unmodified))
{
// Single() also asserts there is exactly one matching entry.
var entry = archive.Entries.Single(x =>
x.Key.NotNull().EndsWith("jpg", StringComparison.OrdinalIgnoreCase)
);
archive.RemoveEntry(entry);
archive.SaveTo(scratchPath, CompressionType.None);
}
CompareArchivesByPath(modified, scratchPath);
}
[Fact]
public void Tar_Containing_Rar_Archive()
{
    // A tar that merely contains a rar file must be detected as tar.
    var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.ContainsRar.tar");
    using Stream stream = File.OpenRead(archiveFullPath);
    using var archive = ArchiveFactory.Open(stream);
    // Assert.Equal reports the actual ArchiveType on failure, unlike
    // Assert.True(archive.Type == ...), which only says "expected True".
    Assert.Equal(ArchiveType.Tar, archive.Type);
}
[Fact]
public void Tar_Empty_Archive()
{
    // An empty tar must still be opened and identified as tar.
    var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.Empty.tar");
    using Stream stream = File.OpenRead(archiveFullPath);
    using var archive = ArchiveFactory.Open(stream);
    // Assert.Equal gives a diagnostic with the actual type on failure.
    Assert.Equal(ArchiveType.Tar, archive.Type);
}
[Theory]
[InlineData(10)]
[InlineData(128)]
public void Tar_Japanese_Name(int length)
{
    // Writes an entry whose name is `length` repetitions of U+3042
    // (Hiragana A) with a UTF-8 ArchiveEncoding, then reads it back and
    // checks the name survives the round trip.
    // NOTE: this region was duplicated/interleaved by the diff view;
    // reassembled into the single coherent method.
    using var mstm = new MemoryStream();
    var enc = new ArchiveEncoding { Default = Encoding.UTF8 };
    var twopt = new TarWriterOptions(CompressionType.None, true);
    twopt.ArchiveEncoding = enc;
    var fname = new string((char)0x3042, length);
    using (var tw = new TarWriter(mstm, twopt))
    using (var input = new MemoryStream(new byte[32]))
    {
        tw.Write(fname, input, null);
    }
    using (var inputMemory = new MemoryStream(mstm.ToArray()))
    {
        var tropt = new ReaderOptions { ArchiveEncoding = enc };
        using (var tr = TarReader.Open(inputMemory, tropt))
        {
            while (tr.MoveToNextEntry())
            {
                Assert.Equal(fname, tr.Entry.Key);
            }
        }
    }
}
[Fact]
public void Tar_Read_One_At_A_Time()
{
    // Writes two entries with identical content into an in-memory tar, then
    // reopens it and reads each entry stream fully, verifying the count and
    // each entry's length.
    // NOTE: this region was duplicated/interleaved by the diff view;
    // reassembled into the single coherent method.
    var archiveEncoding = new ArchiveEncoding { Default = Encoding.UTF8 };
    var tarWriterOptions = new TarWriterOptions(CompressionType.None, true)
    {
        ArchiveEncoding = archiveEncoding,
    };
    var testBytes = Encoding.UTF8.GetBytes("This is a test.");
    using var memoryStream = new MemoryStream();
    using (var tarWriter = new TarWriter(memoryStream, tarWriterOptions))
    using (var testFileStream = new MemoryStream(testBytes))
    {
        tarWriter.Write("test1.txt", testFileStream);
        // Rewind so the same payload can be written for the second entry.
        testFileStream.Position = 0;
        tarWriter.Write("test2.txt", testFileStream);
    }
    memoryStream.Position = 0;
    var numberOfEntries = 0;
    using (var archiveFactory = TarArchive.Open(memoryStream))
    {
        foreach (var entry in archiveFactory.Entries)
        {
            ++numberOfEntries;
            using var tarEntryStream = entry.OpenEntryStream();
            using var testFileStream = new MemoryStream();
            tarEntryStream.CopyTo(testFileStream);
            Assert.Equal(testBytes.Length, testFileStream.Length);
        }
    }
    Assert.Equal(2, numberOfEntries);
}
// Verifies that a file which is not a valid tar ("false.positive.tar")
// is rejected by TarArchive.IsTarFile.
[Fact]
public void Tar_Detect_Test()
{
var isTar = TarArchive.IsTarFile(Path.Combine(TEST_ARCHIVES_PATH, "false.positive.tar"));
Assert.False(isTar);
}
}

View File

@@ -12,255 +12,255 @@ namespace SharpCompress.Test.Tar;
public class TarReaderTests : ReaderTests
{
public TarReaderTests() => UseExtensionInsteadOfNameToVerify = true;
/*
[Fact]
public void Tar_Reader() => Read("Tar.tar", CompressionType.None);
[Fact]
public void Tar_Skip()
[Fact]
public void Tar_Reader() => Read("Tar.tar", CompressionType.None);
[Fact]
public void Tar_Skip()
{
using Stream stream = new ForwardOnlyStream(
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar"))
);
using var reader = ReaderFactory.Open(stream);
var x = 0;
while (reader.MoveToNextEntry())
{
using Stream stream = new ForwardOnlyStream(
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar"))
);
using var reader = ReaderFactory.Open(stream);
var x = 0;
while (reader.MoveToNextEntry())
if (!reader.Entry.IsDirectory)
{
if (!reader.Entry.IsDirectory)
x++;
if (x % 2 == 0)
{
x++;
if (x % 2 == 0)
{
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
}
[Fact]
public void Tar_Z_Reader() => Read("Tar.tar.Z", CompressionType.Lzw);
[Fact]
public void Tar_BZip2_Reader() => Read("Tar.tar.bz2", CompressionType.BZip2);
[Fact]
public void Tar_GZip_Reader() => Read("Tar.tar.gz", CompressionType.GZip);
[Fact]
public void Tar_ZStandard_Reader() => Read("Tar.tar.zst", CompressionType.ZStandard);
[Fact]
public void Tar_LZip_Reader() => Read("Tar.tar.lz", CompressionType.LZip);
[Fact]
public void Tar_Xz_Reader() => Read("Tar.tar.xz", CompressionType.Xz);
[Fact]
public void Tar_GZip_OldGnu_Reader() => Read("Tar.oldgnu.tar.gz", CompressionType.GZip);
[Fact]
public void Tar_BZip2_Entry_Stream()
}
// Each test below reads a tar payload through a different outer compression
// wrapper via the shared Read(archive, compressionType) helper.
[Fact]
public void Tar_Z_Reader() => Read("Tar.tar.Z", CompressionType.Lzw);
[Fact]
public void Tar_BZip2_Reader() => Read("Tar.tar.bz2", CompressionType.BZip2);
[Fact]
public void Tar_GZip_Reader() => Read("Tar.tar.gz", CompressionType.GZip);
[Fact]
public void Tar_ZStandard_Reader() => Read("Tar.tar.zst", CompressionType.ZStandard);
[Fact]
public void Tar_LZip_Reader() => Read("Tar.tar.lz", CompressionType.LZip);
[Fact]
public void Tar_Xz_Reader() => Read("Tar.tar.xz", CompressionType.Xz);
[Fact]
// Old-GNU tar layout variant, still read through the GZip wrapper.
public void Tar_GZip_OldGnu_Reader() => Read("Tar.oldgnu.tar.gz", CompressionType.GZip);
[Fact]
public void Tar_BZip2_Entry_Stream()
{
    // Reads every file entry out of a tar.bz2, writes each to the scratch
    // directory, then verifies the extracted files via VerifyFiles().
    // NOTE: the diff interleave duplicated the two `using` headers here,
    // redeclaring `stream`/`reader`; restored to a single pair.
    using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2")))
    using (var reader = TarReader.Open(stream))
    {
        while (reader.MoveToNextEntry())
        {
            if (!reader.Entry.IsDirectory)
            {
                Assert.Equal(CompressionType.BZip2, reader.Entry.CompressionType);
                using var entryStream = reader.OpenEntryStream();
                var file = Path.GetFileName(reader.Entry.Key);
                var folder =
                    Path.GetDirectoryName(reader.Entry.Key)
                    ?? throw new ArgumentNullException();
                var destdir = Path.Combine(SCRATCH_FILES_PATH, folder);
                if (!Directory.Exists(destdir))
                {
                    Directory.CreateDirectory(destdir);
                }
                var destinationFileName = Path.Combine(destdir, file.NotNull());
                using var fs = File.OpenWrite(destinationFileName);
                entryStream.CopyTo(fs);
            }
        }
    }
    VerifyFiles();
}
[Fact]
public void Tar_LongNamesWithLongNameExtension()
{
    // Collect every file entry key from an archive using the long-name
    // extension and verify all long paths were read back intact.
    var filePaths = new List<string>();
    var archivePath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.LongPathsWithLongNameExtension.tar");
    using (Stream stream = File.OpenRead(archivePath))
    using (var reader = TarReader.Open(stream))
    {
        while (reader.MoveToNextEntry())
        {
            if (reader.Entry.IsDirectory)
            {
                continue;
            }
            filePaths.Add(reader.Entry.Key.NotNull("Entry Key is null"));
        }
    }
    Assert.Equal(3, filePaths.Count);
    var expectedPaths = new[]
    {
        "a.txt",
        "wp-content/plugins/gravityformsextend/lib/Aws/Symfony/Component/ClassLoader/Tests/Fixtures/Apc/beta/Apc/ApcPrefixCollision/A/B/Bar.php",
        "wp-content/plugins/gravityformsextend/lib/Aws/Symfony/Component/ClassLoader/Tests/Fixtures/Apc/beta/Apc/ApcPrefixCollision/A/B/Foo.php",
    };
    foreach (var expected in expectedPaths)
    {
        Assert.Contains(expected, filePaths);
    }
}
[Fact]
public void Tar_BZip2_Skip_Entry_Stream()
{
    // Skips each file entry's data via SkipEntry() and checks that entry
    // metadata is still enumerated correctly.
    // NOTE: the diff interleave spliced the file-extraction body of
    // Tar_BZip2_Entry_Stream into this method; restored to match the clean
    // duplicate further down the file.
    using Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2"));
    using var reader = TarReader.Open(stream);
    var names = new List<string>();
    while (reader.MoveToNextEntry())
    {
        if (!reader.Entry.IsDirectory)
        {
            Assert.Equal(CompressionType.BZip2, reader.Entry.CompressionType);
            using var entryStream = reader.OpenEntryStream();
            entryStream.SkipEntry();
            names.Add(reader.Entry.Key.NotNull());
        }
    }
    Assert.Equal(3, names.Count);
}
[Fact]
public void Tar_Containing_Rar_Reader()
VerifyFiles();
}
[Fact]
public void Tar_LongNamesWithLongNameExtension()
{
var filePaths = new List<string>();
using (
Stream stream = File.OpenRead(
Path.Combine(TEST_ARCHIVES_PATH, "Tar.LongPathsWithLongNameExtension.tar")
)
)
using (var reader = TarReader.Open(stream))
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.ContainsRar.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var reader = ReaderFactory.Open(stream);
Assert.True(reader.ArchiveType == ArchiveType.Tar);
}
[Fact]
public void Tar_With_TarGz_With_Flushed_EntryStream()
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.ContainsTarGz.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var reader = ReaderFactory.Open(stream);
Assert.True(reader.MoveToNextEntry());
Assert.Equal("inner.tar.gz", reader.Entry.Key);
using var entryStream = reader.OpenEntryStream();
using var flushingStream = new FlushOnDisposeStream(entryStream);
// Extract inner.tar.gz
using var innerReader = ReaderFactory.Open(flushingStream);
Assert.True(innerReader.MoveToNextEntry());
Assert.Equal("test", innerReader.Entry.Key);
}
[Fact]
public void Tar_Broken_Stream()
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var reader = ReaderFactory.Open(stream);
var memoryStream = new MemoryStream();
Assert.True(reader.MoveToNextEntry());
Assert.True(reader.MoveToNextEntry());
reader.WriteEntryTo(memoryStream);
stream.Close();
Assert.Throws<IncompleteArchiveException>(() => reader.MoveToNextEntry());
}
[Fact]
public void Tar_Corrupted()
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "TarCorrupted.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var reader = ReaderFactory.Open(stream);
var memoryStream = new MemoryStream();
Assert.True(reader.MoveToNextEntry());
Assert.True(reader.MoveToNextEntry());
reader.WriteEntryTo(memoryStream);
stream.Close();
Assert.Throws<IncompleteArchiveException>(() => reader.MoveToNextEntry());
}
#if !NETFRAMEWORK
[Fact]
public void Tar_GZip_With_Symlink_Entries()
{
var isWindows = System.Runtime.InteropServices.RuntimeInformation.IsOSPlatform(
System.Runtime.InteropServices.OSPlatform.Windows
);
using Stream stream = File.OpenRead(
Path.Combine(TEST_ARCHIVES_PATH, "TarWithSymlink.tar.gz")
);
using var reader = TarReader.Open(stream);
while (reader.MoveToNextEntry())
{
if (reader.Entry.IsDirectory)
if (!reader.Entry.IsDirectory)
{
continue;
filePaths.Add(reader.Entry.Key.NotNull("Entry Key is null"));
}
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions
{
ExtractFullPath = true,
Overwrite = true,
WriteSymbolicLink = (sourcePath, targetPath) =>
{
if (!isWindows)
{
var link = new Mono.Unix.UnixSymbolicLinkInfo(sourcePath);
if (File.Exists(sourcePath))
{
link.Delete(); // equivalent to ln -s -f
}
link.CreateSymbolicLinkTo(targetPath);
}
},
}
);
if (!isWindows)
}
}
Assert.Equal(3, filePaths.Count);
Assert.Contains("a.txt", filePaths);
Assert.Contains(
"wp-content/plugins/gravityformsextend/lib/Aws/Symfony/Component/ClassLoader/Tests/Fixtures/Apc/beta/Apc/ApcPrefixCollision/A/B/Bar.php",
filePaths
);
Assert.Contains(
"wp-content/plugins/gravityformsextend/lib/Aws/Symfony/Component/ClassLoader/Tests/Fixtures/Apc/beta/Apc/ApcPrefixCollision/A/B/Foo.php",
filePaths
);
}
// Opens a tar.bz2, skips every file entry's data via SkipEntry(), and
// verifies entry metadata (name, compression type) is still enumerated.
[Fact]
public void Tar_BZip2_Skip_Entry_Stream()
{
using Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2"));
using var reader = TarReader.Open(stream);
var names = new List<string>();
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(CompressionType.BZip2, reader.Entry.CompressionType);
// Skip the data instead of reading it; the reader must still advance
// cleanly to the next header.
using var entryStream = reader.OpenEntryStream();
entryStream.SkipEntry();
names.Add(reader.Entry.Key.NotNull());
}
}
// The fixture archive contains exactly 3 file entries.
Assert.Equal(3, names.Count);
}
[Fact]
public void Tar_Containing_Rar_Reader()
{
    // The reader factory must identify a tar containing a rar file as tar.
    var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.ContainsRar.tar");
    using Stream stream = File.OpenRead(archiveFullPath);
    using var reader = ReaderFactory.Open(stream);
    // Assert.Equal reports the actual ArchiveType on failure, unlike
    // Assert.True(reader.ArchiveType == ...).
    Assert.Equal(ArchiveType.Tar, reader.ArchiveType);
}
[Fact]
public void Tar_With_TarGz_With_Flushed_EntryStream()
{
    // Open the outer tar, then read the nested tar.gz entry through a
    // wrapper that flushes on dispose, and confirm the inner entry is found.
    var outerArchivePath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.ContainsTarGz.tar");
    using Stream stream = File.OpenRead(outerArchivePath);
    using var outerReader = ReaderFactory.Open(stream);
    Assert.True(outerReader.MoveToNextEntry());
    Assert.Equal("inner.tar.gz", outerReader.Entry.Key);
    using var entryStream = outerReader.OpenEntryStream();
    using var flushingStream = new FlushOnDisposeStream(entryStream);
    // Extract inner.tar.gz
    using var innerReader = ReaderFactory.Open(flushingStream);
    Assert.True(innerReader.MoveToNextEntry());
    Assert.Equal("test", innerReader.Entry.Key);
}
// After the underlying file stream is closed mid-archive, advancing the
// reader must throw IncompleteArchiveException.
[Fact]
public void Tar_Broken_Stream()
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var reader = ReaderFactory.Open(stream);
var memoryStream = new MemoryStream();
Assert.True(reader.MoveToNextEntry());
Assert.True(reader.MoveToNextEntry());
reader.WriteEntryTo(memoryStream);
// Close the source stream out from under the reader.
stream.Close();
Assert.Throws<IncompleteArchiveException>(() => reader.MoveToNextEntry());
}
// A corrupted tar must surface IncompleteArchiveException when the reader
// advances past the readable entries (after the source stream is closed).
[Fact]
public void Tar_Corrupted()
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "TarCorrupted.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var reader = ReaderFactory.Open(stream);
var memoryStream = new MemoryStream();
Assert.True(reader.MoveToNextEntry());
Assert.True(reader.MoveToNextEntry());
reader.WriteEntryTo(memoryStream);
stream.Close();
Assert.Throws<IncompleteArchiveException>(() => reader.MoveToNextEntry());
}
#if !NETFRAMEWORK
[Fact]
public void Tar_GZip_With_Symlink_Entries()
{
    // Extracts a tar.gz containing symlink entries; on non-Windows platforms
    // the symlinks are recreated via Mono.Unix and their targets verified.
    // NOTE: this region was diff-interleaved (option-initializer lines mixed
    // with the verification block); reassembled into the coherent method.
    var isWindows = System.Runtime.InteropServices.RuntimeInformation.IsOSPlatform(
        System.Runtime.InteropServices.OSPlatform.Windows
    );
    using Stream stream = File.OpenRead(
        Path.Combine(TEST_ARCHIVES_PATH, "TarWithSymlink.tar.gz")
    );
    using var reader = TarReader.Open(stream);
    while (reader.MoveToNextEntry())
    {
        if (reader.Entry.IsDirectory)
        {
            continue;
        }
        reader.WriteEntryToDirectory(
            SCRATCH_FILES_PATH,
            new ExtractionOptions
            {
                ExtractFullPath = true,
                Overwrite = true,
                WriteSymbolicLink = (sourcePath, targetPath) =>
                {
                    if (!isWindows)
                    {
                        var link = new Mono.Unix.UnixSymbolicLinkInfo(sourcePath);
                        if (File.Exists(sourcePath))
                        {
                            link.Delete(); // equivalent to ln -s -f
                        }
                        link.CreateSymbolicLinkTo(targetPath);
                    }
                },
            }
        );
        if (!isWindows)
        {
            if (reader.Entry.LinkTarget != null)
            {
                var path = Path.Combine(SCRATCH_FILES_PATH, reader.Entry.Key.NotNull());
                var link = new Mono.Unix.UnixSymbolicLinkInfo(path);
                if (link.HasContents)
                {
                    // need to convert the link to an absolute path for comparison
                    var target = reader.Entry.LinkTarget;
                    var realTarget = Path.GetFullPath(
                        Path.Combine($"{Path.GetDirectoryName(path)}", target)
                    );
                    Assert.Equal(realTarget, link.GetContents().ToString());
                }
                else
                {
                    Assert.True(false, "Symlink has no target");
                }
            }
        }
    }
}
#endif*/
}
#endif
}

View File

@@ -1,6 +1,5 @@
using System.IO;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Writers.Tar;
using Xunit;
@@ -21,8 +20,8 @@ public class TarWriterTests : WriterTests
: base(ArchiveType.Tar) => UseExtensionInsteadOfNameToVerify = true;
[Fact]
public Task Tar_Writer() =>
WriteAsync(
public void Tar_Writer() =>
Write(
CompressionType.None,
"Tar.noEmptyDirs.tar",
"Tar.noEmptyDirs.tar",
@@ -30,8 +29,8 @@ public class TarWriterTests : WriterTests
);
[Fact]
public Task Tar_BZip2_Writer() =>
WriteAsync(
public void Tar_BZip2_Writer() =>
Write(
CompressionType.BZip2,
"Tar.noEmptyDirs.tar.bz2",
"Tar.noEmptyDirs.tar.bz2",
@@ -39,8 +38,8 @@ public class TarWriterTests : WriterTests
);
[Fact]
public Task Tar_LZip_Writer() =>
WriteAsync(
public void Tar_LZip_Writer() =>
Write(
CompressionType.LZip,
"Tar.noEmptyDirs.tar.lz",
"Tar.noEmptyDirs.tar.lz",
@@ -48,13 +47,9 @@ public class TarWriterTests : WriterTests
);
[Fact]
public Task Tar_Rar_Write() =>
Assert.ThrowsAsync<InvalidFormatException>(async () =>
await WriteAsync(
CompressionType.Rar,
"Zip.ppmd.noEmptyDirs.zip",
"Zip.ppmd.noEmptyDirs.zip"
)
public void Tar_Rar_Write() =>
Assert.Throws<InvalidFormatException>(() =>
Write(CompressionType.Rar, "Zip.ppmd.noEmptyDirs.zip", "Zip.ppmd.noEmptyDirs.zip")
);
[Theory]

View File

@@ -3,7 +3,6 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Readers;
using Xunit;
@@ -203,11 +202,7 @@ public class TestBase : IDisposable
Assert.Equal(fi1.Attributes, fi2.Attributes);
}
protected async Task CompareArchivesByPathAsync(
string file1,
string file2,
Encoding? encoding = null
)
protected void CompareArchivesByPath(string file1, string file2, Encoding? encoding = null)
{
var readerOptions = new ReaderOptions { LeaveStreamOpen = false };
readerOptions.ArchiveEncoding.Default = encoding ?? Encoding.Default;
@@ -218,13 +213,13 @@ public class TestBase : IDisposable
using (var archive1 = ReaderFactory.Open(File.OpenRead(file1), readerOptions))
using (var archive2 = ReaderFactory.Open(File.OpenRead(file2), readerOptions))
{
while (await archive1.MoveToNextEntryAsync())
while (archive1.MoveToNextEntry())
{
Assert.True(await archive2.MoveToNextEntryAsync());
Assert.True(archive2.MoveToNextEntry());
archive1Entries.Add(archive1.Entry.Key.NotNull());
archive2Entries.Add(archive2.Entry.Key.NotNull());
}
Assert.False(await archive2.MoveToNextEntryAsync());
Assert.False(archive2.MoveToNextEntry());
}
archive1Entries.Sort();
archive2Entries.Sort();

View File

@@ -1,6 +1,5 @@
using System.IO;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -14,7 +13,7 @@ public class WriterTests : TestBase
protected WriterTests(ArchiveType type) => _type = type;
protected async Task WriteAsync(
protected void Write(
CompressionType compressionType,
string archive,
string archiveToVerifyAgainst,
@@ -30,8 +29,7 @@ public class WriterTests : TestBase
using var writer = WriterFactory.Open(stream, _type, writerOptions);
writer.WriteAll(ORIGINAL_FILES_PATH, "*", SearchOption.AllDirectories);
}
await CompareArchivesByPathAsync(
CompareArchivesByPath(
Path.Combine(SCRATCH2_FILES_PATH, archive),
Path.Combine(TEST_ARCHIVES_PATH, archiveToVerifyAgainst)
);
@@ -46,7 +44,7 @@ public class WriterTests : TestBase
SharpCompressStream.Create(stream, leaveOpen: true),
readerOptions
);
await reader.WriteAllToDirectoryAsync(
reader.WriteAllToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true }
);

View File

@@ -1,7 +1,6 @@
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.Common.Zip;
@@ -24,67 +23,67 @@ public class Zip64Tests : WriterTests
private const long FOUR_GB_LIMIT = ((long)uint.MaxValue) + 1;
[Trait("format", "zip64")]
public Task Zip64_Single_Large_File() =>
public void Zip64_Single_Large_File() =>
// One single file, requires zip64
RunSingleTestAsync(1, FOUR_GB_LIMIT, setZip64: true, forwardOnly: false);
RunSingleTest(1, FOUR_GB_LIMIT, setZip64: true, forwardOnly: false);
[Trait("format", "zip64")]
public Task Zip64_Two_Large_Files() =>
public void Zip64_Two_Large_Files() =>
// One single file, requires zip64
RunSingleTestAsync(2, FOUR_GB_LIMIT, setZip64: true, forwardOnly: false);
RunSingleTest(2, FOUR_GB_LIMIT, setZip64: true, forwardOnly: false);
[Trait("format", "zip64")]
public Task Zip64_Two_Small_files() =>
public void Zip64_Two_Small_files() =>
// Multiple files, does not require zip64
RunSingleTestAsync(2, FOUR_GB_LIMIT / 2, setZip64: false, forwardOnly: false);
RunSingleTest(2, FOUR_GB_LIMIT / 2, setZip64: false, forwardOnly: false);
[Trait("format", "zip64")]
public Task Zip64_Two_Small_files_stream() =>
public void Zip64_Two_Small_files_stream() =>
// Multiple files, does not require zip64, and works with streams
RunSingleTestAsync(2, FOUR_GB_LIMIT / 2, setZip64: false, forwardOnly: true);
RunSingleTest(2, FOUR_GB_LIMIT / 2, setZip64: false, forwardOnly: true);
[Trait("format", "zip64")]
public Task Zip64_Two_Small_Files_Zip64() =>
public void Zip64_Two_Small_Files_Zip64() =>
// Multiple files, use zip64 even though it is not required
RunSingleTestAsync(2, FOUR_GB_LIMIT / 2, setZip64: true, forwardOnly: false);
RunSingleTest(2, FOUR_GB_LIMIT / 2, setZip64: true, forwardOnly: false);
[Trait("format", "zip64")]
public async Task Zip64_Single_Large_File_Fail()
public void Zip64_Single_Large_File_Fail()
{
try
{
// One single file, should fail
await RunSingleTestAsync(1, FOUR_GB_LIMIT, setZip64: false, forwardOnly: false);
RunSingleTest(1, FOUR_GB_LIMIT, setZip64: false, forwardOnly: false);
throw new InvalidOperationException("Test did not fail?");
}
catch (NotSupportedException) { }
}
[Trait("zip64", "true")]
public async Task Zip64_Single_Large_File_Zip64_Streaming_Fail()
public void Zip64_Single_Large_File_Zip64_Streaming_Fail()
{
try
{
// One single file, should fail (fast) with zip64
await RunSingleTestAsync(1, FOUR_GB_LIMIT, setZip64: true, forwardOnly: true);
RunSingleTest(1, FOUR_GB_LIMIT, setZip64: true, forwardOnly: true);
throw new InvalidOperationException("Test did not fail?");
}
catch (NotSupportedException) { }
}
[Trait("zip64", "true")]
public async Task Zip64_Single_Large_File_Streaming_Fail()
public void Zip64_Single_Large_File_Streaming_Fail()
{
try
{
// One single file, should fail once the write discovers the problem
await RunSingleTestAsync(1, FOUR_GB_LIMIT, setZip64: false, forwardOnly: true);
RunSingleTest(1, FOUR_GB_LIMIT, setZip64: false, forwardOnly: true);
throw new InvalidOperationException("Test did not fail?");
}
catch (NotSupportedException) { }
}
public async Task RunSingleTestAsync(
public void RunSingleTest(
long files,
long filesize,
bool setZip64,
@@ -105,7 +104,7 @@ public class Zip64Tests : WriterTests
CreateZipArchive(filename, files, filesize, writeChunkSize, setZip64, forwardOnly);
}
var resForward = await ReadForwardOnlyAsync(filename);
var resForward = ReadForwardOnly(filename);
if (resForward.Item1 != files)
{
throw new InvalidOperationException(
@@ -169,7 +168,7 @@ public class Zip64Tests : WriterTests
}
}
public async Task<Tuple<long, long>> ReadForwardOnlyAsync(string filename)
public Tuple<long, long> ReadForwardOnly(string filename)
{
long count = 0;
long size = 0;
@@ -177,9 +176,9 @@ public class Zip64Tests : WriterTests
using (var fs = File.OpenRead(filename))
using (var rd = ZipReader.Open(fs, new ReaderOptions { LookForHeader = false }))
{
while (await rd.MoveToNextEntryAsync())
while (rd.MoveToNextEntry())
{
using (await rd.OpenEntryStreamAsync()) { }
using (rd.OpenEntryStream()) { }
count++;
if (prev != null)

File diff suppressed because it is too large Load Diff

View File

@@ -17,219 +17,219 @@ namespace SharpCompress.Test.Zip;
public class ZipTypesLevelsWithCrcRatioTests : ArchiveTests
{
public ZipTypesLevelsWithCrcRatioTests() => UseExtensionInsteadOfNameToVerify = true;
/*
[Theory]
[InlineData(CompressionType.Deflate, 1, 1, 0.11f)] // was 0.8f, actual 0.104
[InlineData(CompressionType.Deflate, 3, 1, 0.08f)] // was 0.8f, actual 0.078
[InlineData(CompressionType.Deflate, 6, 1, 0.05f)] // was 0.8f, actual ~0.042
[InlineData(CompressionType.Deflate, 9, 1, 0.04f)] // was 0.7f, actual 0.038
[InlineData(CompressionType.ZStandard, 1, 1, 0.025f)] // was 0.8f, actual 0.023
[InlineData(CompressionType.ZStandard, 3, 1, 0.015f)] // was 0.7f, actual 0.013
[InlineData(CompressionType.ZStandard, 9, 1, 0.006f)] // was 0.7f, actual 0.005
[InlineData(CompressionType.ZStandard, 22, 1, 0.005f)] // was 0.7f, actual 0.004
[InlineData(CompressionType.BZip2, 0, 1, 0.035f)] // was 0.8f, actual 0.033
[InlineData(CompressionType.LZMA, 0, 1, 0.005f)] // was 0.8f, actual 0.004
[InlineData(CompressionType.None, 0, 1, 1.001f)] // was 1.1f, actual 1.000
[InlineData(CompressionType.Deflate, 6, 2, 0.045f)] // was 0.8f, actual 0.042
[InlineData(CompressionType.ZStandard, 3, 2, 0.012f)] // was 0.7f, actual 0.010
[InlineData(CompressionType.BZip2, 0, 2, 0.035f)] // was 0.8f, actual 0.032
[InlineData(CompressionType.Deflate, 9, 3, 0.04f)] // was 0.7f, actual 0.038
[InlineData(CompressionType.ZStandard, 9, 3, 0.003f)] // was 0.7f, actual 0.002
public void Zip_Create_Archive_With_3_Files_Crc32_Test(
CompressionType compressionType,
int compressionLevel,
int sizeMb,
float expectedRatio
)
[Theory]
[InlineData(CompressionType.Deflate, 1, 1, 0.11f)] // was 0.8f, actual 0.104
[InlineData(CompressionType.Deflate, 3, 1, 0.08f)] // was 0.8f, actual 0.078
[InlineData(CompressionType.Deflate, 6, 1, 0.05f)] // was 0.8f, actual ~0.042
[InlineData(CompressionType.Deflate, 9, 1, 0.04f)] // was 0.7f, actual 0.038
[InlineData(CompressionType.ZStandard, 1, 1, 0.025f)] // was 0.8f, actual 0.023
[InlineData(CompressionType.ZStandard, 3, 1, 0.015f)] // was 0.7f, actual 0.013
[InlineData(CompressionType.ZStandard, 9, 1, 0.006f)] // was 0.7f, actual 0.005
[InlineData(CompressionType.ZStandard, 22, 1, 0.005f)] // was 0.7f, actual 0.004
[InlineData(CompressionType.BZip2, 0, 1, 0.035f)] // was 0.8f, actual 0.033
[InlineData(CompressionType.LZMA, 0, 1, 0.005f)] // was 0.8f, actual 0.004
[InlineData(CompressionType.None, 0, 1, 1.001f)] // was 1.1f, actual 1.000
[InlineData(CompressionType.Deflate, 6, 2, 0.045f)] // was 0.8f, actual 0.042
[InlineData(CompressionType.ZStandard, 3, 2, 0.012f)] // was 0.7f, actual 0.010
[InlineData(CompressionType.BZip2, 0, 2, 0.035f)] // was 0.8f, actual 0.032
[InlineData(CompressionType.Deflate, 9, 3, 0.04f)] // was 0.7f, actual 0.038
[InlineData(CompressionType.ZStandard, 9, 3, 0.003f)] // was 0.7f, actual 0.002
public void Zip_Create_Archive_With_3_Files_Crc32_Test(
CompressionType compressionType,
int compressionLevel,
int sizeMb,
float expectedRatio
)
{
const int OneMiB = 1024 * 1024;
var baseSize = sizeMb * OneMiB;
// Generate test content for files with sizes based on the sizeMb parameter
var file1Data = TestPseudoTextStream.Create(baseSize);
var file2Data = TestPseudoTextStream.Create(baseSize * 2);
var file3Data = TestPseudoTextStream.Create(baseSize * 3);
var expectedFiles = new Dictionary<string, (byte[] data, uint crc)>
{
const int OneMiB = 1024 * 1024;
var baseSize = sizeMb * OneMiB;
// Generate test content for files with sizes based on the sizeMb parameter
var file1Data = TestPseudoTextStream.Create(baseSize);
var file2Data = TestPseudoTextStream.Create(baseSize * 2);
var file3Data = TestPseudoTextStream.Create(baseSize * 3);
var expectedFiles = new Dictionary<string, (byte[] data, uint crc)>
{
[$"file1_{sizeMb}MiB.txt"] = (file1Data, CalculateCrc32(file1Data)),
[$"data/file2_{sizeMb * 2}MiB.txt"] = (file2Data, CalculateCrc32(file2Data)),
[$"deep/nested/file3_{sizeMb * 3}MiB.txt"] = (file3Data, CalculateCrc32(file3Data)),
};
// Create zip archive in memory
using var zipStream = new MemoryStream();
using (var writer = CreateWriterWithLevel(zipStream, compressionType, compressionLevel))
{
writer.Write($"file1_{sizeMb}MiB.txt", new MemoryStream(file1Data));
writer.Write($"data/file2_{sizeMb * 2}MiB.txt", new MemoryStream(file2Data));
writer.Write($"deep/nested/file3_{sizeMb * 3}MiB.txt", new MemoryStream(file3Data));
}
// Calculate and output actual compression ratio
var originalSize = file1Data.Length + file2Data.Length + file3Data.Length;
var actualRatio = (double)zipStream.Length / originalSize;
//Debug.WriteLine($"Zip_Create_Archive_With_3_Files_Crc32_Test: {compressionType} Level={compressionLevel} Size={sizeMb}MB Expected={expectedRatio:F3} Actual={actualRatio:F3}");
// Verify compression occurred (except for None compression type)
if (compressionType != CompressionType.None)
{
Assert.True(
zipStream.Length < originalSize,
$"Compression failed: compressed={zipStream.Length}, original={originalSize}"
);
}
// Verify compression ratio
VerifyCompressionRatio(
originalSize,
zipStream.Length,
expectedRatio,
$"{compressionType} level {compressionLevel}"
);
// Verify archive content and CRC32
VerifyArchiveContent(zipStream, expectedFiles);
// Verify compression type is correctly set
VerifyCompressionType(zipStream, compressionType);
[$"file1_{sizeMb}MiB.txt"] = (file1Data, CalculateCrc32(file1Data)),
[$"data/file2_{sizeMb * 2}MiB.txt"] = (file2Data, CalculateCrc32(file2Data)),
[$"deep/nested/file3_{sizeMb * 3}MiB.txt"] = (file3Data, CalculateCrc32(file3Data)),
};
// Create zip archive in memory
using var zipStream = new MemoryStream();
using (var writer = CreateWriterWithLevel(zipStream, compressionType, compressionLevel))
{
writer.Write($"file1_{sizeMb}MiB.txt", new MemoryStream(file1Data));
writer.Write($"data/file2_{sizeMb * 2}MiB.txt", new MemoryStream(file2Data));
writer.Write($"deep/nested/file3_{sizeMb * 3}MiB.txt", new MemoryStream(file3Data));
}
[Theory]
[InlineData(CompressionType.Deflate, 1, 4, 0.11f)] // was 0.8, actual 0.105
[InlineData(CompressionType.Deflate, 3, 4, 0.08f)] // was 0.8, actual 0.077
[InlineData(CompressionType.Deflate, 6, 4, 0.045f)] // was 0.8, actual 0.042
[InlineData(CompressionType.Deflate, 9, 4, 0.04f)] // was 0.8, actual 0.037
[InlineData(CompressionType.ZStandard, 1, 4, 0.025f)] // was 0.8, actual 0.022
[InlineData(CompressionType.ZStandard, 3, 4, 0.012f)] // was 0.8, actual 0.010
[InlineData(CompressionType.ZStandard, 9, 4, 0.003f)] // was 0.8, actual 0.002
[InlineData(CompressionType.ZStandard, 22, 4, 0.003f)] // was 0.8, actual 0.002
[InlineData(CompressionType.BZip2, 0, 4, 0.035f)] // was 0.8, actual 0.032
[InlineData(CompressionType.LZMA, 0, 4, 0.003f)] // was 0.8, actual 0.002
public void Zip_WriterFactory_Crc32_Test(
CompressionType compressionType,
int compressionLevel,
int sizeMb,
float expectedRatio
)
// Calculate and output actual compression ratio
var originalSize = file1Data.Length + file2Data.Length + file3Data.Length;
var actualRatio = (double)zipStream.Length / originalSize;
//Debug.WriteLine($"Zip_Create_Archive_With_3_Files_Crc32_Test: {compressionType} Level={compressionLevel} Size={sizeMb}MB Expected={expectedRatio:F3} Actual={actualRatio:F3}");
// Verify compression occurred (except for None compression type)
if (compressionType != CompressionType.None)
{
var fileSize = sizeMb * 1024 * 1024;
var testData = TestPseudoTextStream.Create(fileSize);
var expectedCrc = CalculateCrc32(testData);
// Create archive with specified compression level
using var zipStream = new MemoryStream();
var writerOptions = new ZipWriterOptions(compressionType)
{
CompressionLevel = compressionLevel,
};
using (var writer = WriterFactory.Open(zipStream, ArchiveType.Zip, writerOptions))
{
writer.Write(
$"{compressionType}_level_{compressionLevel}_{sizeMb}MiB.txt",
new MemoryStream(testData)
);
}
// Calculate and output actual compression ratio
var actualRatio = (double)zipStream.Length / testData.Length;
//Debug.WriteLine($"Zip_WriterFactory_Crc32_Test: {compressionType} Level={compressionLevel} Size={sizeMb}MB Expected={expectedRatio:F3} Actual={actualRatio:F3}");
VerifyCompressionRatio(
testData.Length,
zipStream.Length,
expectedRatio,
$"{compressionType} level {compressionLevel}"
Assert.True(
zipStream.Length < originalSize,
$"Compression failed: compressed={zipStream.Length}, original={originalSize}"
);
// Verify the archive
zipStream.Position = 0;
using var archive = ZipArchive.Open(zipStream);
var entry = archive.Entries.Single(e => !e.IsDirectory);
using var entryStream = entry.OpenEntryStream();
using var extractedStream = new MemoryStream();
entryStream.CopyTo(extractedStream);
var extractedData = extractedStream.ToArray();
var actualCrc = CalculateCrc32(extractedData);
Assert.Equal(compressionType, entry.CompressionType);
Assert.Equal(expectedCrc, actualCrc);
Assert.Equal(testData.Length, extractedData.Length);
}
// Verify compression ratio
VerifyCompressionRatio(
originalSize,
zipStream.Length,
expectedRatio,
$"{compressionType} level {compressionLevel}"
);
// Verify archive content and CRC32
VerifyArchiveContent(zipStream, expectedFiles);
// Verify compression type is correctly set
VerifyCompressionType(zipStream, compressionType);
}
[Theory]
[InlineData(CompressionType.Deflate, 1, 4, 0.11f)] // was 0.8, actual 0.105
[InlineData(CompressionType.Deflate, 3, 4, 0.08f)] // was 0.8, actual 0.077
[InlineData(CompressionType.Deflate, 6, 4, 0.045f)] // was 0.8, actual 0.042
[InlineData(CompressionType.Deflate, 9, 4, 0.04f)] // was 0.8, actual 0.037
[InlineData(CompressionType.ZStandard, 1, 4, 0.025f)] // was 0.8, actual 0.022
[InlineData(CompressionType.ZStandard, 3, 4, 0.012f)] // was 0.8, actual 0.010
[InlineData(CompressionType.ZStandard, 9, 4, 0.003f)] // was 0.8, actual 0.002
[InlineData(CompressionType.ZStandard, 22, 4, 0.003f)] // was 0.8, actual 0.002
[InlineData(CompressionType.BZip2, 0, 4, 0.035f)] // was 0.8, actual 0.032
[InlineData(CompressionType.LZMA, 0, 4, 0.003f)] // was 0.8, actual 0.002
public void Zip_WriterFactory_Crc32_Test(
    CompressionType compressionType,
    int compressionLevel,
    int sizeMb,
    float expectedRatio
)
{
    // Build a pseudo-text payload of sizeMb MiB and remember its CRC32 so the
    // round-trip through the archive can be verified against the original bytes.
    var payload = TestPseudoTextStream.Create(sizeMb * 1024 * 1024);
    var sourceCrc = CalculateCrc32(payload);

    // Write the payload into an in-memory zip using the WriterFactory entry point
    // (as opposed to the other tests in this class, which construct writers directly).
    using var archiveStream = new MemoryStream();
    var options = new ZipWriterOptions(compressionType) { CompressionLevel = compressionLevel };
    using (var writer = WriterFactory.Open(archiveStream, ArchiveType.Zip, options))
    {
        writer.Write(
            $"{compressionType}_level_{compressionLevel}_{sizeMb}MiB.txt",
            new MemoryStream(payload)
        );
    }

    // Compressed/original ratio; kept as a local for ad-hoc debug output.
    var observedRatio = (double)archiveStream.Length / payload.Length;
    //Debug.WriteLine($"Zip_WriterFactory_Crc32_Test: {compressionType} Level={compressionLevel} Size={sizeMb}MB Expected={expectedRatio:F3} Actual={actualRatio:F3}");
    VerifyCompressionRatio(
        payload.Length,
        archiveStream.Length,
        expectedRatio,
        $"{compressionType} level {compressionLevel}"
    );

    // Round-trip: reopen the archive and extract its single file entry.
    archiveStream.Position = 0;
    using var archive = ZipArchive.Open(archiveStream);
    var entry = archive.Entries.Single(e => !e.IsDirectory);
    using var entryStream = entry.OpenEntryStream();
    using var roundTripped = new MemoryStream();
    entryStream.CopyTo(roundTripped);
    var extracted = roundTripped.ToArray();
    var extractedCrc = CalculateCrc32(extracted);

    // The entry must record the requested compression type, and the extracted
    // bytes must match the original payload exactly (CRC, length, and content).
    Assert.Equal(compressionType, entry.CompressionType);
    Assert.Equal(sourceCrc, extractedCrc);
    Assert.Equal(payload.Length, extracted.Length);
    Assert.Equal(payload, extracted);
}
[Theory]
[InlineData(CompressionType.Deflate, 1, 2, 0.11f)] // was 0.8, actual 0.104
[InlineData(CompressionType.Deflate, 3, 2, 0.08f)] // was 0.8, actual 0.077
[InlineData(CompressionType.Deflate, 6, 2, 0.045f)] // was 0.8, actual 0.042
[InlineData(CompressionType.Deflate, 9, 2, 0.04f)] // was 0.7, actual 0.038
[InlineData(CompressionType.ZStandard, 1, 2, 0.025f)] // was 0.8, actual 0.023
[InlineData(CompressionType.ZStandard, 3, 2, 0.015f)] // was 0.7, actual 0.012
[InlineData(CompressionType.ZStandard, 9, 2, 0.006f)] // was 0.7, actual 0.005
[InlineData(CompressionType.ZStandard, 22, 2, 0.005f)] // was 0.7, actual 0.004
[InlineData(CompressionType.BZip2, 0, 2, 0.035f)] // was 0.8, actual 0.032
[InlineData(CompressionType.LZMA, 0, 2, 0.005f)] // was 0.8, actual 0.004
public void Zip_ZipArchiveOpen_Crc32_Test(
CompressionType compressionType,
int compressionLevel,
int sizeMb,
float expectedRatio
)
{
var fileSize = sizeMb * 1024 * 1024;
var testData = TestPseudoTextStream.Create(fileSize);
var expectedCrc = CalculateCrc32(testData);
// Create archive with specified compression and level
using var zipStream = new MemoryStream();
using (var writer = CreateWriterWithLevel(zipStream, compressionType, compressionLevel))
{
writer.Write(
$"{compressionType}_{compressionLevel}_{sizeMb}MiB.txt",
new MemoryStream(testData)
);
}
// Calculate and output actual compression ratio
var actualRatio = (double)zipStream.Length / testData.Length;
//Debug.WriteLine($"Zip_ZipArchiveOpen_Crc32_Test: {compressionType} Level={compressionLevel} Size={sizeMb}MB Expected={expectedRatio:F3} Actual={actualRatio:F3}");
// Verify the archive
zipStream.Position = 0;
using var archive = ZipArchive.Open(zipStream);
var entry = archive.Entries.Single(e => !e.IsDirectory);
using var entryStream = entry.OpenEntryStream();
using var extractedStream = new MemoryStream();
entryStream.CopyTo(extractedStream);
var extractedData = extractedStream.ToArray();
var actualCrc = CalculateCrc32(extractedData);
Assert.Equal(compressionType, entry.CompressionType);
Assert.Equal(expectedCrc, actualCrc);
Assert.Equal(testData.Length, extractedData.Length);
// For smaller files, verify full content; for larger, spot check
if (testData.Length <= sizeMb * 2)
{
Assert.Equal(testData, extractedData);
}
[Theory]
[InlineData(CompressionType.Deflate, 1, 2, 0.11f)] // was 0.8, actual 0.104
[InlineData(CompressionType.Deflate, 3, 2, 0.08f)] // was 0.8, actual 0.077
[InlineData(CompressionType.Deflate, 6, 2, 0.045f)] // was 0.8, actual 0.042
[InlineData(CompressionType.Deflate, 9, 2, 0.04f)] // was 0.7, actual 0.038
[InlineData(CompressionType.ZStandard, 1, 2, 0.025f)] // was 0.8, actual 0.023
[InlineData(CompressionType.ZStandard, 3, 2, 0.015f)] // was 0.7, actual 0.012
[InlineData(CompressionType.ZStandard, 9, 2, 0.006f)] // was 0.7, actual 0.005
[InlineData(CompressionType.ZStandard, 22, 2, 0.005f)] // was 0.7, actual 0.004
[InlineData(CompressionType.BZip2, 0, 2, 0.035f)] // was 0.8, actual 0.032
[InlineData(CompressionType.LZMA, 0, 2, 0.005f)] // was 0.8, actual 0.004
public void Zip_ZipArchiveOpen_Crc32_Test(
CompressionType compressionType,
int compressionLevel,
int sizeMb,
float expectedRatio
)
else
{
var fileSize = sizeMb * 1024 * 1024;
var testData = TestPseudoTextStream.Create(fileSize);
var expectedCrc = CalculateCrc32(testData);
// Create archive with specified compression and level
using var zipStream = new MemoryStream();
using (var writer = CreateWriterWithLevel(zipStream, compressionType, compressionLevel))
{
writer.Write(
$"{compressionType}_{compressionLevel}_{sizeMb}MiB.txt",
new MemoryStream(testData)
);
}
// Calculate and output actual compression ratio
var actualRatio = (double)zipStream.Length / testData.Length;
//Debug.WriteLine($"Zip_ZipArchiveOpen_Crc32_Test: {compressionType} Level={compressionLevel} Size={sizeMb}MB Expected={expectedRatio:F3} Actual={actualRatio:F3}");
// Verify the archive
zipStream.Position = 0;
using var archive = ZipArchive.Open(zipStream);
var entry = archive.Entries.Single(e => !e.IsDirectory);
using var entryStream = entry.OpenEntryStream();
using var extractedStream = new MemoryStream();
entryStream.CopyTo(extractedStream);
var extractedData = extractedStream.ToArray();
var actualCrc = CalculateCrc32(extractedData);
Assert.Equal(compressionType, entry.CompressionType);
Assert.Equal(expectedCrc, actualCrc);
Assert.Equal(testData.Length, extractedData.Length);
// For smaller files, verify full content; for larger, spot check
if (testData.Length <= sizeMb * 2)
{
Assert.Equal(testData, extractedData);
}
else
{
VerifyDataSpotCheck(testData, extractedData);
}
VerifyCompressionRatio(
testData.Length,
zipStream.Length,
expectedRatio,
$"{compressionType} Level {compressionLevel}"
);
}*/
VerifyDataSpotCheck(testData, extractedData);
}
VerifyCompressionRatio(
testData.Length,
zipStream.Length,
expectedRatio,
$"{compressionType} Level {compressionLevel}"
);
}
}

View File

@@ -1,6 +1,5 @@
using System;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.IO;
@@ -12,26 +11,25 @@ using Xunit;
namespace SharpCompress.Test.Zip;
/*
public class ZipReaderTests : ReaderTests
{
public ZipReaderTests() => UseExtensionInsteadOfNameToVerify = true;
[Fact]
public async Task Issue_269_Double_Skip()
public void Issue_269_Double_Skip()
{
var path = Path.Combine(TEST_ARCHIVES_PATH, "PrePostHeaders.zip");
using Stream stream = new ForwardOnlyStream(File.OpenRead(path));
using var reader = ReaderFactory.Open(stream);
var count = 0;
while (await reader.MoveToNextEntryAsync())
while (reader.MoveToNextEntry())
{
count++;
if (!reader.Entry.IsDirectory)
{
if (count % 2 != 0)
{
await reader.WriteEntryToAsync(Stream.Null);
reader.WriteEntryTo(Stream.Null);
}
}
}
@@ -57,21 +55,21 @@ public class ZipReaderTests : ReaderTests
public void Zip_Deflate_Streamed_Read() => Read("Zip.deflate.dd.zip", CompressionType.Deflate);
[Fact]
public async Task Zip_Deflate_Streamed_Skip()
public void Zip_Deflate_Streamed_Skip()
{
using Stream stream = new ForwardOnlyStream(
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip"))
);
using var reader = ReaderFactory.Open(stream);
var x = 0;
while (await reader.MoveToNextEntryAsync())
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
x++;
if (x % 2 == 0)
{
await reader.WriteEntryToDirectoryAsync(
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -106,19 +104,19 @@ public class ZipReaderTests : ReaderTests
Read("Zip.deflate.noEmptyDirs.zip", CompressionType.Deflate);
[Fact]
public async Task Zip_BZip2_PkwareEncryption_Read()
public void Zip_BZip2_PkwareEncryption_Read()
{
using (
Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.bzip2.pkware.zip"))
)
using (var reader = ZipReader.Open(stream, new ReaderOptions { Password = "test" }))
{
while (await reader.MoveToNextEntryAsync())
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(CompressionType.BZip2, reader.Entry.CompressionType);
await reader.WriteEntryToDirectoryAsync(
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -129,18 +127,18 @@ public class ZipReaderTests : ReaderTests
}
[Fact]
public async Task Zip_Reader_Disposal_Test()
public void Zip_Reader_Disposal_Test()
{
using var stream = new TestStream(
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip"))
);
using (var reader = ReaderFactory.Open(stream))
{
while (await reader.MoveToNextEntryAsync())
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
await reader.WriteEntryToDirectoryAsync(
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -151,17 +149,17 @@ public class ZipReaderTests : ReaderTests
}
[Fact]
public async Task Zip_Reader_Disposal_Test2()
public void Zip_Reader_Disposal_Test2()
{
using var stream = new TestStream(
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip"))
);
var reader = ReaderFactory.Open(stream);
while (await reader.MoveToNextEntryAsync())
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
await reader.WriteEntryToDirectoryAsync(
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -171,8 +169,8 @@ public class ZipReaderTests : ReaderTests
}
[Fact]
public Task Zip_LZMA_WinzipAES_Read() =>
Assert.ThrowsAsync<NotSupportedException>(async () =>
public void Zip_LZMA_WinzipAES_Read() =>
Assert.Throws<NotSupportedException>(() =>
{
using (
Stream stream = File.OpenRead(
@@ -181,12 +179,12 @@ public class ZipReaderTests : ReaderTests
)
using (var reader = ZipReader.Open(stream, new ReaderOptions { Password = "test" }))
{
while (await reader.MoveToNextEntryAsync())
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(CompressionType.Unknown, reader.Entry.CompressionType);
await reader.WriteEntryToDirectoryAsync(
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -197,7 +195,7 @@ public class ZipReaderTests : ReaderTests
});
[Fact]
public async Task Zip_Deflate_WinzipAES_Read()
public void Zip_Deflate_WinzipAES_Read()
{
using (
Stream stream = File.OpenRead(
@@ -206,12 +204,12 @@ public class ZipReaderTests : ReaderTests
)
using (var reader = ZipReader.Open(stream, new ReaderOptions { Password = "test" }))
{
while (await reader.MoveToNextEntryAsync())
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(CompressionType.Unknown, reader.Entry.CompressionType);
await reader.WriteEntryToDirectoryAsync(
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -222,18 +220,18 @@ public class ZipReaderTests : ReaderTests
}
[Fact]
public async Task Zip_Deflate_ZipCrypto_Read()
public void Zip_Deflate_ZipCrypto_Read()
{
var count = 0;
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "zipcrypto.zip")))
using (var reader = ZipReader.Open(stream, new ReaderOptions { Password = "test" }))
{
while (await reader.MoveToNextEntryAsync())
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(CompressionType.None, reader.Entry.CompressionType);
await reader.WriteEntryToDirectoryAsync(
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -245,7 +243,7 @@ public class ZipReaderTests : ReaderTests
}
[Fact]
public async Task TestSharpCompressWithEmptyStream()
public void TestSharpCompressWithEmptyStream()
{
var expected = new[]
{
@@ -269,9 +267,9 @@ public class ZipReaderTests : ReaderTests
SharpCompressStream.Create(stream, leaveOpen: true, throwOnDispose: true)
);
var i = 0;
while (await zipReader.MoveToNextEntryAsync())
while (zipReader.MoveToNextEntry())
{
using (var entry = await zipReader.OpenEntryStreamAsync())
using (var entry = zipReader.OpenEntryStream())
{
var tempStream = new MemoryStream();
const int bufSize = 0x1000;
@@ -290,7 +288,7 @@ public class ZipReaderTests : ReaderTests
}
[Fact]
public async Task Zip_None_Issue86_Streamed_Read()
public void Zip_None_Issue86_Streamed_Read()
{
var keys = new[] { "Empty1", "Empty2", "Dir1/", "Dir2/", "Fake1", "Fake2", "Internal.zip" };
@@ -300,7 +298,7 @@ public class ZipReaderTests : ReaderTests
using var reader = ZipReader.Open(stream);
foreach (var key in keys)
{
await reader.MoveToNextEntryAsync();
reader.MoveToNextEntry();
Assert.Equal(reader.Entry.Key, key);
@@ -310,11 +308,11 @@ public class ZipReaderTests : ReaderTests
}
}
Assert.False(await reader.MoveToNextEntryAsync());
Assert.False(reader.MoveToNextEntry());
}
[Fact]
public async Task Zip_ReaderMoveToNextEntryAsync()
public void Zip_ReaderMoveToNextEntry()
{
var keys = new[] { "version", "sizehint", "data/0/metadata", "data/0/records" };
@@ -322,61 +320,61 @@ public class ZipReaderTests : ReaderTests
using var reader = ZipReader.Open(fileStream);
foreach (var key in keys)
{
await reader.MoveToNextEntryAsync();
reader.MoveToNextEntry();
Assert.Equal(reader.Entry.Key, key);
}
}
[Fact]
public async Task Issue_685()
public void Issue_685()
{
var count = 0;
using var fileStream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Issue_685.zip"));
using var reader = ZipReader.Open(fileStream);
while (await reader.MoveToNextEntryAsync())
while (reader.MoveToNextEntry())
{
count++;
reader.OpenEntryStreamAsync().Dispose(); // Uncomment for workaround
reader.OpenEntryStream().Dispose(); // Uncomment for workaround
}
Assert.Equal(4, count);
}
[Fact]
public async Task Zip_ReaderFactory_Uncompressed_Read_All()
public void Zip_ReaderFactory_Uncompressed_Read_All()
{
var zipPath = Path.Combine(TEST_ARCHIVES_PATH, "Zip.uncompressed.zip");
using var stream = File.OpenRead(zipPath);
using var reader = ReaderFactory.Open(stream);
while (await reader.MoveToNextEntryAsync())
while (reader.MoveToNextEntry())
{
var target = new MemoryStream();
await (await reader.OpenEntryStreamAsync()).CopyToAsync(target);
reader.OpenEntryStream().CopyTo(target);
}
}
[Fact]
public async Task Zip_ReaderFactory_Uncompressed_Skip_All()
public void Zip_ReaderFactory_Uncompressed_Skip_All()
{
var zipPath = Path.Combine(TEST_ARCHIVES_PATH, "Zip.uncompressed.zip");
using var stream = File.OpenRead(zipPath);
using var reader = ReaderFactory.Open(stream);
while (await reader.MoveToNextEntryAsync()) { }
while (reader.MoveToNextEntry()) { }
}
//this test uses a large 7zip file containing a zip file inside it to test zip64 support
// we probably shouldn't be allowing ExtractAllEntries here but it works for now.
[Fact]
public async Task Zip_Uncompressed_64bit()
public void Zip_Uncompressed_64bit()
{
var zipPath = Path.Combine(TEST_ARCHIVES_PATH, "64bitstream.zip.7z");
using var stream = File.OpenRead(zipPath);
var archive = ArchiveFactory.Open(stream);
var reader = archive.ExtractAllEntries();
await reader.MoveToNextEntryAsync();
var zipReader = ZipReader.Open(await reader.OpenEntryStreamAsync());
reader.MoveToNextEntry();
var zipReader = ZipReader.Open(reader.OpenEntryStream());
var x = 0;
while (await zipReader.MoveToNextEntryAsync())
while (zipReader.MoveToNextEntry())
{
x++;
}
@@ -391,13 +389,12 @@ public class ZipReaderTests : ReaderTests
Path.Combine(TEST_ARCHIVES_PATH, "Zip.none.encrypted.zip"),
new ReaderOptions { Password = "test" }
);
reader.MoveToNextEntryAsync();
reader.MoveToNextEntry();
Assert.Equal("first.txt", reader.Entry.Key);
Assert.Equal(199, reader.Entry.Size);
reader.OpenEntryStreamAsync().Dispose();
reader.MoveToNextEntryAsync();
reader.OpenEntryStream().Dispose();
reader.MoveToNextEntry();
Assert.Equal("second.txt", reader.Entry.Key);
Assert.Equal(197, reader.Entry.Size);
}
}
*/

View File

@@ -1,5 +1,4 @@
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common;
using Xunit;
@@ -11,8 +10,8 @@ public class ZipWriterTests : WriterTests
: base(ArchiveType.Zip) { }
[Fact]
public Task Zip_Deflate_Write() =>
WriteAsync(
public void Zip_Deflate_Write() =>
Write(
CompressionType.Deflate,
"Zip.deflate.noEmptyDirs.zip",
"Zip.deflate.noEmptyDirs.zip",
@@ -20,8 +19,8 @@ public class ZipWriterTests : WriterTests
);
[Fact]
public Task Zip_BZip2_Write() =>
WriteAsync(
public void Zip_BZip2_Write() =>
Write(
CompressionType.BZip2,
"Zip.bzip2.noEmptyDirs.zip",
"Zip.bzip2.noEmptyDirs.zip",
@@ -29,8 +28,8 @@ public class ZipWriterTests : WriterTests
);
[Fact]
public Task Zip_None_Write() =>
WriteAsync(
public void Zip_None_Write() =>
Write(
CompressionType.None,
"Zip.none.noEmptyDirs.zip",
"Zip.none.noEmptyDirs.zip",
@@ -38,8 +37,8 @@ public class ZipWriterTests : WriterTests
);
[Fact]
public Task Zip_LZMA_Write() =>
WriteAsync(
public void Zip_LZMA_Write() =>
Write(
CompressionType.LZMA,
"Zip.lzma.noEmptyDirs.zip",
"Zip.lzma.noEmptyDirs.zip",
@@ -47,8 +46,8 @@ public class ZipWriterTests : WriterTests
);
[Fact]
public Task Zip_PPMd_Write() =>
WriteAsync(
public void Zip_PPMd_Write() =>
Write(
CompressionType.PPMd,
"Zip.ppmd.noEmptyDirs.zip",
"Zip.ppmd.noEmptyDirs.zip",
@@ -56,12 +55,8 @@ public class ZipWriterTests : WriterTests
);
[Fact]
public Task Zip_Rar_Write() =>
Assert.ThrowsAsync<InvalidFormatException>(async () =>
await WriteAsync(
CompressionType.Rar,
"Zip.ppmd.noEmptyDirs.zip",
"Zip.ppmd.noEmptyDirs.zip"
)
public void Zip_Rar_Write() =>
Assert.Throws<InvalidFormatException>(() =>
Write(CompressionType.Rar, "Zip.ppmd.noEmptyDirs.zip", "Zip.ppmd.noEmptyDirs.zip")
);
}