Merge remote-tracking branch 'origin/master' into copilot/sub-pr-1076-again

Adam Hathcock
2025-12-19 13:05:33 +00:00
29 changed files with 995 additions and 483 deletions

View File

@@ -28,11 +28,13 @@ SharpCompress is a pure C# compression library supporting multiple archive forma
## Code Formatting
**Copilot agents: You MUST run the `format` task after making code changes to ensure consistency.**
- Use CSharpier for code formatting to ensure consistent style across the project
- CSharpier is configured as a local tool in `.config/dotnet-tools.json`
- **To format code, run the `format` task (which runs `dotnet csharpier .` from the project root)**
- Restore tools with: `dotnet tool restore`
- Format files from the project root with: `dotnet csharpier .`
- **Run `dotnet csharpier .` from the project root after making code changes before committing**
- Configure your IDE to format on save using CSharpier for the best experience
- The project also uses `.editorconfig` for editor settings (indentation, encoding, etc.)
- Let CSharpier handle code style while `.editorconfig` handles editor behavior
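
Put together, a clean formatting pass from a fresh checkout is the two commands already listed in the bullets above, run from the project root:

```
dotnet tool restore
dotnet csharpier .
```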

View File

@@ -11,6 +11,7 @@ const string Restore = "restore";
const string Build = "build";
const string Test = "test";
const string Format = "format";
const string CheckFormat = "check-format";
const string Publish = "publish";
Target(
@@ -42,12 +43,20 @@ Target(
Target(
Format,
() =>
{
Run("dotnet", "tool restore");
Run("dotnet", "csharpier format .");
}
);
Target(
CheckFormat,
() =>
{
Run("dotnet", "tool restore");
Run("dotnet", "csharpier check .");
}
);
Target(Restore, [Format], () => Run("dotnet", "restore"));
Target(Restore, [CheckFormat], () => Run("dotnet", "restore"));
Target(
Build,

View File

@@ -8,7 +8,7 @@ using SharpCompress.Readers;
namespace SharpCompress.Archives;
public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtractionListener
public abstract class AbstractArchive<TEntry, TVolume> : IArchive
where TEntry : IArchiveEntry
where TVolume : IVolume
{
@@ -17,11 +17,6 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
private bool _disposed;
private readonly SourceStream? _sourceStream;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionBegin;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionEnd;
public event EventHandler<CompressedBytesReadEventArgs>? CompressedBytesRead;
public event EventHandler<FilePartExtractionBeginEventArgs>? FilePartExtractionBegin;
protected ReaderOptions ReaderOptions { get; }
internal AbstractArchive(ArchiveType type, SourceStream sourceStream)
@@ -43,12 +38,6 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
public ArchiveType Type { get; }
void IArchiveExtractionListener.FireEntryExtractionBegin(IArchiveEntry entry) =>
EntryExtractionBegin?.Invoke(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));
void IArchiveExtractionListener.FireEntryExtractionEnd(IArchiveEntry entry) =>
EntryExtractionEnd?.Invoke(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));
private static Stream CheckStreams(Stream stream)
{
if (!stream.CanSeek || !stream.CanRead)
@@ -99,38 +88,12 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
}
}
void IArchiveExtractionListener.EnsureEntriesLoaded()
private void EnsureEntriesLoaded()
{
_lazyEntries.EnsureFullyLoaded();
_lazyVolumes.EnsureFullyLoaded();
}
void IExtractionListener.FireCompressedBytesRead(
long currentPartCompressedBytes,
long compressedReadBytes
) =>
CompressedBytesRead?.Invoke(
this,
new CompressedBytesReadEventArgs(
currentFilePartCompressedBytesRead: currentPartCompressedBytes,
compressedBytesRead: compressedReadBytes
)
);
void IExtractionListener.FireFilePartExtractionBegin(
string name,
long size,
long compressedSize
) =>
FilePartExtractionBegin?.Invoke(
this,
new FilePartExtractionBeginEventArgs(
compressedSize: compressedSize,
size: size,
name: name
)
);
/// <summary>
/// Use this method to extract all entries in an archive in order.
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
@@ -150,7 +113,7 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
"ExtractAllEntries can only be used on solid archives or 7Zip archives (which require random access)."
);
}
((IArchiveExtractionListener)this).EnsureEntriesLoaded();
EnsureEntriesLoaded();
return CreateReaderForSolidExtraction();
}
@@ -173,7 +136,7 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
{
get
{
((IArchiveExtractionListener)this).EnsureEntriesLoaded();
EnsureEntriesLoaded();
return Entries.All(x => x.IsComplete);
}
}
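
As the doc comment above notes, `ExtractAllEntries` is the supported way to stream a solid archive in entry order. A minimal hedged sketch of the call pattern, assuming `archive` is an already-opened solid RAR or 7Zip `IArchive` (names not shown in this diff are illustrative):

```csharp
// Stream entries in order via the reader returned by ExtractAllEntries,
// rather than random-accessing each entry of a solid archive.
using var reader = archive.ExtractAllEntries();
while (reader.MoveToNextEntry())
{
    if (!reader.Entry.IsDirectory)
    {
        using var destination = new MemoryStream();
        reader.WriteEntryTo(destination);
    }
}
```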

View File

@@ -7,12 +7,6 @@ namespace SharpCompress.Archives;
public interface IArchive : IDisposable
{
event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionBegin;
event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionEnd;
event EventHandler<CompressedBytesReadEventArgs> CompressedBytesRead;
event EventHandler<FilePartExtractionBeginEventArgs> FilePartExtractionBegin;
IEnumerable<IArchiveEntry> Entries { get; }
IEnumerable<IVolume> Volumes { get; }

View File

@@ -1,3 +1,4 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
@@ -8,34 +9,18 @@ namespace SharpCompress.Archives;
public static class IArchiveEntryExtensions
{
public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWriteTo)
{
if (archiveEntry.IsDirectory)
{
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
}
private const int BufferSize = 81920;
var streamListener = (IArchiveExtractionListener)archiveEntry.Archive;
streamListener.EnsureEntriesLoaded();
streamListener.FireEntryExtractionBegin(archiveEntry);
streamListener.FireFilePartExtractionBegin(
archiveEntry.Key ?? "Key",
archiveEntry.Size,
archiveEntry.CompressedSize
);
var entryStream = archiveEntry.OpenEntryStream();
using (entryStream)
{
using Stream s = new ListeningStream(streamListener, entryStream);
s.CopyTo(streamToWriteTo);
}
streamListener.FireEntryExtractionEnd(archiveEntry);
}
public static async Task WriteToAsync(
/// <summary>
/// Extract entry to the specified stream.
/// </summary>
/// <param name="archiveEntry">The archive entry to extract.</param>
/// <param name="streamToWriteTo">The stream to write the entry content to.</param>
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
public static void WriteTo(
this IArchiveEntry archiveEntry,
Stream streamToWriteTo,
CancellationToken cancellationToken = default
IProgress<ProgressReport>? progress = null
)
{
if (archiveEntry.IsDirectory)
@@ -43,21 +28,70 @@ public static class IArchiveEntryExtensions
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
}
var streamListener = (IArchiveExtractionListener)archiveEntry.Archive;
streamListener.EnsureEntriesLoaded();
streamListener.FireEntryExtractionBegin(archiveEntry);
streamListener.FireFilePartExtractionBegin(
archiveEntry.Key ?? "Key",
archiveEntry.Size,
archiveEntry.CompressedSize
);
var entryStream = archiveEntry.OpenEntryStream();
using (entryStream)
using var entryStream = archiveEntry.OpenEntryStream();
var sourceStream = WrapWithProgress(entryStream, archiveEntry, progress);
sourceStream.CopyTo(streamToWriteTo, BufferSize);
}
/// <summary>
/// Extract entry to the specified stream asynchronously.
/// </summary>
/// <param name="archiveEntry">The archive entry to extract.</param>
/// <param name="streamToWriteTo">The stream to write the entry content to.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
public static async Task WriteToAsync(
this IArchiveEntry archiveEntry,
Stream streamToWriteTo,
CancellationToken cancellationToken = default,
IProgress<ProgressReport>? progress = null
)
{
if (archiveEntry.IsDirectory)
{
using Stream s = new ListeningStream(streamListener, entryStream);
await s.CopyToAsync(streamToWriteTo, 81920, cancellationToken).ConfigureAwait(false);
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
}
using var entryStream = archiveEntry.OpenEntryStream();
var sourceStream = WrapWithProgress(entryStream, archiveEntry, progress);
await sourceStream
.CopyToAsync(streamToWriteTo, BufferSize, cancellationToken)
.ConfigureAwait(false);
}
private static Stream WrapWithProgress(
Stream source,
IArchiveEntry entry,
IProgress<ProgressReport>? progress
)
{
if (progress is null)
{
return source;
}
var entryPath = entry.Key ?? string.Empty;
long? totalBytes = GetEntrySizeSafe(entry);
return new ProgressReportingStream(
source,
progress,
entryPath,
totalBytes,
leaveOpen: true
);
}
private static long? GetEntrySizeSafe(IArchiveEntry entry)
{
try
{
var size = entry.Size;
return size >= 0 ? size : null;
}
catch (NotImplementedException)
{
return null;
}
streamListener.FireEntryExtractionEnd(archiveEntry);
}
/// <summary>
@@ -127,7 +161,9 @@ public static class IArchiveEntryExtensions
async (x, fm) =>
{
using var fs = File.Open(destinationFileName, fm);
await entry.WriteToAsync(fs, cancellationToken).ConfigureAwait(false);
await entry
.WriteToAsync(fs, progress: null, cancellationToken: cancellationToken)
.ConfigureAwait(false);
},
cancellationToken
);
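
The reworked `WriteTo`/`WriteToAsync` overloads surface progress as an optional `IProgress<ProgressReport>` parameter. A hedged sketch of calling them, condensed from the tests added later in this commit (`entry` stands in for an `IArchiveEntry` from an open archive):

```csharp
var progress = new Progress<ProgressReport>(r =>
    Console.WriteLine($"{r.EntryPath}: {r.BytesTransferred}/{r.TotalBytes?.ToString() ?? "?"} bytes"));

using var output = new MemoryStream();
entry.WriteTo(output, progress);

// The async overload takes the cancellation token before the reporter:
await entry.WriteToAsync(output, CancellationToken.None, progress);
```

Note that `Progress<T>` posts callbacks through the captured `SynchronizationContext`; the tests below use a synchronous `IProgress<T>` implementation instead, so reports are observable immediately.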

View File

@@ -1,10 +0,0 @@
using SharpCompress.Common;
namespace SharpCompress.Archives;
internal interface IArchiveExtractionListener : IExtractionListener
{
void EnsureEntriesLoaded();
void FireEntryExtractionBegin(IArchiveEntry entry);
void FireEntryExtractionEnd(IArchiveEntry entry);
}

View File

@@ -76,7 +76,7 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
stream = new RarStream(
archive.UnpackV1.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>())
);
}
else
@@ -84,7 +84,7 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
stream = new RarStream(
archive.UnpackV2017.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>())
);
}
@@ -100,7 +100,7 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
stream = new RarStream(
archive.UnpackV1.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>())
);
}
else
@@ -108,7 +108,7 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
stream = new RarStream(
archive.UnpackV2017.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>())
);
}

View File

@@ -1,10 +0,0 @@
using System;
namespace SharpCompress.Common;
public class ArchiveExtractionEventArgs<T> : EventArgs
{
internal ArchiveExtractionEventArgs(T entry) => Item = entry;
public T Item { get; }
}

View File

@@ -1,25 +0,0 @@
using System;
namespace SharpCompress.Common;
public sealed class CompressedBytesReadEventArgs : EventArgs
{
public CompressedBytesReadEventArgs(
long compressedBytesRead,
long currentFilePartCompressedBytesRead
)
{
CompressedBytesRead = compressedBytesRead;
CurrentFilePartCompressedBytesRead = currentFilePartCompressedBytesRead;
}
/// <summary>
/// Compressed bytes read for the current entry
/// </summary>
public long CompressedBytesRead { get; }
/// <summary>
/// Current file part read for Multipart files (e.g. Rar)
/// </summary>
public long CurrentFilePartCompressedBytesRead { get; }
}

View File

@@ -1,28 +0,0 @@
using System;
namespace SharpCompress.Common;
public sealed class FilePartExtractionBeginEventArgs : EventArgs
{
public FilePartExtractionBeginEventArgs(string name, long size, long compressedSize)
{
Name = name;
Size = size;
CompressedSize = compressedSize;
}
/// <summary>
/// File name for the part for the current entry
/// </summary>
public string Name { get; }
/// <summary>
/// Uncompressed size of the current entry in the part
/// </summary>
public long Size { get; }
/// <summary>
/// Compressed size of the current entry in the part
/// </summary>
public long CompressedSize { get; }
}

View File

@@ -1,7 +0,0 @@
namespace SharpCompress.Common;
public interface IExtractionListener
{
void FireFilePartExtractionBegin(string name, long size, long compressedSize);
void FireCompressedBytesRead(long currentPartCompressedBytes, long compressedReadBytes);
}

View File

@@ -0,0 +1,43 @@
namespace SharpCompress.Common;
/// <summary>
/// Represents progress information for compression or extraction operations.
/// </summary>
public sealed class ProgressReport
{
/// <summary>
/// Initializes a new instance of the <see cref="ProgressReport"/> class.
/// </summary>
/// <param name="entryPath">The path of the entry being processed.</param>
/// <param name="bytesTransferred">Number of bytes transferred so far.</param>
/// <param name="totalBytes">Total bytes to be transferred, or null if unknown.</param>
public ProgressReport(string entryPath, long bytesTransferred, long? totalBytes)
{
EntryPath = entryPath;
BytesTransferred = bytesTransferred;
TotalBytes = totalBytes;
}
/// <summary>
/// Gets the path of the entry being processed.
/// </summary>
public string EntryPath { get; }
/// <summary>
/// Gets the number of bytes transferred so far.
/// </summary>
public long BytesTransferred { get; }
/// <summary>
/// Gets the total number of bytes to be transferred, or null if unknown.
/// </summary>
public long? TotalBytes { get; }
/// <summary>
/// Gets the progress percentage (0-100), or null if total bytes is unknown.
/// </summary>
public double? PercentComplete =>
TotalBytes.HasValue && TotalBytes.Value > 0
? (double)BytesTransferred / TotalBytes.Value * 100
: null;
}
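
Because `PercentComplete` divides `BytesTransferred` by `TotalBytes`, a report of 500 out of 1000 bytes yields 50.0, while an unknown or zero total yields `null`; the tests at the end of this commit pin down exactly these cases:

```csharp
var halfway = new ProgressReport("path/to/file.txt", 500, 1000);
// halfway.PercentComplete == 50.0
var unknown = new ProgressReport("path/to/file.txt", 500, null);
// unknown.PercentComplete == null (total size unknown)
var empty = new ProgressReport("path/to/file.txt", 0, 0);
// empty.PercentComplete == null (guards against divide-by-zero)
```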

View File

@@ -1,17 +0,0 @@
using System;
using SharpCompress.Readers;
namespace SharpCompress.Common;
public sealed class ReaderExtractionEventArgs<T> : EventArgs
{
internal ReaderExtractionEventArgs(T entry, ReaderProgress? readerProgress = null)
{
Item = entry;
ReaderProgress = readerProgress;
}
public T Item { get; }
public ReaderProgress? ReaderProgress { get; }
}

View File

@@ -37,18 +37,8 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
private IEnumerator<RarFilePart> filePartEnumerator;
private Stream currentStream;
private readonly IExtractionListener streamListener;
private long currentPartTotalReadBytes;
private long currentEntryTotalReadBytes;
internal MultiVolumeReadOnlyStream(
IEnumerable<RarFilePart> parts,
IExtractionListener streamListener
)
internal MultiVolumeReadOnlyStream(IEnumerable<RarFilePart> parts)
{
this.streamListener = streamListener;
filePartEnumerator = parts.GetEnumerator();
filePartEnumerator.MoveNext();
InitializeNextFilePart();
@@ -81,15 +71,7 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
currentPosition = 0;
currentStream = filePartEnumerator.Current.GetCompressedStream();
currentPartTotalReadBytes = 0;
CurrentCrc = filePartEnumerator.Current.FileHeader.FileCrc;
streamListener.FireFilePartExtractionBegin(
filePartEnumerator.Current.FilePartName,
filePartEnumerator.Current.FileHeader.CompressedSize,
filePartEnumerator.Current.FileHeader.UncompressedSize
);
}
public override int Read(byte[] buffer, int offset, int count)
@@ -141,12 +123,6 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
break;
}
}
currentPartTotalReadBytes += totalRead;
currentEntryTotalReadBytes += totalRead;
streamListener.FireCompressedBytesRead(
currentPartTotalReadBytes,
currentEntryTotalReadBytes
);
return totalRead;
}
@@ -206,12 +182,6 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
break;
}
}
currentPartTotalReadBytes += totalRead;
currentEntryTotalReadBytes += totalRead;
streamListener.FireCompressedBytesRead(
currentPartTotalReadBytes,
currentEntryTotalReadBytes
);
return totalRead;
}
@@ -270,12 +240,6 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
break;
}
}
currentPartTotalReadBytes += totalRead;
currentEntryTotalReadBytes += totalRead;
streamListener.FireCompressedBytesRead(
currentPartTotalReadBytes,
currentEntryTotalReadBytes
);
return totalRead;
}
#endif

View File

@@ -1,97 +0,0 @@
using System.IO;
using SharpCompress.Common;
namespace SharpCompress.IO;
internal class ListeningStream : Stream, IStreamStack
{
#if DEBUG_STREAMS
long IStreamStack.InstanceId { get; set; }
#endif
int IStreamStack.DefaultBufferSize { get; set; }
Stream IStreamStack.BaseStream() => Stream;
int IStreamStack.BufferSize
{
get => 0;
set { return; }
}
int IStreamStack.BufferPosition
{
get => 0;
set { return; }
}
void IStreamStack.SetPosition(long position) { }
private long _currentEntryTotalReadBytes;
private readonly IExtractionListener _listener;
public ListeningStream(IExtractionListener listener, Stream stream)
{
Stream = stream;
this._listener = listener;
#if DEBUG_STREAMS
this.DebugConstruct(typeof(ListeningStream));
#endif
}
protected override void Dispose(bool disposing)
{
#if DEBUG_STREAMS
this.DebugDispose(typeof(ListeningStream));
#endif
if (disposing)
{
Stream.Dispose();
}
base.Dispose(disposing);
}
public Stream Stream { get; }
public override bool CanRead => Stream.CanRead;
public override bool CanSeek => Stream.CanSeek;
public override bool CanWrite => Stream.CanWrite;
public override void Flush() => Stream.Flush();
public override long Length => Stream.Length;
public override long Position
{
get => Stream.Position;
set => Stream.Position = value;
}
public override int Read(byte[] buffer, int offset, int count)
{
var read = Stream.Read(buffer, offset, count);
_currentEntryTotalReadBytes += read;
_listener.FireCompressedBytesRead(_currentEntryTotalReadBytes, _currentEntryTotalReadBytes);
return read;
}
public override int ReadByte()
{
var value = Stream.ReadByte();
if (value == -1)
{
return -1;
}
++_currentEntryTotalReadBytes;
_listener.FireCompressedBytesRead(_currentEntryTotalReadBytes, _currentEntryTotalReadBytes);
return value;
}
public override long Seek(long offset, SeekOrigin origin) => Stream.Seek(offset, origin);
public override void SetLength(long value) => Stream.SetLength(value);
public override void Write(byte[] buffer, int offset, int count) =>
Stream.Write(buffer, offset, count);
}

View File

@@ -0,0 +1,160 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.IO;
/// <summary>
/// A stream wrapper that reports progress as data is read from the source.
/// Used to track compression or extraction progress by wrapping the source stream.
/// </summary>
internal sealed class ProgressReportingStream : Stream
{
private readonly Stream _baseStream;
private readonly IProgress<ProgressReport> _progress;
private readonly string _entryPath;
private readonly long? _totalBytes;
private long _bytesTransferred;
private readonly bool _leaveOpen;
public ProgressReportingStream(
Stream baseStream,
IProgress<ProgressReport> progress,
string entryPath,
long? totalBytes,
bool leaveOpen = false
)
{
_baseStream = baseStream;
_progress = progress;
_entryPath = entryPath;
_totalBytes = totalBytes;
_leaveOpen = leaveOpen;
}
public override bool CanRead => _baseStream.CanRead;
public override bool CanSeek => _baseStream.CanSeek;
public override bool CanWrite => false;
public override long Length => _baseStream.Length;
public override long Position
{
get => _baseStream.Position;
set =>
throw new NotSupportedException(
"Directly setting Position is not supported in ProgressReportingStream to maintain progress tracking integrity."
);
}
public override void Flush() => _baseStream.Flush();
public override int Read(byte[] buffer, int offset, int count)
{
var bytesRead = _baseStream.Read(buffer, offset, count);
if (bytesRead > 0)
{
_bytesTransferred += bytesRead;
ReportProgress();
}
return bytesRead;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override int Read(Span<byte> buffer)
{
var bytesRead = _baseStream.Read(buffer);
if (bytesRead > 0)
{
_bytesTransferred += bytesRead;
ReportProgress();
}
return bytesRead;
}
#endif
public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
var bytesRead = await _baseStream
.ReadAsync(buffer, offset, count, cancellationToken)
.ConfigureAwait(false);
if (bytesRead > 0)
{
_bytesTransferred += bytesRead;
ReportProgress();
}
return bytesRead;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
var bytesRead = await _baseStream
.ReadAsync(buffer, cancellationToken)
.ConfigureAwait(false);
if (bytesRead > 0)
{
_bytesTransferred += bytesRead;
ReportProgress();
}
return bytesRead;
}
#endif
public override int ReadByte()
{
var value = _baseStream.ReadByte();
if (value != -1)
{
_bytesTransferred++;
ReportProgress();
}
return value;
}
public override long Seek(long offset, SeekOrigin origin) => _baseStream.Seek(offset, origin);
public override void SetLength(long value) => _baseStream.SetLength(value);
public override void Write(byte[] buffer, int offset, int count) =>
throw new NotSupportedException(
"ProgressReportingStream is designed for read operations to track progress."
);
private void ReportProgress()
{
_progress.Report(new ProgressReport(_entryPath, _bytesTransferred, _totalBytes));
}
protected override void Dispose(bool disposing)
{
if (disposing && !_leaveOpen)
{
_baseStream.Dispose();
}
base.Dispose(disposing);
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask DisposeAsync()
{
if (!_leaveOpen)
{
await _baseStream.DisposeAsync().ConfigureAwait(false);
}
await base.DisposeAsync().ConfigureAwait(false);
}
#endif
}
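
One design note on the wrapper above: every call site introduced in this commit constructs it with `leaveOpen: true`, so disposing the `ProgressReportingStream` never closes the underlying entry or source stream; that stream's lifetime stays with the caller's `using` block.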

View File

@@ -5,13 +5,14 @@ using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Readers;
/// <summary>
/// A generic push reader that reads unseekable compressed streams.
/// </summary>
public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtractionListener
public abstract class AbstractReader<TEntry, TVolume> : IReader
where TEntry : Entry
where TVolume : Volume
{
@@ -19,11 +20,6 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
private IEnumerator<TEntry>? _entriesForCurrentReadStream;
private bool _wroteCurrentEntry;
public event EventHandler<ReaderExtractionEventArgs<IEntry>>? EntryExtractionProgress;
public event EventHandler<CompressedBytesReadEventArgs>? CompressedBytesRead;
public event EventHandler<FilePartExtractionBeginEventArgs>? FilePartExtractionBegin;
internal AbstractReader(ReaderOptions options, ArchiveType archiveType)
{
ArchiveType = archiveType;
@@ -264,25 +260,58 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
internal void Write(Stream writeStream)
{
var streamListener = this as IReaderExtractionListener;
using Stream s = OpenEntryStream();
s.TransferTo(writeStream, Entry, streamListener);
var sourceStream = WrapWithProgress(s, Entry);
sourceStream.CopyTo(writeStream, 81920);
}
internal async Task WriteAsync(Stream writeStream, CancellationToken cancellationToken)
{
var streamListener = this as IReaderExtractionListener;
#if NETFRAMEWORK || NETSTANDARD2_0
using Stream s = OpenEntryStream();
await s.TransferToAsync(writeStream, Entry, streamListener, cancellationToken)
.ConfigureAwait(false);
var sourceStream = WrapWithProgress(s, Entry);
await sourceStream.CopyToAsync(writeStream, 81920, cancellationToken).ConfigureAwait(false);
#else
await using Stream s = OpenEntryStream();
await s.TransferToAsync(writeStream, Entry, streamListener, cancellationToken)
.ConfigureAwait(false);
var sourceStream = WrapWithProgress(s, Entry);
await sourceStream.CopyToAsync(writeStream, 81920, cancellationToken).ConfigureAwait(false);
#endif
}
private Stream WrapWithProgress(Stream source, Entry entry)
{
var progress = Options.Progress;
if (progress is null)
{
return source;
}
var entryPath = entry.Key ?? string.Empty;
long? totalBytes = GetEntrySizeSafe(entry);
return new ProgressReportingStream(
source,
progress,
entryPath,
totalBytes,
leaveOpen: true
);
}
private static long? GetEntrySizeSafe(Entry entry)
{
try
{
var size = entry.Size;
// Return the actual size (including 0 for empty entries)
// Negative values indicate unknown size
return size >= 0 ? size : null;
}
catch (NotImplementedException)
{
return null;
}
}
public EntryStream OpenEntryStream()
{
if (_wroteCurrentEntry)
@@ -325,43 +354,4 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
#endregion
IEntry IReader.Entry => Entry;
void IExtractionListener.FireCompressedBytesRead(
long currentPartCompressedBytes,
long compressedReadBytes
) =>
CompressedBytesRead?.Invoke(
this,
new CompressedBytesReadEventArgs(
currentFilePartCompressedBytesRead: currentPartCompressedBytes,
compressedBytesRead: compressedReadBytes
)
);
void IExtractionListener.FireFilePartExtractionBegin(
string name,
long size,
long compressedSize
) =>
FilePartExtractionBegin?.Invoke(
this,
new FilePartExtractionBeginEventArgs(
compressedSize: compressedSize,
size: size,
name: name
)
);
void IReaderExtractionListener.FireEntryExtractionProgress(
Entry entry,
long bytesTransferred,
int iterations
) =>
EntryExtractionProgress?.Invoke(
this,
new ReaderExtractionEventArgs<IEntry>(
entry,
new ReaderProgress(entry, bytesTransferred, iterations)
)
);
}

View File

@@ -8,11 +8,6 @@ namespace SharpCompress.Readers;
public interface IReader : IDisposable
{
event EventHandler<ReaderExtractionEventArgs<IEntry>> EntryExtractionProgress;
event EventHandler<CompressedBytesReadEventArgs> CompressedBytesRead;
event EventHandler<FilePartExtractionBeginEventArgs> FilePartExtractionBegin;
ArchiveType ArchiveType { get; }
IEntry Entry { get; }

View File

@@ -1,8 +0,0 @@
using SharpCompress.Common;
namespace SharpCompress.Readers;
public interface IReaderExtractionListener : IExtractionListener
{
void FireEntryExtractionProgress(Entry entry, long sizeTransferred, int iterations);
}

View File

@@ -108,8 +108,7 @@ public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
}
var stream = new MultiVolumeReadOnlyStream(
CreateFilePartEnumerableForCurrentEntry().Cast<RarFilePart>(),
this
CreateFilePartEnumerableForCurrentEntry().Cast<RarFilePart>()
);
if (Entry.IsRarV3)
{
@@ -136,8 +135,7 @@ public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
}
var stream = new MultiVolumeReadOnlyStream(
CreateFilePartEnumerableForCurrentEntry().Cast<RarFilePart>(),
this
CreateFilePartEnumerableForCurrentEntry().Cast<RarFilePart>()
);
if (Entry.IsRarV3)
{

View File

@@ -1,3 +1,4 @@
using System;
using SharpCompress.Common;
namespace SharpCompress.Readers;
@@ -21,4 +22,10 @@ public class ReaderOptions : OptionsBase
/// Provide a hint for the extension of the archive being read; this can speed up finding the correct decoder. Specify it without the leading period, in a form like tar.gz or zip
/// </summary>
public string? ExtensionHint { get; set; }
/// <summary>
/// An optional progress reporter for tracking extraction operations.
/// When set, progress updates will be reported as entries are extracted.
/// </summary>
public IProgress<ProgressReport>? Progress { get; set; }
}
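
Wiring the new option up is a one-liner; the following is condensed from the `Zip_Read_ReportsProgress` test later in this commit (`progress` is any `IProgress<ProgressReport>` implementation):

```csharp
var readerOptions = new ReaderOptions { Progress = progress };
using var reader = ReaderFactory.Open(archiveStream, readerOptions);
while (reader.MoveToNextEntry())
{
    if (!reader.Entry.IsDirectory)
    {
        using var extractedStream = new MemoryStream();
        reader.WriteEntryTo(extractedStream);
    }
}
```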

View File

@@ -1,21 +0,0 @@
using System;
using SharpCompress.Common;
namespace SharpCompress.Readers;
public class ReaderProgress
{
private readonly IEntry _entry;
public long BytesTransferred { get; }
public int Iterations { get; }
public int PercentageRead => (int)Math.Round(PercentageReadExact);
public double PercentageReadExact => (float)BytesTransferred / _entry.Size * 100;
public ReaderProgress(IEntry entry, long bytesTransferred, int iterations)
{
_entry = entry;
BytesTransferred = bytesTransferred;
Iterations = iterations;
}
}

View File

@@ -6,7 +6,6 @@ using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Readers;
namespace SharpCompress;
@@ -216,34 +215,6 @@ internal static class Utility
}
}
public static long TransferTo(
this Stream source,
Stream destination,
Common.Entry entry,
IReaderExtractionListener readerExtractionListener
)
{
var array = ArrayPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
try
{
var iterations = 0;
long total = 0;
int count;
while ((count = source.Read(array, 0, array.Length)) != 0)
{
total += count;
destination.Write(array, 0, count);
iterations++;
readerExtractionListener.FireEntryExtractionProgress(entry, total, iterations);
}
return total;
}
finally
{
ArrayPool<byte>.Shared.Return(array);
}
}
public static async Task<long> TransferToAsync(
this Stream source,
Stream destination,
@@ -290,43 +261,6 @@ internal static class Utility
}
}
public static async Task<long> TransferToAsync(
this Stream source,
Stream destination,
Common.Entry entry,
IReaderExtractionListener readerExtractionListener,
CancellationToken cancellationToken = default
)
{
var array = ArrayPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
try
{
var iterations = 0;
long total = 0;
int count;
while (
(
count = await source
.ReadAsync(array, 0, array.Length, cancellationToken)
.ConfigureAwait(false)
) != 0
)
{
total += count;
await destination
.WriteAsync(array, 0, count, cancellationToken)
.ConfigureAwait(false);
iterations++;
readerExtractionListener.FireEntryExtractionProgress(entry, total, iterations);
}
return total;
}
finally
{
ArrayPool<byte>.Shared.Return(array);
}
}
private static bool ReadTransferBlock(Stream source, byte[] array, int maxSize, out int count)
{
var size = maxSize;

View File

@@ -3,6 +3,7 @@ using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Writers;
@@ -22,6 +23,29 @@ public abstract class AbstractWriter(ArchiveType type, WriterOptions writerOptio
protected WriterOptions WriterOptions { get; } = writerOptions;
/// <summary>
/// Wraps the source stream with a progress-reporting stream if progress reporting is enabled.
/// </summary>
/// <param name="source">The source stream to wrap.</param>
/// <param name="entryPath">The path of the entry being written.</param>
/// <returns>A stream that reports progress, or the original stream if progress is not enabled.</returns>
protected Stream WrapWithProgress(Stream source, string entryPath)
{
if (WriterOptions.Progress is null)
{
return source;
}
long? totalBytes = source.CanSeek ? source.Length : null;
return new ProgressReportingStream(
source,
WriterOptions.Progress,
entryPath,
totalBytes,
leaveOpen: true
);
}
public abstract void Write(string filename, Stream source, DateTime? modificationTime);
public virtual async Task WriteAsync(

View File

@@ -47,7 +47,8 @@ public sealed class GZipWriter : AbstractWriter
var stream = (GZipStream)OutputStream;
stream.FileName = filename;
stream.LastModified = modificationTime;
source.CopyTo(stream);
var progressStream = WrapWithProgress(source, filename);
progressStream.CopyTo(stream);
_wroteToStream = true;
}

View File

@@ -129,7 +129,8 @@ public class TarWriter : AbstractWriter
header.Name = NormalizeFilename(filename);
header.Size = realSize;
header.Write(OutputStream);
size = source.TransferTo(OutputStream, realSize);
var progressStream = WrapWithProgress(source, filename);
size = progressStream.TransferTo(OutputStream, realSize);
PadTo512(size.Value);
}
@@ -161,7 +162,8 @@ public class TarWriter : AbstractWriter
header.Name = NormalizeFilename(filename);
header.Size = realSize;
header.Write(OutputStream);
var written = await source
var progressStream = WrapWithProgress(source, filename);
var written = await progressStream
.TransferToAsync(OutputStream, realSize, cancellationToken)
.ConfigureAwait(false);
PadTo512(written);

View File

@@ -1,3 +1,4 @@
using System;
using SharpCompress.Common;
using D = SharpCompress.Compressors.Deflate;
@@ -36,6 +37,12 @@ public class WriterOptions : OptionsBase
/// </summary>
public int CompressionLevel { get; set; }
/// <summary>
/// An optional progress reporter for tracking compression operations.
/// When set, progress updates will be reported as entries are written.
/// </summary>
public IProgress<ProgressReport>? Progress { get; set; }
public static implicit operator WriterOptions(CompressionType compressionType) =>
new(compressionType);
}
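
Every `WriterOptions` subtype inherits the property; condensed from the `Zip_Write_ReportsProgress` test below:

```csharp
var options = new ZipWriterOptions(CompressionType.Deflate) { Progress = progress };
using (var writer = new ZipWriter(archiveStream, options))
{
    writer.Write("test.txt", sourceStream, DateTime.Now);
}
```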

View File

@@ -86,7 +86,8 @@ public class ZipWriter : AbstractWriter
public void Write(string entryPath, Stream source, ZipWriterEntryOptions zipWriterEntryOptions)
{
using var output = WriteToStream(entryPath, zipWriterEntryOptions);
source.CopyTo(output);
var progressStream = WrapWithProgress(source, entryPath);
progressStream.CopyTo(output);
}
public Stream WriteToStream(string entryPath, ZipWriterEntryOptions options)

View File

@@ -0,0 +1,605 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Writers;
using SharpCompress.Writers.Tar;
using SharpCompress.Writers.Zip;
using Xunit;
namespace SharpCompress.Test;
/// <summary>
/// A synchronous progress implementation for testing.
/// Unlike Progress&lt;T&gt;, this captures reports immediately without SynchronizationContext.
/// </summary>
internal sealed class TestProgress<T> : IProgress<T>
{
private readonly List<T> _reports = new();
public IReadOnlyList<T> Reports => _reports;
public void Report(T value) => _reports.Add(value);
}
public class ProgressReportTests : TestBase
{
private static byte[] CreateTestData(int size, byte fillValue)
{
var data = new byte[size];
for (var i = 0; i < size; i++)
{
data[i] = fillValue;
}
return data;
}
[Fact]
public void Zip_Write_ReportsProgress()
{
var progress = new TestProgress<ProgressReport>();
using var archiveStream = new MemoryStream();
var options = new ZipWriterOptions(CompressionType.Deflate) { Progress = progress };
using (var writer = new ZipWriter(archiveStream, options))
{
var testData = CreateTestData(10000, (byte)'A');
using var sourceStream = new MemoryStream(testData);
writer.Write("test.txt", sourceStream, DateTime.Now);
}
Assert.NotEmpty(progress.Reports);
Assert.All(progress.Reports, p => Assert.Equal("test.txt", p.EntryPath));
Assert.All(progress.Reports, p => Assert.Equal(10000, p.TotalBytes));
var lastReport = progress.Reports[progress.Reports.Count - 1];
Assert.Equal(10000, lastReport.BytesTransferred);
Assert.Equal(100.0, lastReport.PercentComplete);
}
[Fact]
public void Tar_Write_ReportsProgress()
{
var progress = new TestProgress<ProgressReport>();
using var archiveStream = new MemoryStream();
var options = new TarWriterOptions(CompressionType.None, true) { Progress = progress };
using (var writer = new TarWriter(archiveStream, options))
{
var testData = CreateTestData(10000, (byte)'A');
using var sourceStream = new MemoryStream(testData);
writer.Write("test.txt", sourceStream, DateTime.Now);
}
Assert.NotEmpty(progress.Reports);
Assert.All(progress.Reports, p => Assert.Equal("test.txt", p.EntryPath));
Assert.All(progress.Reports, p => Assert.Equal(10000, p.TotalBytes));
var lastReport = progress.Reports[progress.Reports.Count - 1];
Assert.Equal(10000, lastReport.BytesTransferred);
Assert.Equal(100.0, lastReport.PercentComplete);
}
[Fact]
public void Zip_Read_ReportsProgress()
{
var progress = new TestProgress<ProgressReport>();
// First create a zip archive
using var archiveStream = new MemoryStream();
using (
var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
)
{
var testData = CreateTestData(10000, (byte)'A');
using var sourceStream = new MemoryStream(testData);
writer.Write("test.txt", sourceStream, DateTime.Now);
}
// Now read it with progress reporting
archiveStream.Position = 0;
var readerOptions = new ReaderOptions { Progress = progress };
using (var reader = ReaderFactory.Open(archiveStream, readerOptions))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
using var extractedStream = new MemoryStream();
reader.WriteEntryTo(extractedStream);
}
}
}
Assert.NotEmpty(progress.Reports);
Assert.All(progress.Reports, p => Assert.Equal("test.txt", p.EntryPath));
var lastReport = progress.Reports[progress.Reports.Count - 1];
Assert.Equal(10000, lastReport.BytesTransferred);
}
[Fact]
public void ZipArchive_Entry_WriteTo_ReportsProgress()
{
var progress = new TestProgress<ProgressReport>();
// First create a zip archive
using var archiveStream = new MemoryStream();
using (
var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
)
{
var testData = CreateTestData(10000, (byte)'A');
using var sourceStream = new MemoryStream(testData);
writer.Write("test.txt", sourceStream, DateTime.Now);
}
// Now open as archive and extract entry with progress as parameter
archiveStream.Position = 0;
using var archive = ZipArchive.Open(archiveStream);
foreach (var entry in archive.Entries)
{
if (!entry.IsDirectory)
{
using var extractedStream = new MemoryStream();
entry.WriteTo(extractedStream, progress);
}
}
Assert.NotEmpty(progress.Reports);
Assert.All(progress.Reports, p => Assert.Equal("test.txt", p.EntryPath));
var lastReport = progress.Reports[progress.Reports.Count - 1];
Assert.Equal(10000, lastReport.BytesTransferred);
}
[Fact]
public async Task ZipArchive_Entry_WriteToAsync_ReportsProgress()
{
var progress = new TestProgress<ProgressReport>();
// First create a zip archive
using var archiveStream = new MemoryStream();
using (
var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
)
{
var testData = CreateTestData(10000, (byte)'A');
using var sourceStream = new MemoryStream(testData);
writer.Write("test.txt", sourceStream, DateTime.Now);
}
// Now open as archive and extract entry async with progress as parameter
archiveStream.Position = 0;
using var archive = ZipArchive.Open(archiveStream);
foreach (var entry in archive.Entries)
{
if (!entry.IsDirectory)
{
using var extractedStream = new MemoryStream();
await entry.WriteToAsync(extractedStream, CancellationToken.None, progress);
}
}
Assert.NotEmpty(progress.Reports);
Assert.All(progress.Reports, p => Assert.Equal("test.txt", p.EntryPath));
var lastReport = progress.Reports[progress.Reports.Count - 1];
Assert.Equal(10000, lastReport.BytesTransferred);
}
[Fact]
public void WriterOptions_WithoutProgress_DoesNotThrow()
{
using var archiveStream = new MemoryStream();
var options = new ZipWriterOptions(CompressionType.Deflate);
Assert.Null(options.Progress);
using (var writer = new ZipWriter(archiveStream, options))
{
var testData = CreateTestData(100, (byte)'A');
using var sourceStream = new MemoryStream(testData);
writer.Write("test.txt", sourceStream, DateTime.Now);
}
Assert.True(archiveStream.Length > 0);
}
[Fact]
public void ReaderOptions_WithoutProgress_DoesNotThrow()
{
// First create a zip archive
using var archiveStream = new MemoryStream();
using (
var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
)
{
var testData = CreateTestData(100, (byte)'A');
using var sourceStream = new MemoryStream(testData);
writer.Write("test.txt", sourceStream, DateTime.Now);
}
// Read without progress
archiveStream.Position = 0;
var readerOptions = new ReaderOptions();
Assert.Null(readerOptions.Progress);
using (var reader = ReaderFactory.Open(archiveStream, readerOptions))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
using var extractedStream = new MemoryStream();
reader.WriteEntryTo(extractedStream);
}
}
}
}
[Fact]
public void ZipArchive_WithoutProgress_DoesNotThrow()
{
// First create a zip archive
using var archiveStream = new MemoryStream();
using (
var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
)
{
var testData = CreateTestData(100, (byte)'A');
using var sourceStream = new MemoryStream(testData);
writer.Write("test.txt", sourceStream, DateTime.Now);
}
// Open archive and extract without progress
archiveStream.Position = 0;
using var archive = ZipArchive.Open(archiveStream);
foreach (var entry in archive.Entries)
{
if (!entry.IsDirectory)
{
using var extractedStream = new MemoryStream();
entry.WriteTo(extractedStream);
}
}
}
[Fact]
public void ProgressReport_PercentComplete_WithUnknownTotalBytes_ReturnsNull()
{
var progress = new ProgressReport("test.txt", 100, null);
Assert.Null(progress.PercentComplete);
}
[Fact]
public void ProgressReport_PercentComplete_WithZeroTotalBytes_ReturnsNull()
{
var progress = new ProgressReport("test.txt", 0, 0);
Assert.Null(progress.PercentComplete);
}
[Fact]
public void ProgressReport_Properties_AreSetCorrectly()
{
var progress = new ProgressReport("path/to/file.txt", 500, 1000);
Assert.Equal("path/to/file.txt", progress.EntryPath);
Assert.Equal(500, progress.BytesTransferred);
Assert.Equal(1000, progress.TotalBytes);
Assert.Equal(50.0, progress.PercentComplete);
}
[Fact]
public void Tar_Read_ReportsProgress()
{
var progress = new TestProgress<ProgressReport>();
// Create a tar archive first
using var archiveStream = new MemoryStream();
using (
var writer = new TarWriter(
archiveStream,
new TarWriterOptions(CompressionType.None, true)
)
)
{
var testData = CreateTestData(10000, (byte)'B');
using var sourceStream = new MemoryStream(testData);
writer.Write("data.bin", sourceStream, DateTime.Now);
}
// Now read it with progress reporting
archiveStream.Position = 0;
var readerOptions = new ReaderOptions { Progress = progress };
using (var reader = ReaderFactory.Open(archiveStream, readerOptions))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
using var extractedStream = new MemoryStream();
reader.WriteEntryTo(extractedStream);
}
}
}
Assert.NotEmpty(progress.Reports);
Assert.All(progress.Reports, p => Assert.Equal("data.bin", p.EntryPath));
var lastReport = progress.Reports[progress.Reports.Count - 1];
Assert.Equal(10000, lastReport.BytesTransferred);
}
[Fact]
public void TarArchive_Entry_WriteTo_ReportsProgress()
{
var progress = new TestProgress<ProgressReport>();
// Create a tar archive first
using var archiveStream = new MemoryStream();
using (
var writer = new TarWriter(
archiveStream,
new TarWriterOptions(CompressionType.None, true)
)
)
{
var testData = CreateTestData(10000, (byte)'C');
using var sourceStream = new MemoryStream(testData);
writer.Write("file.dat", sourceStream, DateTime.Now);
}
// Now open as archive and extract entry with progress as parameter
archiveStream.Position = 0;
using var archive = SharpCompress.Archives.Tar.TarArchive.Open(archiveStream);
foreach (var entry in archive.Entries)
{
if (!entry.IsDirectory)
{
using var extractedStream = new MemoryStream();
entry.WriteTo(extractedStream, progress);
}
}
Assert.NotEmpty(progress.Reports);
Assert.All(progress.Reports, p => Assert.Equal("file.dat", p.EntryPath));
var lastReport = progress.Reports[progress.Reports.Count - 1];
Assert.Equal(10000, lastReport.BytesTransferred);
}
[Fact]
public async Task TarArchive_Entry_WriteToAsync_ReportsProgress()
{
var progress = new TestProgress<ProgressReport>();
// Create a tar archive first
using var archiveStream = new MemoryStream();
using (
var writer = new TarWriter(
archiveStream,
new TarWriterOptions(CompressionType.None, true)
)
)
{
var testData = CreateTestData(10000, (byte)'D');
using var sourceStream = new MemoryStream(testData);
writer.Write("async.dat", sourceStream, DateTime.Now);
}
// Now open as archive and extract entry async with progress as parameter
archiveStream.Position = 0;
using var archive = SharpCompress.Archives.Tar.TarArchive.Open(archiveStream);
foreach (var entry in archive.Entries)
{
if (!entry.IsDirectory)
{
using var extractedStream = new MemoryStream();
await entry.WriteToAsync(extractedStream, CancellationToken.None, progress);
}
}
Assert.NotEmpty(progress.Reports);
Assert.All(progress.Reports, p => Assert.Equal("async.dat", p.EntryPath));
var lastReport = progress.Reports[progress.Reports.Count - 1];
Assert.Equal(10000, lastReport.BytesTransferred);
}
[Fact]
public void Zip_Read_MultipleEntries_ReportsProgress()
{
var progress = new TestProgress<ProgressReport>();
// Create a zip archive with multiple entries
using var archiveStream = new MemoryStream();
using (
var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
)
{
var testData1 = CreateTestData(5000, (byte)'A');
using var sourceStream1 = new MemoryStream(testData1);
writer.Write("file1.txt", sourceStream1, DateTime.Now);
var testData2 = CreateTestData(8000, (byte)'B');
using var sourceStream2 = new MemoryStream(testData2);
writer.Write("file2.txt", sourceStream2, DateTime.Now);
}
// Now read it with progress reporting
archiveStream.Position = 0;
var readerOptions = new ReaderOptions { Progress = progress };
using (var reader = ReaderFactory.Open(archiveStream, readerOptions))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
using var extractedStream = new MemoryStream();
reader.WriteEntryTo(extractedStream);
}
}
}
Assert.NotEmpty(progress.Reports);
// Should have reports for both files
var file1Reports = progress.Reports.Where(p => p.EntryPath == "file1.txt").ToList();
var file2Reports = progress.Reports.Where(p => p.EntryPath == "file2.txt").ToList();
Assert.NotEmpty(file1Reports);
Assert.NotEmpty(file2Reports);
// Verify final bytes for each file
Assert.Equal(5000, file1Reports[file1Reports.Count - 1].BytesTransferred);
Assert.Equal(8000, file2Reports[file2Reports.Count - 1].BytesTransferred);
}
[Fact]
public void ZipArchive_MultipleEntries_WriteTo_ReportsProgress()
{
var progress = new TestProgress<ProgressReport>();
// Create a zip archive with multiple entries
using var archiveStream = new MemoryStream();
using (
var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
)
{
var testData1 = CreateTestData(5000, (byte)'A');
using var sourceStream1 = new MemoryStream(testData1);
writer.Write("entry1.txt", sourceStream1, DateTime.Now);
var testData2 = CreateTestData(7000, (byte)'B');
using var sourceStream2 = new MemoryStream(testData2);
writer.Write("entry2.txt", sourceStream2, DateTime.Now);
}
// Now open as archive and extract entries with progress as parameter
archiveStream.Position = 0;
using var archive = ZipArchive.Open(archiveStream);
foreach (var entry in archive.Entries)
{
if (!entry.IsDirectory)
{
using var extractedStream = new MemoryStream();
entry.WriteTo(extractedStream, progress);
}
}
Assert.NotEmpty(progress.Reports);
// Should have reports for both files
var entry1Reports = progress.Reports.Where(p => p.EntryPath == "entry1.txt").ToList();
var entry2Reports = progress.Reports.Where(p => p.EntryPath == "entry2.txt").ToList();
Assert.NotEmpty(entry1Reports);
Assert.NotEmpty(entry2Reports);
// Verify final bytes for each entry
Assert.Equal(5000, entry1Reports[entry1Reports.Count - 1].BytesTransferred);
Assert.Equal(7000, entry2Reports[entry2Reports.Count - 1].BytesTransferred);
}
[Fact]
public async Task Zip_ReadAsync_ReportsProgress()
{
var progress = new TestProgress<ProgressReport>();
// Create a zip archive
using var archiveStream = new MemoryStream();
using (
var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
)
{
var testData = CreateTestData(10000, (byte)'E');
using var sourceStream = new MemoryStream(testData);
writer.Write("async_read.txt", sourceStream, DateTime.Now);
}
// Now read it with progress reporting
archiveStream.Position = 0;
var readerOptions = new ReaderOptions { Progress = progress };
using (var reader = ReaderFactory.Open(archiveStream, readerOptions))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
using var extractedStream = new MemoryStream();
await reader.WriteEntryToAsync(extractedStream);
}
}
}
Assert.NotEmpty(progress.Reports);
Assert.All(progress.Reports, p => Assert.Equal("async_read.txt", p.EntryPath));
var lastReport = progress.Reports[progress.Reports.Count - 1];
Assert.Equal(10000, lastReport.BytesTransferred);
}
[Fact]
public void GZip_Write_ReportsProgress()
{
var progress = new TestProgress<ProgressReport>();
using var archiveStream = new MemoryStream();
var options = new SharpCompress.Writers.GZip.GZipWriterOptions { Progress = progress };
using (var writer = new SharpCompress.Writers.GZip.GZipWriter(archiveStream, options))
{
var testData = CreateTestData(10000, (byte)'G');
using var sourceStream = new MemoryStream(testData);
writer.Write("gzip_test.txt", sourceStream, DateTime.Now);
}
Assert.NotEmpty(progress.Reports);
Assert.All(progress.Reports, p => Assert.Equal("gzip_test.txt", p.EntryPath));
Assert.All(progress.Reports, p => Assert.Equal(10000, p.TotalBytes));
var lastReport = progress.Reports[progress.Reports.Count - 1];
Assert.Equal(10000, lastReport.BytesTransferred);
Assert.Equal(100.0, lastReport.PercentComplete);
}
[Fact]
public async Task Tar_WriteAsync_ReportsProgress()
{
var progress = new TestProgress<ProgressReport>();
using var archiveStream = new MemoryStream();
var options = new TarWriterOptions(CompressionType.None, true) { Progress = progress };
using (var writer = new TarWriter(archiveStream, options))
{
var testData = CreateTestData(10000, (byte)'A');
using var sourceStream = new MemoryStream(testData);
await writer.WriteAsync("test.txt", sourceStream, DateTime.Now);
}
Assert.NotEmpty(progress.Reports);
Assert.All(progress.Reports, p => Assert.Equal("test.txt", p.EntryPath));
var lastReport = progress.Reports[progress.Reports.Count - 1];
Assert.Equal(10000, lastReport.BytesTransferred);
}
}