Compare commits

..

15 Commits

Author SHA1 Message Date
Adam Hathcock
e21631526b mismerge 2026-02-09 17:36:22 +00:00
Adam Hathcock
cf0ad9b323 Merge remote-tracking branch 'origin/copilot/fix-rar-extraction-issues' into adam/cleanup-options 2026-02-09 17:30:50 +00:00
Adam Hathcock
938692ef33 refactor how options for reading was done 2026-02-09 17:30:11 +00:00
Adam Hathcock
84c49f152e more removal 2026-02-09 17:05:23 +00:00
Adam Hathcock
04dd177f19 first pass of removing extraction options (folded into reader options) 2026-02-09 16:52:54 +00:00
Adam Hathcock
2e074e18d4 Merge remote-tracking branch 'origin/master' into adam/cleanup-options 2026-02-09 16:22:34 +00:00
Adam Hathcock
756cb7bd9d Merge pull request #1192 from adamhathcock/dependabot/nuget/dot-config/csharpier-1.2.6
Bump csharpier from 1.2.5 to 1.2.6
2026-02-09 13:14:17 +00:00
dependabot[bot]
c37209618d Bump csharpier from 1.2.5 to 1.2.6
---
updated-dependencies:
- dependency-name: csharpier
  dependency-version: 1.2.6
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-02-09 10:48:15 +00:00
Adam Hathcock
c4a28e7cfb Update tests/SharpCompress.Test/Rar/RarArchiveAsyncTests.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-02-09 09:11:19 +00:00
Adam Hathcock
29197f2142 Update tests/SharpCompress.Test/Rar/RarArchiveTests.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-02-09 09:10:59 +00:00
copilot-swe-agent[bot]
2a4081362e Complete fix for RAR extraction subdirectory issue
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-08 12:56:44 +00:00
copilot-swe-agent[bot]
d5cab8172b Address code review feedback: clarify file size comment
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-08 12:54:11 +00:00
copilot-swe-agent[bot]
4084b347d4 Fix RAR extraction to preserve subdirectory structure
- Set default ExtractFullPath=true in WriteToDirectoryInternal methods
- Add test case with sample RAR archive containing subdirectories
- Tests verify files are extracted to correct subdirectories, not root

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-08 12:52:25 +00:00
copilot-swe-agent[bot]
5437d9ff8c Initial plan 2026-02-08 12:38:49 +00:00
Adam Hathcock
cc6e410be8 some options 2026-02-06 15:16:45 +00:00
142 changed files with 1483 additions and 1007 deletions

View File

@@ -3,7 +3,7 @@
"isRoot": true,
"tools": {
"csharpier": {
"version": "1.2.5",
"version": "1.2.6",
"commands": [
"csharpier"
],

View File

@@ -20,7 +20,7 @@ public abstract partial class AbstractArchive<TEntry, TVolume> : IArchive, IAsyn
private readonly LazyAsyncReadOnlyCollection<TVolume> _lazyVolumesAsync;
private readonly LazyAsyncReadOnlyCollection<TEntry> _lazyEntriesAsync;
protected ReaderOptions ReaderOptions { get; }
public ReaderOptions ReaderOptions { get; protected set; }
internal AbstractArchive(ArchiveType type, SourceStream sourceStream)
{

View File

@@ -5,6 +5,7 @@ using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.Writers;
namespace SharpCompress.Archives;
@@ -111,7 +112,7 @@ public abstract partial class AbstractWritableArchive<TEntry, TVolume>
public async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IWriterOptions options,
CancellationToken cancellationToken = default
)
{

View File

@@ -5,6 +5,7 @@ using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.IO;
using SharpCompress.Writers;
@@ -174,7 +175,7 @@ public abstract partial class AbstractWritableArchive<TEntry, TVolume>
return entry;
}
public void SaveTo(Stream stream, WriterOptions options)
public void SaveTo(Stream stream, IWriterOptions options)
{
//reset streams of new entries
newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
@@ -210,14 +211,14 @@ public abstract partial class AbstractWritableArchive<TEntry, TVolume>
protected abstract void SaveTo(
Stream stream,
WriterOptions options,
IWriterOptions options,
IEnumerable<TEntry> oldEntries,
IEnumerable<TEntry> newEntries
);
protected abstract ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IWriterOptions options,
IAsyncEnumerable<TEntry> oldEntries,
IEnumerable<TEntry> newEntries,
CancellationToken cancellationToken = default

View File

@@ -94,11 +94,11 @@ public static partial class ArchiveFactory
public static void WriteToDirectory(
string sourceArchive,
string destinationDirectory,
ExtractionOptions? options = null
ReaderOptions? options = null
)
{
using var archive = OpenArchive(sourceArchive);
archive.WriteToDirectory(destinationDirectory, options);
using var archive = OpenArchive(sourceArchive, options);
archive.WriteToDirectory(destinationDirectory);
}
public static T FindFactory<T>(string path)
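The hunk above folds the old ExtractionOptions parameter into ReaderOptions, so extraction behaviour is configured when the archive is opened rather than per call. A minimal usage sketch under that assumption (the file and directory names are placeholders; the property names come from the IExtractionOptions interface added later in this diff):

using SharpCompress.Archives;
using SharpCompress.Readers;

// Extraction flags now ride on ReaderOptions instead of a separate ExtractionOptions.
var options = new ReaderOptions
{
    ExtractFullPath = true,  // keep the archive's internal directory structure
    Overwrite = true,        // replace existing files on disk
    PreserveFileTime = true, // restore the packed timestamps
};

// The options travel with the opened archive and apply to every entry it writes out.
ArchiveFactory.WriteToDirectory("backup.rar", @"C:\extract\here", options);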

View File

@@ -5,6 +5,7 @@ using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
using SharpCompress.Common.Options;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
@@ -30,7 +31,7 @@ public partial class GZipArchive
protected override async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IWriterOptions options,
IAsyncEnumerable<GZipArchiveEntry> oldEntries,
IEnumerable<GZipArchiveEntry> newEntries,
CancellationToken cancellationToken = default
@@ -40,7 +41,10 @@ public partial class GZipArchive
{
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
}
using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
using var writer = new GZipWriter(
stream,
options as GZipWriterOptions ?? new GZipWriterOptions(options)
);
await foreach (
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
)
@@ -80,7 +84,8 @@ public partial class GZipArchive
var stream = (await volumes.SingleAsync()).Stream;
yield return new GZipArchiveEntry(
this,
await GZipFilePart.CreateAsync(stream, ReaderOptions.ArchiveEncoding)
await GZipFilePart.CreateAsync(stream, ReaderOptions.ArchiveEncoding),
ReaderOptions
);
}
}

View File

@@ -6,6 +6,7 @@ using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
using SharpCompress.Common.Options;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
@@ -58,7 +59,7 @@ public partial class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZi
protected override void SaveTo(
Stream stream,
WriterOptions options,
IWriterOptions options,
IEnumerable<GZipArchiveEntry> oldEntries,
IEnumerable<GZipArchiveEntry> newEntries
)
@@ -67,7 +68,10 @@ public partial class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZi
{
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
}
using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
using var writer = new GZipWriter(
stream,
options as GZipWriterOptions ?? new GZipWriterOptions(options)
);
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
@@ -84,7 +88,8 @@ public partial class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZi
var stream = volumes.Single().Stream;
yield return new GZipArchiveEntry(
this,
GZipFilePart.Create(stream, ReaderOptions.ArchiveEncoding)
GZipFilePart.Create(stream, ReaderOptions.ArchiveEncoding),
ReaderOptions
);
}
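Both SaveTo paths now accept any IWriterOptions and either reuse a GZipWriterOptions instance directly or wrap the general options in one (the cast-or-wrap expression shown in the hunk). A hedged sketch of both call shapes; the file names are placeholders and the GZipWriterOptions wrapping constructor is the one visible above:

using System.IO;
using SharpCompress.Archives.GZip;
using SharpCompress.Common;
using SharpCompress.Writers;
using SharpCompress.Writers.GZip;

using var archive = GZipArchive.Create();
archive.AddEntry("data.bin", File.OpenRead("data.bin"), closeStream: true);

// General options: SaveTo wraps them via new GZipWriterOptions(options).
archive.SaveTo("plain.gz", new WriterOptions(CompressionType.GZip));

// Format-specific options: the "options as GZipWriterOptions" branch keeps them unchanged.
using var target = File.Create("tuned.gz");
archive.SaveTo(target, new GZipWriterOptions(new WriterOptions(CompressionType.GZip)));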

View File

@@ -1,15 +1,16 @@
using System.IO;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.GZip;
using SharpCompress.Common.Options;
namespace SharpCompress.Archives.GZip;
public class GZipArchiveEntry : GZipEntry, IArchiveEntry
{
internal GZipArchiveEntry(GZipArchive archive, GZipFilePart? part)
: base(part) => Archive = archive;
internal GZipArchiveEntry(GZipArchive archive, GZipFilePart? part, IReaderOptions readerOptions)
: base(part, readerOptions) => Archive = archive;
public virtual Stream OpenEntryStream()
{

View File

@@ -19,7 +19,7 @@ internal sealed class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArch
DateTime? lastModified,
bool closeStream
)
: base(archive, null)
: base(archive, null, archive.ReaderOptions)
{
this.stream = stream;
Key = path;

View File

@@ -12,6 +12,11 @@ public interface IArchive : IDisposable
ArchiveType Type { get; }
/// <summary>
/// The options used when opening this archive, including extraction behavior settings.
/// </summary>
ReaderOptions ReaderOptions { get; }
/// <summary>
/// Use this method to extract all entries in an archive in order.
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be

View File

@@ -100,15 +100,11 @@ public static class IArchiveEntryExtensions
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public void WriteToDirectory(
string destinationDirectory,
ExtractionOptions? options = null
) =>
public void WriteToDirectory(string destinationDirectory) =>
ExtractionMethods.WriteEntryToDirectory(
entry,
destinationDirectory,
options,
entry.WriteToFile
(path) => entry.WriteToFile(path)
);
/// <summary>
@@ -116,15 +112,14 @@ public static class IArchiveEntryExtensions
/// </summary>
public async ValueTask WriteToDirectoryAsync(
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
) =>
await ExtractionMethods
.WriteEntryToDirectoryAsync(
entry,
destinationDirectory,
options,
entry.WriteToFileAsync,
async (path, ct) =>
await entry.WriteToFileAsync(path, ct).ConfigureAwait(false),
cancellationToken
)
.ConfigureAwait(false);
@@ -132,11 +127,10 @@ public static class IArchiveEntryExtensions
/// <summary>
/// Extract to specific file
/// </summary>
public void WriteToFile(string destinationFileName, ExtractionOptions? options = null) =>
public void WriteToFile(string destinationFileName) =>
ExtractionMethods.WriteEntryToFile(
entry,
destinationFileName,
options,
(x, fm) =>
{
using var fs = File.Open(destinationFileName, fm);
@@ -149,14 +143,12 @@ public static class IArchiveEntryExtensions
/// </summary>
public async ValueTask WriteToFileAsync(
string destinationFileName,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
) =>
await ExtractionMethods
.WriteEntryToFileAsync(
entry,
destinationFileName,
options,
async (x, fm, ct) =>
{
using var fs = File.Open(destinationFileName, fm);
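With the per-call ExtractionOptions gone from these entry extensions, each entry takes its behaviour from the reader options it was created with (the entry.Options property introduced elsewhere in this diff). A small sketch assuming the archive is opened with the desired settings; the archive name and output path are placeholders:

using SharpCompress.Archives;
using SharpCompress.Readers;

using var archive = ArchiveFactory.Open(
    "sample.zip",
    new ReaderOptions { ExtractFullPath = true, Overwrite = true }
);

foreach (var entry in archive.Entries)
{
    if (entry.IsDirectory)
    {
        continue;
    }

    // No ExtractionOptions argument any more; behaviour flows from entry.Options.
    entry.WriteToDirectory(@"C:\output");
}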

View File

@@ -14,28 +14,25 @@ public static class IArchiveExtensions
/// Extract to specific directory with progress reporting
/// </summary>
/// <param name="destinationDirectory">The folder to extract into.</param>
/// <param name="options">Extraction options.</param>
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
public void WriteToDirectory(
string destinationDirectory,
ExtractionOptions? options = null,
IProgress<ProgressReport>? progress = null
)
{
if (archive.IsSolid || archive.Type == ArchiveType.SevenZip)
{
using var reader = archive.ExtractAllEntries();
reader.WriteAllToDirectory(destinationDirectory, options);
reader.WriteAllToDirectory(destinationDirectory);
}
else
{
archive.WriteToDirectoryInternal(destinationDirectory, options, progress);
archive.WriteToDirectoryInternal(destinationDirectory, progress);
}
}
private void WriteToDirectoryInternal(
string destinationDirectory,
ExtractionOptions? options,
IProgress<ProgressReport>? progress
)
{
@@ -61,7 +58,7 @@ public static class IArchiveExtensions
continue;
}
entry.WriteToDirectory(destinationDirectory, options);
entry.WriteToDirectory(destinationDirectory);
bytesRead += entry.Size;
progress?.Report(

View File

@@ -17,12 +17,10 @@ public static class IAsyncArchiveExtensions
/// </summary>
/// <param name="archive">The archive to extract.</param>
/// <param name="destinationDirectory">The folder to extract into.</param>
/// <param name="options">Extraction options.</param>
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
/// <param name="cancellationToken">Optional cancellation token.</param>
public async ValueTask WriteToDirectoryAsync(
string destinationDirectory,
ExtractionOptions? options = null,
IProgress<ProgressReport>? progress = null,
CancellationToken cancellationToken = default
)
@@ -30,17 +28,14 @@ public static class IAsyncArchiveExtensions
if (await archive.IsSolidAsync() || archive.Type == ArchiveType.SevenZip)
{
await using var reader = await archive.ExtractAllEntriesAsync();
await reader.WriteAllToDirectoryAsync(
destinationDirectory,
options,
cancellationToken
);
await reader
.WriteAllToDirectoryAsync(destinationDirectory, cancellationToken)
.ConfigureAwait(false);
}
else
{
await archive.WriteToDirectoryAsyncInternal(
destinationDirectory,
options,
progress,
cancellationToken
);
@@ -49,7 +44,6 @@ public static class IAsyncArchiveExtensions
private async ValueTask WriteToDirectoryAsyncInternal(
string destinationDirectory,
ExtractionOptions? options,
IProgress<ProgressReport>? progress,
CancellationToken cancellationToken
)
@@ -79,7 +73,7 @@ public static class IAsyncArchiveExtensions
}
await entry
.WriteToDirectoryAsync(destinationDirectory, options, cancellationToken)
.WriteToDirectoryAsync(destinationDirectory, cancellationToken)
.ConfigureAwait(false);
bytesRead += entry.Size;
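The async extension keeps progress reporting but drops the ExtractionOptions parameter, leaving (destinationDirectory, progress, cancellationToken). A hedged sketch; the ProgressReport namespace and the IAsyncArchive pattern match are assumptions based on the signatures visible in this diff:

using System;
using System.Threading;
using SharpCompress.Archives;
using SharpCompress.Common;   // assumed home of ProgressReport
using SharpCompress.Readers;

using var cts = new CancellationTokenSource(TimeSpan.FromMinutes(5));
var progress = new Progress<ProgressReport>(report => Console.WriteLine(report));

using var archive = ArchiveFactory.Open("sample.7z", new ReaderOptions { Overwrite = true });
if (archive is IAsyncArchive asyncArchive)
{
    // New shape: destination, progress, token; no ExtractionOptions parameter.
    await asyncArchive.WriteToDirectoryAsync(@"C:\output", progress, cts.Token);
}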

View File

@@ -2,6 +2,7 @@ using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Options;
using SharpCompress.Writers;
namespace SharpCompress.Archives;
@@ -30,7 +31,7 @@ public interface IWritableArchive : IArchive, IWritableArchiveCommon
/// <summary>
/// Saves the archive to the specified stream using the given writer options.
/// </summary>
void SaveTo(Stream stream, WriterOptions options);
void SaveTo(Stream stream, IWriterOptions options);
/// <summary>
/// Removes the specified entry from the archive.
@@ -45,7 +46,7 @@ public interface IWritableAsyncArchive : IAsyncArchive, IWritableArchiveCommon
/// </summary>
ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IWriterOptions options,
CancellationToken cancellationToken = default
);

View File

@@ -1,6 +1,7 @@
using System;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.Writers;
namespace SharpCompress.Archives;
@@ -58,13 +59,16 @@ public static class IWritableArchiveExtensions
);
}
public void SaveTo(string filePath, WriterOptions? options = null) =>
writableArchive.SaveTo(new FileInfo(filePath), options ?? new(CompressionType.Deflate));
public void SaveTo(string filePath, IWriterOptions? options = null) =>
writableArchive.SaveTo(
new FileInfo(filePath),
options ?? new WriterOptions(CompressionType.Deflate)
);
public void SaveTo(FileInfo fileInfo, WriterOptions? options = null)
public void SaveTo(FileInfo fileInfo, IWriterOptions? options = null)
{
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
writableArchive.SaveTo(stream, options ?? new(CompressionType.Deflate));
writableArchive.SaveTo(stream, options ?? new WriterOptions(CompressionType.Deflate));
}
}
}

View File

@@ -3,6 +3,7 @@ using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.Writers;
namespace SharpCompress.Archives;
@@ -62,24 +63,28 @@ public static class IWritableAsyncArchiveExtensions
public ValueTask SaveToAsync(
string filePath,
WriterOptions? options = null,
IWriterOptions? options = null,
CancellationToken cancellationToken = default
) =>
writableArchive.SaveToAsync(
new FileInfo(filePath),
options ?? new(CompressionType.Deflate),
options ?? new WriterOptions(CompressionType.Deflate),
cancellationToken
);
public async ValueTask SaveToAsync(
FileInfo fileInfo,
WriterOptions? options = null,
IWriterOptions? options = null,
CancellationToken cancellationToken = default
)
{
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
await writableArchive
.SaveToAsync(stream, options ?? new(CompressionType.Deflate), cancellationToken)
.SaveToAsync(
stream,
options ?? new WriterOptions(CompressionType.Deflate),
cancellationToken
)
.ConfigureAwait(false);
}
}

View File

@@ -24,8 +24,7 @@ internal class FileInfoRarArchiveVolume : RarVolume
private static ReaderOptions FixOptions(ReaderOptions options)
{
//make sure we're closing streams with fileinfo
options.LeaveStreamOpen = false;
return options;
return options with { LeaveStreamOpen = false };
}
internal ReadOnlyCollection<RarFilePart> FileParts { get; }
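FixOptions no longer mutates the caller's ReaderOptions; the with expression hands back a modified copy, which implies ReaderOptions is now a record (or at least supports non-destructive cloning with init-only setters). A short illustration of that non-mutating style, under the same assumption:

using System;
using SharpCompress.Readers;

// A shared baseline used across several archives.
var shared = new ReaderOptions { LeaveStreamOpen = true, Password = "secret" };

// Derive a one-off variant without touching the shared instance.
var forFileInfo = shared with { LeaveStreamOpen = false };

Console.WriteLine(shared.LeaveStreamOpen);      // still true
Console.WriteLine(forFileInfo.LeaveStreamOpen); // false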

View File

@@ -23,6 +23,7 @@ public partial class RarArchiveEntry : RarEntry, IArchiveEntry
IEnumerable<RarFilePart> parts,
ReaderOptions readerOptions
)
: base(readerOptions)
{
this.parts = parts.ToList();
this.archive = archive;

View File

@@ -49,7 +49,8 @@ public partial class SevenZipArchive
var file = _database._files[i];
entries[i] = new SevenZipArchiveEntry(
this,
new SevenZipFilePart(stream, _database, i, file, ReaderOptions.ArchiveEncoding)
new SevenZipFilePart(stream, _database, i, file, ReaderOptions.ArchiveEncoding),
ReaderOptions
);
}
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))

View File

@@ -55,7 +55,8 @@ public partial class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, Sev
i,
file,
ReaderOptions.ArchiveEncoding
)
),
ReaderOptions
);
}
foreach (

View File

@@ -1,14 +1,19 @@
using System.IO;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Options;
using SharpCompress.Common.SevenZip;
namespace SharpCompress.Archives.SevenZip;
public class SevenZipArchiveEntry : SevenZipEntry, IArchiveEntry
{
internal SevenZipArchiveEntry(SevenZipArchive archive, SevenZipFilePart part)
: base(part) => Archive = archive;
internal SevenZipArchiveEntry(
SevenZipArchive archive,
SevenZipFilePart part,
IReaderOptions readerOptions
)
: base(part, readerOptions) => Archive = archive;
public Stream OpenEntryStream() => FilePart.GetCompressedStream();

View File

@@ -4,6 +4,7 @@ using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
@@ -18,13 +19,16 @@ public partial class TarArchive
{
protected override async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IWriterOptions options,
IAsyncEnumerable<TarArchiveEntry> oldEntries,
IEnumerable<TarArchiveEntry> newEntries,
CancellationToken cancellationToken = default
)
{
using var writer = new TarWriter(stream, new TarWriterOptions(options));
using var writer = new TarWriter(
stream,
options as TarWriterOptions ?? new TarWriterOptions(options)
);
await foreach (
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
)
@@ -123,7 +127,8 @@ public partial class TarArchive
var entry = new TarArchiveEntry(
this,
new TarFilePart(previousHeader, stream),
CompressionType.None
CompressionType.None,
ReaderOptions
);
var oldStreamPos = stream.Position;
@@ -147,7 +152,8 @@ public partial class TarArchive
yield return new TarArchiveEntry(
this,
new TarFilePart(header, stream),
CompressionType.None
CompressionType.None,
ReaderOptions
);
}
}

View File

@@ -5,6 +5,7 @@ using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
@@ -58,7 +59,8 @@ public partial class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVo
var entry = new TarArchiveEntry(
this,
new TarFilePart(previousHeader, stream),
CompressionType.None
CompressionType.None,
ReaderOptions
);
var oldStreamPos = stream.Position;
@@ -80,7 +82,8 @@ public partial class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVo
yield return new TarArchiveEntry(
this,
new TarFilePart(header, stream),
CompressionType.None
CompressionType.None,
ReaderOptions
);
}
}
@@ -115,12 +118,15 @@ public partial class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVo
protected override void SaveTo(
Stream stream,
WriterOptions options,
IWriterOptions options,
IEnumerable<TarArchiveEntry> oldEntries,
IEnumerable<TarArchiveEntry> newEntries
)
{
using var writer = new TarWriter(stream, new TarWriterOptions(options));
using var writer = new TarWriter(
stream,
options as TarWriterOptions ?? new TarWriterOptions(options)
);
foreach (var entry in oldEntries.Concat(newEntries))
{
if (entry.IsDirectory)

View File

@@ -1,16 +1,22 @@
using System.IO;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.Common.Tar;
namespace SharpCompress.Archives.Tar;
public class TarArchiveEntry : TarEntry, IArchiveEntry
{
internal TarArchiveEntry(TarArchive archive, TarFilePart? part, CompressionType compressionType)
: base(part, compressionType) => Archive = archive;
internal TarArchiveEntry(
TarArchive archive,
TarFilePart? part,
CompressionType compressionType,
IReaderOptions readerOptions
)
: base(part, compressionType, readerOptions) => Archive = archive;
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();

View File

@@ -21,7 +21,7 @@ internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiv
DateTime? lastModified,
bool closeStream
)
: base(archive, null, compressionType)
: base(archive, null, compressionType, archive.ReaderOptions)
{
this.stream = stream;
Key = path;
@@ -36,7 +36,7 @@ internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiv
string directoryPath,
DateTime? lastModified
)
: base(archive, null, CompressionType.None)
: base(archive, null, CompressionType.None, archive.ReaderOptions)
{
stream = null;
Key = directoryPath;

View File

@@ -5,6 +5,7 @@ using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
@@ -54,7 +55,8 @@ public partial class ZipArchive
yield return new ZipArchiveEntry(
this,
new SeekableZipFilePart(headerFactory.NotNull(), deh, s)
new SeekableZipFilePart(headerFactory.NotNull(), deh, s),
ReaderOptions
);
}
break;
@@ -71,13 +73,16 @@ public partial class ZipArchive
protected override async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IWriterOptions options,
IAsyncEnumerable<ZipArchiveEntry> oldEntries,
IEnumerable<ZipArchiveEntry> newEntries,
CancellationToken cancellationToken = default
)
{
using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
using var writer = new ZipWriter(
stream,
options as ZipWriterOptions ?? new ZipWriterOptions(options)
);
await foreach (
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
)

View File

@@ -5,6 +5,7 @@ using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Deflate;
@@ -94,7 +95,8 @@ public partial class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVo
yield return new ZipArchiveEntry(
this,
new SeekableZipFilePart(headerFactory.NotNull(), deh, s)
new SeekableZipFilePart(headerFactory.NotNull(), deh, s),
ReaderOptions
);
}
break;
@@ -113,12 +115,15 @@ public partial class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVo
protected override void SaveTo(
Stream stream,
WriterOptions options,
IWriterOptions options,
IEnumerable<ZipArchiveEntry> oldEntries,
IEnumerable<ZipArchiveEntry> newEntries
)
{
using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
using var writer = new ZipWriter(
stream,
options as ZipWriterOptions ?? new ZipWriterOptions(options)
);
foreach (var entry in oldEntries.Concat(newEntries))
{
if (entry.IsDirectory)

View File

@@ -1,15 +1,20 @@
using System.IO;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Options;
using SharpCompress.Common.Zip;
namespace SharpCompress.Archives.Zip;
public partial class ZipArchiveEntry : ZipEntry, IArchiveEntry
{
internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart? part)
: base(part) => Archive = archive;
internal ZipArchiveEntry(
ZipArchive archive,
SeekableZipFilePart? part,
IReaderOptions readerOptions
)
: base(part, readerOptions) => Archive = archive;
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();

View File

@@ -21,7 +21,7 @@ internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
DateTime? lastModified,
bool closeStream
)
: base(archive, null)
: base(archive, null, archive.ReaderOptions)
{
this.stream = stream;
Key = path;
@@ -36,7 +36,7 @@ internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
string directoryPath,
DateTime? lastModified
)
: base(archive, null)
: base(archive, null, archive.ReaderOptions)
{
stream = null;
Key = directoryPath;

View File

@@ -5,6 +5,7 @@ using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.Ace.Headers;
using SharpCompress.Common.Options;
namespace SharpCompress.Common.Ace;
@@ -12,7 +13,8 @@ public class AceEntry : Entry
{
private readonly AceFilePart _filePart;
internal AceEntry(AceFilePart filePart)
internal AceEntry(AceFilePart filePart, IReaderOptions readerOptions)
: base(readerOptions)
{
_filePart = filePart;
}

View File

@@ -5,6 +5,7 @@ using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.GZip;
using SharpCompress.Common.Options;
using SharpCompress.Common.Tar;
namespace SharpCompress.Common.Arc;
@@ -13,7 +14,8 @@ public class ArcEntry : Entry
{
private readonly ArcFilePart? _filePart;
internal ArcEntry(ArcFilePart? filePart)
internal ArcEntry(ArcFilePart? filePart, IReaderOptions readerOptions)
: base(readerOptions)
{
_filePart = filePart;
}

View File

@@ -5,6 +5,7 @@ using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.Arc;
using SharpCompress.Common.Arj.Headers;
using SharpCompress.Common.Options;
namespace SharpCompress.Common.Arj;
@@ -12,7 +13,8 @@ public class ArjEntry : Entry
{
private readonly ArjFilePart _filePart;
internal ArjEntry(ArjFilePart filePart)
internal ArjEntry(ArjFilePart filePart, IReaderOptions readerOptions)
: base(readerOptions)
{
_filePart = filePart;
}

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Linq;
using SharpCompress.Common.Options;
namespace SharpCompress.Common;
@@ -87,4 +88,14 @@ public abstract class Entry : IEntry
/// Entry file attribute.
/// </summary>
public virtual int? Attrib => throw new NotImplementedException();
/// <summary>
/// The options used when opening this entry's source (reader or archive).
/// </summary>
public IReaderOptions Options { get; protected set; }
protected Entry(IReaderOptions readerOptions)
{
Options = readerOptions;
}
}

View File

@@ -2,6 +2,7 @@ using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Readers;
namespace SharpCompress.Common;
@@ -10,8 +11,7 @@ internal static partial class ExtractionMethods
public static async ValueTask WriteEntryToDirectoryAsync(
IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Func<string, ExtractionOptions?, CancellationToken, ValueTask> writeAsync,
Func<string, CancellationToken, ValueTask> writeAsync,
CancellationToken cancellationToken = default
)
{
@@ -34,11 +34,9 @@ internal static partial class ExtractionMethods
);
}
options ??= new ExtractionOptions() { Overwrite = true };
var file = Path.GetFileName(entry.Key.NotNull("Entry Key is null")).NotNull("File is null");
file = Utility.ReplaceInvalidFileNameChars(file);
if (options.ExtractFullPath)
if (entry.Options.ExtractFullPath)
{
var folder = Path.GetDirectoryName(entry.Key.NotNull("Entry Key is null"))
.NotNull("Directory is null");
@@ -72,9 +70,9 @@ internal static partial class ExtractionMethods
"Entry is trying to write a file outside of the destination directory."
);
}
await writeAsync(destinationFileName, options, cancellationToken).ConfigureAwait(false);
await writeAsync(destinationFileName, cancellationToken).ConfigureAwait(false);
}
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
else if (entry.Options.ExtractFullPath && !Directory.Exists(destinationFileName))
{
Directory.CreateDirectory(destinationFileName);
}
@@ -83,34 +81,34 @@ internal static partial class ExtractionMethods
public static async ValueTask WriteEntryToFileAsync(
IEntry entry,
string destinationFileName,
ExtractionOptions? options,
Func<string, FileMode, CancellationToken, ValueTask> openAndWriteAsync,
CancellationToken cancellationToken = default
)
{
if (entry.LinkTarget != null)
{
if (options?.WriteSymbolicLink is null)
if (entry.Options.SymbolicLinkHandler is not null)
{
throw new ExtractionException(
"Entry is a symbolic link but ExtractionOptions.WriteSymbolicLink delegate is null"
);
entry.Options.SymbolicLinkHandler(destinationFileName, entry.LinkTarget);
}
options.WriteSymbolicLink(destinationFileName, entry.LinkTarget);
else
{
ReaderOptions.DefaultSymbolicLinkHandler(destinationFileName, entry.LinkTarget);
}
return;
}
else
{
var fm = FileMode.Create;
options ??= new ExtractionOptions() { Overwrite = true };
if (!options.Overwrite)
if (!entry.Options.Overwrite)
{
fm = FileMode.CreateNew;
}
await openAndWriteAsync(destinationFileName, fm, cancellationToken)
.ConfigureAwait(false);
entry.PreserveExtractionOptions(destinationFileName, options);
entry.PreserveExtractionOptions(destinationFileName);
}
}
}

View File

@@ -3,6 +3,7 @@ using System.IO;
using System.Runtime.InteropServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Readers;
namespace SharpCompress.Common;
@@ -23,8 +24,7 @@ internal static partial class ExtractionMethods
public static void WriteEntryToDirectory(
IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Action<string, ExtractionOptions?> write
Action<string> write
)
{
string destinationFileName;
@@ -46,11 +46,9 @@ internal static partial class ExtractionMethods
);
}
options ??= new ExtractionOptions() { Overwrite = true };
var file = Path.GetFileName(entry.Key.NotNull("Entry Key is null")).NotNull("File is null");
file = Utility.ReplaceInvalidFileNameChars(file);
if (options.ExtractFullPath)
if (entry.Options.ExtractFullPath)
{
var folder = Path.GetDirectoryName(entry.Key.NotNull("Entry Key is null"))
.NotNull("Directory is null");
@@ -84,9 +82,9 @@ internal static partial class ExtractionMethods
"Entry is trying to write a file outside of the destination directory."
);
}
write(destinationFileName, options);
write(destinationFileName);
}
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
else if (entry.Options.ExtractFullPath && !Directory.Exists(destinationFileName))
{
Directory.CreateDirectory(destinationFileName);
}
@@ -95,32 +93,32 @@ internal static partial class ExtractionMethods
public static void WriteEntryToFile(
IEntry entry,
string destinationFileName,
ExtractionOptions? options,
Action<string, FileMode> openAndWrite
)
{
if (entry.LinkTarget != null)
{
if (options?.WriteSymbolicLink is null)
if (entry.Options.SymbolicLinkHandler is not null)
{
throw new ExtractionException(
"Entry is a symbolic link but ExtractionOptions.WriteSymbolicLink delegate is null"
);
entry.Options.SymbolicLinkHandler(destinationFileName, entry.LinkTarget);
}
options.WriteSymbolicLink(destinationFileName, entry.LinkTarget);
else
{
ReaderOptions.DefaultSymbolicLinkHandler(destinationFileName, entry.LinkTarget);
}
return;
}
else
{
var fm = FileMode.Create;
options ??= new ExtractionOptions() { Overwrite = true };
if (!options.Overwrite)
if (!entry.Options.Overwrite)
{
fm = FileMode.CreateNew;
}
openAndWrite(destinationFileName, fm);
entry.PreserveExtractionOptions(destinationFileName, options);
entry.PreserveExtractionOptions(destinationFileName);
}
}
}

View File

@@ -1,40 +0,0 @@
using System;
namespace SharpCompress.Common;
public class ExtractionOptions
{
/// <summary>
/// overwrite target if it exists
/// </summary>
public bool Overwrite { get; set; }
/// <summary>
/// extract with internal directory structure
/// </summary>
public bool ExtractFullPath { get; set; }
/// <summary>
/// preserve file time
/// </summary>
public bool PreserveFileTime { get; set; }
/// <summary>
/// preserve windows file attributes
/// </summary>
public bool PreserveAttributes { get; set; }
/// <summary>
/// Delegate for writing symbolic links to disk.
/// sourcePath is where the symlink is created.
/// targetPath is what the symlink refers to.
/// </summary>
public delegate void SymbolicLinkWriterDelegate(string sourcePath, string targetPath);
public SymbolicLinkWriterDelegate WriteSymbolicLink = (sourcePath, targetPath) =>
{
Console.WriteLine(
$"Could not write symlink {sourcePath} -> {targetPath}, for more information please see https://github.com/dotnet/runtime/issues/24271"
);
};
}
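The whole ExtractionOptions class is deleted. A migration sketch, assuming its replacements keep the names shown in IExtractionOptions further down (SymbolicLinkHandler takes over from the WriteSymbolicLink delegate; File.CreateSymbolicLink needs .NET 6 or later):

using SharpCompress.Archives;
using SharpCompress.Readers;

// Before: archive.WriteToDirectory(dest, new ExtractionOptions { ExtractFullPath = true, Overwrite = true });
// After: the same flags are supplied once, on the ReaderOptions used to open the archive.
var options = new ReaderOptions
{
    ExtractFullPath = true,
    Overwrite = true,
    // Replaces ExtractionOptions.WriteSymbolicLink.
    SymbolicLinkHandler = (source, target) => System.IO.File.CreateSymbolicLink(source, target),
};

using var archive = ArchiveFactory.Open("sample.tar.gz", options);
archive.WriteToDirectory(@"C:\output");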

View File

@@ -1,5 +1,6 @@
using System.Collections.Generic;
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common.GZip;
@@ -7,9 +8,12 @@ public partial class GZipEntry
{
internal static async IAsyncEnumerable<GZipEntry> GetEntriesAsync(
Stream stream,
OptionsBase options
ReaderOptions options
)
{
yield return new GZipEntry(await GZipFilePart.CreateAsync(stream, options.ArchiveEncoding));
yield return new GZipEntry(
await GZipFilePart.CreateAsync(stream, options.ArchiveEncoding),
options
);
}
}

View File

@@ -1,6 +1,8 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Options;
using SharpCompress.Readers;
namespace SharpCompress.Common.GZip;
@@ -8,7 +10,11 @@ public partial class GZipEntry : Entry
{
private readonly GZipFilePart? _filePart;
internal GZipEntry(GZipFilePart? filePart) => _filePart = filePart;
internal GZipEntry(GZipFilePart? filePart, IReaderOptions readerOptions)
: base(readerOptions)
{
_filePart = filePart;
}
public override CompressionType CompressionType => CompressionType.GZip;
@@ -38,9 +44,9 @@ public partial class GZipEntry : Entry
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
internal static IEnumerable<GZipEntry> GetEntries(Stream stream, OptionsBase options)
internal static IEnumerable<GZipEntry> GetEntries(Stream stream, ReaderOptions options)
{
yield return new GZipEntry(GZipFilePart.Create(stream, options.ArchiveEncoding));
yield return new GZipEntry(GZipFilePart.Create(stream, options.ArchiveEncoding), options);
}
// Async methods moved to GZipEntry.Async.cs

View File

@@ -9,7 +9,7 @@ public class GZipVolume : Volume
: base(stream, options, index) { }
public GZipVolume(FileInfo fileInfo, ReaderOptions options)
: base(fileInfo.OpenRead(), options) => options.LeaveStreamOpen = false;
: base(fileInfo.OpenRead(), options with { LeaveStreamOpen = false }) { }
public override bool IsFirstVolume => true;

View File

@@ -4,13 +4,9 @@ namespace SharpCompress.Common;
internal static class EntryExtensions
{
internal static void PreserveExtractionOptions(
this IEntry entry,
string destinationFileName,
ExtractionOptions options
)
internal static void PreserveExtractionOptions(this IEntry entry, string destinationFileName)
{
if (options.PreserveFileTime || options.PreserveAttributes)
if (entry.Options.PreserveFileTime || entry.Options.PreserveAttributes)
{
var nf = new FileInfo(destinationFileName);
if (!nf.Exists)
@@ -19,7 +15,7 @@ internal static class EntryExtensions
}
// update file time to original packed time
if (options.PreserveFileTime)
if (entry.Options.PreserveFileTime)
{
if (entry.CreatedTime.HasValue)
{
@@ -37,7 +33,7 @@ internal static class EntryExtensions
}
}
if (options.PreserveAttributes)
if (entry.Options.PreserveAttributes)
{
if (entry.Attrib.HasValue)
{

View File

@@ -1,4 +1,5 @@
using System;
using SharpCompress.Common.Options;
namespace SharpCompress.Common;
@@ -21,4 +22,9 @@ public interface IEntry
DateTime? LastModifiedTime { get; }
long Size { get; }
int? Attrib { get; }
/// <summary>
/// The options used when opening this entry's source (reader or archive).
/// </summary>
IReaderOptions Options { get; }
}

View File

@@ -0,0 +1,18 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// This file is required for init-only properties to work on older target frameworks (.NET Framework 4.8, .NET Standard 2.0)
// The IsExternalInit type is used by the compiler for records and init-only properties
#if NETFRAMEWORK || NETSTANDARD2_0
using System.ComponentModel;
namespace System.Runtime.CompilerServices;
/// <summary>
/// Reserved to be used by the compiler for tracking metadata.
/// This class should not be used by developers in source code.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
internal static class IsExternalInit { }
#endif
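The shim exists only so init accessors compile when targeting net48/netstandard2.0; the compiler references System.Runtime.CompilerServices.IsExternalInit for every init-only setter. A minimal illustration of the feature it unlocks (the Person type is hypothetical, not part of SharpCompress):

var p = new Person { Name = "Ada" };   // assignable inside an object initializer
// p.Name = "Grace";                   // would not compile: init-only after construction
System.Console.WriteLine(p.Name);

public sealed class Person
{
    public string Name { get; init; } = "";
}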

View File

@@ -0,0 +1,6 @@
namespace SharpCompress.Common.Options;
public interface IEncodingOptions
{
IArchiveEncoding ArchiveEncoding { get; init; }
}

View File

@@ -0,0 +1,39 @@
using System;
namespace SharpCompress.Common.Options;
/// <summary>
/// Options for configuring extraction behavior when extracting archive entries to the filesystem.
/// </summary>
public interface IExtractionOptions
{
/// <summary>
/// Overwrite target if it exists.
/// <para><b>Breaking change:</b> Default changed from false to true in version 0.40.0.</para>
/// </summary>
bool Overwrite { get; init; }
/// <summary>
/// Extract with internal directory structure.
/// <para><b>Breaking change:</b> Default changed from false to true in version 0.40.0.</para>
/// </summary>
bool ExtractFullPath { get; init; }
/// <summary>
/// Preserve file time.
/// <para><b>Breaking change:</b> Default changed from false to true in version 0.40.0.</para>
/// </summary>
bool PreserveFileTime { get; init; }
/// <summary>
/// Preserve windows file attributes.
/// </summary>
bool PreserveAttributes { get; init; }
/// <summary>
/// Delegate for writing symbolic links to disk.
/// The first parameter is the source path (where the symlink is created).
/// The second parameter is the target path (what the symlink refers to).
/// </summary>
Action<string, string>? SymbolicLinkHandler { get; init; }
}
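The XML docs above call out a behavioural break: Overwrite, ExtractFullPath and PreserveFileTime now default to true as of 0.40.0. A sketch of explicitly opting back into the old behaviour, assuming ReaderOptions implements this interface with matching init-only properties (the archive name is a placeholder):

using SharpCompress.Archives;
using SharpCompress.Readers;

// Restore the pre-0.40.0 extraction behaviour explicitly.
var legacyBehaviour = new ReaderOptions
{
    Overwrite = false,
    ExtractFullPath = false,
    PreserveFileTime = false,
};

using var archive = ArchiveFactory.Open("legacy.zip", legacyBehaviour);
archive.WriteToDirectory(@"C:\output");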

View File

@@ -0,0 +1,8 @@
using System;
namespace SharpCompress.Common.Options;
public interface IProgressOptions
{
IProgress<ProgressReport>? Progress { get; init; }
}

View File

@@ -0,0 +1,15 @@
namespace SharpCompress.Common.Options;
public interface IReaderOptions
: IStreamOptions,
IEncodingOptions,
IProgressOptions,
IExtractionOptions
{
bool LookForHeader { get; init; }
string? Password { get; init; }
bool DisableCheckIncomplete { get; init; }
int BufferSize { get; init; }
string? ExtensionHint { get; init; }
int? RewindableBufferSize { get; init; }
}
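IReaderOptions aggregates the stream, encoding, progress and extraction facets and adds the reader-specific knobs above. A hedged example wiring a few of them into a forward-only read; the archive name and password are placeholders, and ReaderFactory.Open plus the option-free WriteAllToDirectory shown earlier in this diff are assumed to accept them unchanged:

using System.IO;
using SharpCompress.Readers;

using var stream = File.OpenRead("secret.rar");
var options = new ReaderOptions
{
    Password = "hunter2",    // used for encrypted headers and entries
    LookForHeader = true,    // scan for the signature instead of assuming offset 0
    LeaveStreamOpen = false, // from IStreamOptions: dispose the stream with the reader
};

using var reader = ReaderFactory.Open(stream, options);
// WriteAllToDirectory no longer takes ExtractionOptions (see IArchiveExtensions above).
reader.WriteAllToDirectory(@"C:\output");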

View File

@@ -0,0 +1,6 @@
namespace SharpCompress.Common.Options;
public interface IStreamOptions
{
bool LeaveStreamOpen { get; init; }
}

View File

@@ -0,0 +1,9 @@
using SharpCompress.Common;
namespace SharpCompress.Common.Options;
public interface IWriterOptions : IStreamOptions, IEncodingOptions, IProgressOptions
{
CompressionType CompressionType { get; init; }
int CompressionLevel { get; init; }
}

View File

@@ -1,11 +0,0 @@
namespace SharpCompress.Common;
public class OptionsBase
{
/// <summary>
/// SharpCompress will keep the supplied streams open. Default is true.
/// </summary>
public bool LeaveStreamOpen { get; set; } = true;
public IArchiveEncoding ArchiveEncoding { get; set; } = new ArchiveEncoding();
}

View File

@@ -1,3 +1,5 @@
#nullable disable
using System.Security.Cryptography;
using System.Text;
using SharpCompress.Common.Rar.Headers;
@@ -10,7 +12,7 @@ internal class CryptKey3 : ICryptKey
private string _password;
public CryptKey3(string? password) => _password = password ?? "";
public CryptKey3(string password) => _password = password ?? "";
public ICryptoTransform Transformer(byte[] salt)
{

View File

@@ -1,3 +1,5 @@
#nullable disable
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar;

View File

@@ -1,3 +1,5 @@
#nullable disable
using SharpCompress.Common.Rar;
using SharpCompress.IO;

View File

@@ -65,7 +65,7 @@ public partial class RarHeaderFactory
if (_isRar5 && _cryptInfo != null)
{
await _cryptInfo.ReadInitVAsync(new AsyncMarkingBinaryReader(stream));
var _headerKey = new CryptKey5(Options.Password.NotNull(), _cryptInfo);
var _headerKey = new CryptKey5(Options.Password!, _cryptInfo);
reader = await AsyncRarCryptoBinaryReader.Create(
stream,
@@ -189,7 +189,7 @@ public partial class RarHeaderFactory
Options.Password,
fh.Rar5CryptoInfo.NotNull()
)
: new CryptKey3(Options.Password.NotNull())
: new CryptKey3(Options.Password)
);
}
}

View File

@@ -62,7 +62,7 @@ public partial class RarHeaderFactory
if (_isRar5 && _cryptInfo != null)
{
_cryptInfo.ReadInitV(new MarkingBinaryReader(stream));
var _headerKey = new CryptKey5(Options.Password.NotNull(), _cryptInfo);
var _headerKey = new CryptKey5(Options.Password!, _cryptInfo);
reader = RarCryptoBinaryReader.Create(stream, _headerKey, _cryptInfo.Salt);
}

View File

@@ -1,4 +1,5 @@
using System;
using SharpCompress.Common.Options;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Common.Rar;
@@ -7,6 +8,9 @@ public abstract class RarEntry : Entry
{
internal abstract FileHeader FileHeader { get; }
protected RarEntry(IReaderOptions readerOptions)
: base(readerOptions) { }
/// <summary>
/// As the V2017 port isn't complete, add this check to use the legacy Rar code.
/// </summary>

View File

@@ -1,3 +1,5 @@
#nullable disable
using System.IO;
using System.Threading;
using System.Threading.Tasks;

View File

@@ -1,3 +1,5 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
@@ -16,7 +18,7 @@ internal partial class ArchiveDatabase
internal List<long> _packSizes = new();
internal List<uint?> _packCrCs = new();
internal List<CFolder> _folders = new();
internal List<int>? _numUnpackStreamsVector;
internal List<int> _numUnpackStreamsVector;
internal List<CFileItem> _files = new();
internal List<long> _packStreamStartPositions = new();
@@ -45,7 +47,7 @@ internal partial class ArchiveDatabase
_packSizes.Count == 0
&& _packCrCs.Count == 0
&& _folders.Count == 0
&& (_numUnpackStreamsVector?.Count ?? 0) == 0
&& _numUnpackStreamsVector.Count == 0
&& _files.Count == 0;
private void FillStartPos()
@@ -92,7 +94,7 @@ internal partial class ArchiveDatabase
_folderStartFileIndex.Add(i); // check it
if (_numUnpackStreamsVector.NotNull()[folderIndex] != 0)
if (_numUnpackStreamsVector![folderIndex] != 0)
{
break;
}

View File

@@ -1,9 +1,11 @@
#nullable disable
namespace SharpCompress.Common.SevenZip;
internal class CCoderInfo
{
internal CMethodId _methodId;
internal byte[]? _props;
internal byte[] _props;
internal int _numInStreams;
internal int _numOutStreams;
}

View File

@@ -1,3 +1,5 @@
#nullable disable
using System;
namespace SharpCompress.Common.SevenZip;
@@ -8,7 +10,7 @@ internal class CFileItem
public uint? Attrib { get; internal set; }
public uint? ExtendedAttrib { get; internal set; }
public uint? Crc { get; internal set; }
public string? Name { get; internal set; }
public string Name { get; internal set; }
public bool HasStream { get; internal set; }
public bool IsDir { get; internal set; }

View File

@@ -1,11 +1,16 @@
using System;
using System;
using System.Collections.Generic;
using SharpCompress.Common.Options;
namespace SharpCompress.Common.SevenZip;
public class SevenZipEntry : Entry
{
internal SevenZipEntry(SevenZipFilePart filePart) => FilePart = filePart;
internal SevenZipEntry(SevenZipFilePart filePart, IReaderOptions readerOptions)
: base(readerOptions)
{
FilePart = filePart;
}
internal SevenZipFilePart FilePart { get; }

View File

@@ -34,7 +34,7 @@ internal class SevenZipFilePart : FilePart
internal CFileItem Header { get; }
internal CFolder? Folder { get; }
internal override string? FilePartName => Header.Name;
internal override string FilePartName => Header.Name;
internal override Stream? GetRawStream() => null;

View File

@@ -1,5 +1,6 @@
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Options;
using SharpCompress.IO;
namespace SharpCompress.Common.Tar;
@@ -10,7 +11,8 @@ public partial class TarEntry
StreamingMode mode,
Stream stream,
CompressionType compressionType,
IArchiveEncoding archiveEncoding
IArchiveEncoding archiveEncoding,
IReaderOptions readerOptions
)
{
await foreach (
@@ -21,11 +23,19 @@ public partial class TarEntry
{
if (mode == StreamingMode.Seekable)
{
yield return new TarEntry(new TarFilePart(header, stream), compressionType);
yield return new TarEntry(
new TarFilePart(header, stream),
compressionType,
readerOptions
);
}
else
{
yield return new TarEntry(new TarFilePart(header, null), compressionType);
yield return new TarEntry(
new TarFilePart(header, null),
compressionType,
readerOptions
);
}
}
else

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Options;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
@@ -10,7 +11,8 @@ public partial class TarEntry : Entry
{
private readonly TarFilePart? _filePart;
internal TarEntry(TarFilePart? filePart, CompressionType type)
internal TarEntry(TarFilePart? filePart, CompressionType type, IReaderOptions readerOptions)
: base(readerOptions)
{
_filePart = filePart;
CompressionType = type;
@@ -54,7 +56,8 @@ public partial class TarEntry : Entry
StreamingMode mode,
Stream stream,
CompressionType compressionType,
IArchiveEncoding archiveEncoding
IArchiveEncoding archiveEncoding,
IReaderOptions readerOptions
)
{
foreach (var header in TarHeaderFactory.ReadHeader(mode, stream, archiveEncoding))
@@ -63,11 +66,19 @@ public partial class TarEntry : Entry
{
if (mode == StreamingMode.Seekable)
{
yield return new TarEntry(new TarFilePart(header, stream), compressionType);
yield return new TarEntry(
new TarFilePart(header, stream),
compressionType,
readerOptions
);
}
else
{
yield return new TarEntry(new TarFilePart(header, null), compressionType);
yield return new TarEntry(
new TarFilePart(header, null),
compressionType,
readerOptions
);
}
}
else

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Linq;
using SharpCompress.Common.Options;
using SharpCompress.Common.Zip.Headers;
namespace SharpCompress.Common.Zip;
@@ -9,7 +10,8 @@ public class ZipEntry : Entry
{
private readonly ZipFilePart? _filePart;
internal ZipEntry(ZipFilePart? filePart)
internal ZipEntry(ZipFilePart? filePart, IReaderOptions readerOptions)
: base(readerOptions)
{
if (filePart == null)
{

View File

@@ -24,6 +24,8 @@
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
#nullable disable
using System;
using System.IO;
using System.Threading;
@@ -76,12 +78,11 @@ public sealed partial class ADCStream
var toCopy = count;
var copied = 0;
var outBuf = _outBuffer.NotNull();
while (_outPosition + toCopy >= outBuf.Length)
while (_outPosition + toCopy >= _outBuffer.Length)
{
cancellationToken.ThrowIfCancellationRequested();
var piece = outBuf.Length - _outPosition;
Array.Copy(outBuf, _outPosition, buffer, inPosition, piece);
var piece = _outBuffer.Length - _outPosition;
Array.Copy(_outBuffer, _outPosition, buffer, inPosition, piece);
inPosition += piece;
copied += piece;
_position += piece;
@@ -96,10 +97,9 @@ public sealed partial class ADCStream
{
return copied;
}
outBuf = _outBuffer;
}
Array.Copy(outBuf, _outPosition, buffer, inPosition, toCopy);
Array.Copy(_outBuffer, _outPosition, buffer, inPosition, toCopy);
_outPosition += toCopy;
_position += toCopy;
copied += toCopy;

View File

@@ -24,6 +24,8 @@
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
#nullable disable
using System;
using System.IO;
using System.Threading;
@@ -54,7 +56,7 @@ public sealed partial class ADCStream : Stream
/// <summary>
/// Buffer with currently used chunk of decompressed data
/// </summary>
private byte[]? _outBuffer;
private byte[] _outBuffer;
/// <summary>
/// Position in buffer of decompressed data
@@ -137,11 +139,10 @@ public sealed partial class ADCStream : Stream
var toCopy = count;
var copied = 0;
while (_outPosition + toCopy >= _outBuffer.NotNull().Length)
while (_outPosition + toCopy >= _outBuffer.Length)
{
var outBuf = _outBuffer.NotNull();
var piece = outBuf.Length - _outPosition;
Array.Copy(outBuf, _outPosition, buffer, inPosition, piece);
var piece = _outBuffer.Length - _outPosition;
Array.Copy(_outBuffer, _outPosition, buffer, inPosition, piece);
inPosition += piece;
copied += piece;
_position += piece;
@@ -154,7 +155,7 @@ public sealed partial class ADCStream : Stream
}
}
Array.Copy(_outBuffer.NotNull(), _outPosition, buffer, inPosition, toCopy);
Array.Copy(_outBuffer, _outPosition, buffer, inPosition, toCopy);
_outPosition += toCopy;
_position += toCopy;
copied += toCopy;

View File

@@ -1,5 +1,3 @@
#nullable disable
using System;
using System.IO;
using System.Threading;

View File

@@ -1,3 +1,5 @@
#nullable disable
using System;
using System.IO;
@@ -9,8 +11,8 @@ internal sealed class BinTree : InWindow
private uint _cyclicBufferSize;
private uint _matchMaxLen;
private uint[]? _son;
private uint[]? _hash;
private uint[] _son;
private uint[] _hash;
private uint _cutValue = 0xFF;
private uint _hashMask;
@@ -54,10 +56,9 @@ internal sealed class BinTree : InWindow
public new void Init()
{
base.Init();
var hash = _hash.NotNull();
for (uint i = 0; i < _hashSizeSum; i++)
{
hash[i] = K_EMPTY_HASH_VALUE;
_hash[i] = K_EMPTY_HASH_VALUE;
}
_cyclicBufferPos = 0;
ReduceOffsets(-1);
@@ -140,8 +141,6 @@ internal sealed class BinTree : InWindow
public uint GetMatches(uint[] distances)
{
var son = _son.NotNull();
var hash = _hash.NotNull();
uint lenLimit;
if (_pos + _matchMaxLen <= _streamPos)
{
@@ -165,27 +164,26 @@ internal sealed class BinTree : InWindow
hash2Value = 0,
hash3Value = 0;
var bufferBase = _bufferBase.NotNull();
if (_hashArray)
{
var temp = Crc.TABLE[bufferBase[cur]] ^ bufferBase[cur + 1];
var temp = Crc.TABLE[_bufferBase[cur]] ^ _bufferBase[cur + 1];
hash2Value = temp & (K_HASH2_SIZE - 1);
temp ^= ((uint)(bufferBase[cur + 2]) << 8);
temp ^= ((uint)(_bufferBase[cur + 2]) << 8);
hash3Value = temp & (K_HASH3_SIZE - 1);
hashValue = (temp ^ (Crc.TABLE[bufferBase[cur + 3]] << 5)) & _hashMask;
hashValue = (temp ^ (Crc.TABLE[_bufferBase[cur + 3]] << 5)) & _hashMask;
}
else
{
hashValue = bufferBase[cur] ^ ((uint)(bufferBase[cur + 1]) << 8);
hashValue = _bufferBase[cur] ^ ((uint)(_bufferBase[cur + 1]) << 8);
}
var curMatch = hash[_kFixHashSize + hashValue];
var curMatch = _hash[_kFixHashSize + hashValue];
if (_hashArray)
{
var curMatch2 = hash[hash2Value];
var curMatch3 = hash[K_HASH3_OFFSET + hash3Value];
hash[hash2Value] = _pos;
hash[K_HASH3_OFFSET + hash3Value] = _pos;
var curMatch2 = _hash[hash2Value];
var curMatch3 = _hash[K_HASH3_OFFSET + hash3Value];
_hash[hash2Value] = _pos;
_hash[K_HASH3_OFFSET + hash3Value] = _pos;
if (curMatch2 > matchMinPos)
{
if (_bufferBase[_bufferOffset + curMatch2] == _bufferBase[cur])
@@ -214,7 +212,7 @@ internal sealed class BinTree : InWindow
}
}
hash[_kFixHashSize + hashValue] = _pos;
_hash[_kFixHashSize + hashValue] = _pos;
var ptr0 = (_cyclicBufferPos << 1) + 1;
var ptr1 = (_cyclicBufferPos << 1);
@@ -244,7 +242,7 @@ internal sealed class BinTree : InWindow
{
if (curMatch <= matchMinPos || count-- == 0)
{
son[ptr0] = son[ptr1] = K_EMPTY_HASH_VALUE;
_son[ptr0] = _son[ptr1] = K_EMPTY_HASH_VALUE;
break;
}
var delta = _pos - curMatch;
@@ -272,24 +270,24 @@ internal sealed class BinTree : InWindow
distances[offset++] = delta - 1;
if (len == lenLimit)
{
son[ptr1] = son[cyclicPos];
son[ptr0] = son[cyclicPos + 1];
_son[ptr1] = _son[cyclicPos];
_son[ptr0] = _son[cyclicPos + 1];
break;
}
}
}
if (_bufferBase[pby1 + len] < _bufferBase[cur + len])
{
son[ptr1] = curMatch;
_son[ptr1] = curMatch;
ptr1 = cyclicPos + 1;
curMatch = son[ptr1];
curMatch = _son[ptr1];
len1 = len;
}
else
{
son[ptr0] = curMatch;
_son[ptr0] = curMatch;
ptr0 = cyclicPos;
curMatch = son[ptr0];
curMatch = _son[ptr0];
len0 = len;
}
}
@@ -299,8 +297,6 @@ internal sealed class BinTree : InWindow
public void Skip(uint num)
{
var son = _son.NotNull();
var hash = _hash.NotNull();
do
{
uint lenLimit;
@@ -327,10 +323,10 @@ internal sealed class BinTree : InWindow
{
var temp = Crc.TABLE[_bufferBase[cur]] ^ _bufferBase[cur + 1];
var hash2Value = temp & (K_HASH2_SIZE - 1);
hash[hash2Value] = _pos;
_hash[hash2Value] = _pos;
temp ^= ((uint)(_bufferBase[cur + 2]) << 8);
var hash3Value = temp & (K_HASH3_SIZE - 1);
hash[K_HASH3_OFFSET + hash3Value] = _pos;
_hash[K_HASH3_OFFSET + hash3Value] = _pos;
hashValue = (temp ^ (Crc.TABLE[_bufferBase[cur + 3]] << 5)) & _hashMask;
}
else
@@ -338,8 +334,8 @@ internal sealed class BinTree : InWindow
hashValue = _bufferBase[cur] ^ ((uint)(_bufferBase[cur + 1]) << 8);
}
var curMatch = hash[_kFixHashSize + hashValue];
hash[_kFixHashSize + hashValue] = _pos;
var curMatch = _hash[_kFixHashSize + hashValue];
_hash[_kFixHashSize + hashValue] = _pos;
var ptr0 = (_cyclicBufferPos << 1) + 1;
var ptr1 = (_cyclicBufferPos << 1);
@@ -353,7 +349,7 @@ internal sealed class BinTree : InWindow
{
if (curMatch <= matchMinPos || count-- == 0)
{
son[ptr0] = son[ptr1] = K_EMPTY_HASH_VALUE;
_son[ptr0] = _son[ptr1] = K_EMPTY_HASH_VALUE;
break;
}
@@ -378,23 +374,23 @@ internal sealed class BinTree : InWindow
}
if (len == lenLimit)
{
son[ptr1] = son[cyclicPos];
son[ptr0] = son[cyclicPos + 1];
_son[ptr1] = _son[cyclicPos];
_son[ptr0] = _son[cyclicPos + 1];
break;
}
}
if (_bufferBase[pby1 + len] < _bufferBase[cur + len])
{
son[ptr1] = curMatch;
_son[ptr1] = curMatch;
ptr1 = cyclicPos + 1;
curMatch = son[ptr1];
curMatch = _son[ptr1];
len1 = len;
}
else
{
son[ptr0] = curMatch;
_son[ptr0] = curMatch;
ptr0 = cyclicPos;
curMatch = son[ptr0];
curMatch = _son[ptr0];
len0 = len;
}
}
@@ -422,8 +418,8 @@ internal sealed class BinTree : InWindow
private void Normalize()
{
var subValue = _pos - _cyclicBufferSize;
NormalizeLinks(_son.NotNull(), _cyclicBufferSize * 2, subValue);
NormalizeLinks(_hash.NotNull(), _hashSizeSum, subValue);
NormalizeLinks(_son, _cyclicBufferSize * 2, subValue);
NormalizeLinks(_hash, _hashSizeSum, subValue);
ReduceOffsets((int)subValue);
}

View File

@@ -1,3 +1,5 @@
#nullable disable
using System;
using System.IO;
using System.Threading;
@@ -23,7 +25,6 @@ public partial class Decoder : ICoder, ISetDecoderProperties
)
{
return await _lowCoder[posState]
.NotNull()
.DecodeAsync(rangeDecoder, cancellationToken)
.ConfigureAwait(false);
}
@@ -34,7 +35,6 @@ public partial class Decoder : ICoder, ISetDecoderProperties
)
{
symbol += await _midCoder[posState]
.NotNull()
.DecodeAsync(rangeDecoder, cancellationToken)
.ConfigureAwait(false);
}
@@ -108,8 +108,7 @@ public partial class Decoder : ICoder, ISetDecoderProperties
byte prevByte,
CancellationToken cancellationToken = default
) =>
await _coders
.NotNull()[GetState(pos, prevByte)]
await _coders[GetState(pos, prevByte)]
.DecodeNormalAsync(rangeDecoder, cancellationToken)
.ConfigureAwait(false);
@@ -120,8 +119,7 @@ public partial class Decoder : ICoder, ISetDecoderProperties
byte matchByte,
CancellationToken cancellationToken = default
) =>
await _coders
.NotNull()[GetState(pos, prevByte)]
await _coders[GetState(pos, prevByte)]
.DecodeWithMatchByteAsync(rangeDecoder, matchByte, cancellationToken)
.ConfigureAwait(false);
}
@@ -139,26 +137,26 @@ public partial class Decoder : ICoder, ISetDecoderProperties
{
CreateDictionary();
}
await _outWindow.NotNull().InitAsync(outStream);
await _outWindow.InitAsync(outStream);
if (outSize > 0)
{
_outWindow.NotNull().SetLimit(outSize);
_outWindow.SetLimit(outSize);
}
else
{
_outWindow.NotNull().SetLimit(long.MaxValue - _outWindow.NotNull().Total);
_outWindow.SetLimit(long.MaxValue - _outWindow.Total);
}
var rangeDecoder = new RangeCoder.Decoder();
await rangeDecoder.InitAsync(inStream, cancellationToken).ConfigureAwait(false);
await CodeAsync(_dictionarySize, _outWindow.NotNull(), rangeDecoder, cancellationToken)
await CodeAsync(_dictionarySize, _outWindow, rangeDecoder, cancellationToken)
.ConfigureAwait(false);
await _outWindow.NotNull().ReleaseStreamAsync(cancellationToken).ConfigureAwait(false);
await _outWindow.ReleaseStreamAsync(cancellationToken).ConfigureAwait(false);
rangeDecoder.ReleaseStream();
await _outWindow.NotNull().DisposeAsync().ConfigureAwait(false);
await _outWindow.DisposeAsync().ConfigureAwait(false);
_outWindow = null;
}
@@ -341,6 +339,6 @@ public partial class Decoder : ICoder, ISetDecoderProperties
{
CreateDictionary();
}
await _outWindow.NotNull().TrainAsync(stream);
await _outWindow.TrainAsync(stream);
}
}

View File

@@ -1,3 +1,5 @@
#nullable disable
using System;
using System.Diagnostics.CodeAnalysis;
using System.IO;
@@ -13,12 +15,8 @@ public partial class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Strea
{
private BitDecoder _choice = new();
private BitDecoder _choice2 = new();
private readonly BitTreeDecoder?[] _lowCoder = new BitTreeDecoder?[
Base.K_NUM_POS_STATES_MAX
];
private readonly BitTreeDecoder?[] _midCoder = new BitTreeDecoder?[
Base.K_NUM_POS_STATES_MAX
];
private readonly BitTreeDecoder[] _lowCoder = new BitTreeDecoder[Base.K_NUM_POS_STATES_MAX];
private readonly BitTreeDecoder[] _midCoder = new BitTreeDecoder[Base.K_NUM_POS_STATES_MAX];
private BitTreeDecoder _highCoder = new(Base.K_NUM_HIGH_LEN_BITS);
private uint _numPosStates;
@@ -37,8 +35,8 @@ public partial class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Strea
_choice.Init();
for (uint posState = 0; posState < _numPosStates; posState++)
{
_lowCoder[posState].NotNull().Init();
_midCoder[posState].NotNull().Init();
_lowCoder[posState].Init();
_midCoder[posState].Init();
}
_choice2.Init();
_highCoder.Init();
@@ -48,12 +46,12 @@ public partial class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Strea
{
if (_choice.Decode(rangeDecoder) == 0)
{
return _lowCoder[posState].NotNull().Decode(rangeDecoder);
return _lowCoder[posState].Decode(rangeDecoder);
}
var symbol = Base.K_NUM_LOW_LEN_SYMBOLS;
if (_choice2.Decode(rangeDecoder) == 0)
{
symbol += _midCoder[posState].NotNull().Decode(rangeDecoder);
symbol += _midCoder[posState].Decode(rangeDecoder);
}
else
{
@@ -112,7 +110,7 @@ public partial class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Strea
}
}
private Decoder2[]? _coders;
private Decoder2[] _coders;
private int _numPrevBits;
private int _numPosBits;
private uint _posMask;
@@ -136,11 +134,10 @@ public partial class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Strea
public void Init()
{
var coders = _coders.NotNull();
var numStates = (uint)1 << (_numPrevBits + _numPosBits);
for (uint i = 0; i < numStates; i++)
{
coders[i].Init();
_coders[i].Init();
}
}
@@ -148,18 +145,17 @@ public partial class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Strea
((pos & _posMask) << _numPrevBits) + (uint)(prevByte >> (8 - _numPrevBits));
public byte DecodeNormal(RangeCoder.Decoder rangeDecoder, uint pos, byte prevByte) =>
_coders.NotNull()[GetState(pos, prevByte)].DecodeNormal(rangeDecoder);
_coders[GetState(pos, prevByte)].DecodeNormal(rangeDecoder);
public byte DecodeWithMatchByte(
RangeCoder.Decoder rangeDecoder,
uint pos,
byte prevByte,
byte matchByte
) =>
_coders.NotNull()[GetState(pos, prevByte)].DecodeWithMatchByte(rangeDecoder, matchByte);
) => _coders[GetState(pos, prevByte)].DecodeWithMatchByte(rangeDecoder, matchByte);
}
private OutWindow? _outWindow;
private OutWindow _outWindow;
private readonly BitDecoder[] _isMatchDecoders = new BitDecoder[
Base.K_NUM_STATES << Base.K_NUM_POS_STATES_BITS_MAX
@@ -289,32 +285,32 @@ public partial class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Strea
Stream outStream,
long inSize,
long outSize,
ICodeProgress? progress
ICodeProgress progress
)
{
if (_outWindow is null)
{
CreateDictionary();
}
_outWindow.NotNull().Init(outStream);
_outWindow.Init(outStream);
if (outSize > 0)
{
_outWindow.NotNull().SetLimit(outSize);
_outWindow.SetLimit(outSize);
}
else
{
_outWindow.NotNull().SetLimit(long.MaxValue - _outWindow.NotNull().Total);
_outWindow.SetLimit(long.MaxValue - _outWindow.Total);
}
var rangeDecoder = new RangeCoder.Decoder();
rangeDecoder.Init(inStream);
Code(_dictionarySize, _outWindow.NotNull(), rangeDecoder);
Code(_dictionarySize, _outWindow, rangeDecoder);
_outWindow.NotNull().ReleaseStream();
_outWindow.ReleaseStream();
rangeDecoder.ReleaseStream();
_outWindow.NotNull().Dispose();
_outWindow.Dispose();
_outWindow = null;
}
@@ -477,6 +473,6 @@ public partial class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Strea
{
CreateDictionary();
}
_outWindow.NotNull().Train(stream);
_outWindow.Train(stream);
}
}

View File

@@ -147,7 +147,7 @@ public partial class LzmaStream
_decoder.SetDecoderProperties(Properties);
}
await _rangeDecoder.InitAsync(_inputStream.NotNull(), cancellationToken);
await _rangeDecoder.InitAsync(_inputStream, cancellationToken);
}
else if (control > 0x02)
{

View File

@@ -1,3 +1,5 @@
#nullable disable
using System.IO;
using System.Threading;
using System.Threading.Tasks;
@@ -15,7 +17,7 @@ internal partial class Encoder
{
var b = (byte)(temp + (_low >> 32));
var buffer = new[] { b };
await Stream.WriteAsync(buffer, 0, 1, cancellationToken).ConfigureAwait(false);
await _stream.WriteAsync(buffer, 0, 1, cancellationToken).ConfigureAwait(false);
temp = 0xFF;
} while (--_cacheSize != 0);
_cache = (byte)(((uint)_low) >> 24);
@@ -70,7 +72,7 @@ internal partial class Encoder
}
public async ValueTask FlushStreamAsync(CancellationToken cancellationToken = default) =>
await Stream.FlushAsync(cancellationToken).ConfigureAwait(false);
await _stream.FlushAsync(cancellationToken).ConfigureAwait(false);
}
internal partial class Decoder
@@ -101,7 +103,7 @@ internal partial class Decoder
while (_range < K_TOP_VALUE)
{
var buffer = new byte[1];
var read = await Stream
var read = await _stream
.ReadAsync(buffer, 0, 1, cancellationToken)
.ConfigureAwait(false);
if (read == 0)
@@ -119,7 +121,7 @@ internal partial class Decoder
if (_range < K_TOP_VALUE)
{
var buffer = new byte[1];
var read = await Stream
var read = await _stream
.ReadAsync(buffer, 0, 1, cancellationToken)
.ConfigureAwait(false);
if (read == 0)
@@ -150,7 +152,7 @@ internal partial class Decoder
if (range < K_TOP_VALUE)
{
var read = await Stream
var read = await _stream
.ReadAsync(buffer, 0, 1, cancellationToken)
.ConfigureAwait(false);
if (read == 0)

View File

@@ -1,3 +1,5 @@
#nullable disable
using System.IO;
using System.Runtime.CompilerServices;
@@ -7,7 +9,7 @@ internal partial class Encoder
{
public const uint K_TOP_VALUE = (1 << 24);
private Stream? _stream;
private Stream _stream;
public ulong _low;
public uint _range;
@@ -20,8 +22,6 @@ internal partial class Encoder
public void ReleaseStream() => _stream = null;
private Stream Stream => _stream.NotNull();
public void Init()
{
//StartPosition = Stream.Position;
@@ -40,9 +40,9 @@ internal partial class Encoder
}
}
public void FlushStream() => Stream.Flush();
public void FlushStream() => _stream.Flush();
public void CloseStream() => Stream.Dispose();
public void CloseStream() => _stream.Dispose();
public void ShiftLow()
{
@@ -51,7 +51,7 @@ internal partial class Encoder
var temp = _cache;
do
{
Stream.WriteByte((byte)(temp + (_low >> 32)));
_stream.WriteByte((byte)(temp + (_low >> 32)));
temp = 0xFF;
} while (--_cacheSize != 0);
_cache = (byte)(((uint)_low) >> 24);
@@ -86,7 +86,7 @@ internal partial class Decoder
public uint _range;
public uint _code;
public Stream? _stream;
public Stream _stream;
public long _total;
public void Init(Stream stream)
@@ -97,7 +97,7 @@ internal partial class Decoder
_range = 0xFFFFFFFF;
for (var i = 0; i < 5; i++)
{
_code = (_code << 8) | (byte)stream.ReadByte();
_code = (_code << 8) | (byte)_stream.ReadByte();
}
_total = 5;
}
@@ -106,13 +106,11 @@ internal partial class Decoder
// Stream.ReleaseStream();
_stream = null;
private Stream Stream => _stream.NotNull();
public void Normalize()
{
while (_range < K_TOP_VALUE)
{
_code = (_code << 8) | (byte)Stream.ReadByte();
_code = (_code << 8) | (byte)_stream.ReadByte();
_range <<= 8;
_total++;
}
@@ -123,7 +121,7 @@ internal partial class Decoder
{
if (_range < K_TOP_VALUE)
{
_code = (_code << 8) | (byte)Stream.ReadByte();
_code = (_code << 8) | (byte)_stream.ReadByte();
_range <<= 8;
_total++;
}
@@ -152,7 +150,7 @@ internal partial class Decoder
if (range < K_TOP_VALUE)
{
code = (code << 8) | (byte)Stream.ReadByte();
code = (code << 8) | (byte)_stream.ReadByte();
range <<= 8;
_total++;
}

View File

@@ -1,3 +1,5 @@
#nullable disable
using System.Threading;
using System.Threading.Tasks;

View File

@@ -1,3 +1,5 @@
#nullable disable
using System.Threading;
using System.Threading.Tasks;

View File

@@ -1,3 +1,5 @@
#nullable disable
using System;
using System.IO;
using System.Threading;
@@ -13,9 +15,9 @@ public class PpmdStream : Stream
private readonly PpmdProperties _properties;
private readonly Stream _stream;
private readonly bool _compress;
private Model? _model;
private ModelPpm? _modelH;
private Decoder? _decoder;
private Model _model;
private ModelPpm _modelH;
private Decoder _decoder;
private long _position;
private bool _isDisposed;
@@ -176,7 +178,7 @@ public class PpmdStream : Stream
{
if (_compress)
{
_model.NotNull().EncodeBlock(_stream, new MemoryStream(), true);
_model.EncodeBlock(_stream, new MemoryStream(), true);
}
}
base.Dispose(isDisposing);
@@ -199,12 +201,12 @@ public class PpmdStream : Stream
var size = 0;
if (_properties.Version == PpmdVersion.I1)
{
size = _model.NotNull().DecodeBlock(_stream, buffer, offset, count);
size = _model.DecodeBlock(_stream, buffer, offset, count);
}
if (_properties.Version == PpmdVersion.H)
{
int c;
while (size < count && (c = _modelH.NotNull().DecodeChar()) >= 0)
while (size < count && (c = _modelH.DecodeChar()) >= 0)
{
buffer[offset++] = (byte)c;
size++;
@@ -213,7 +215,7 @@ public class PpmdStream : Stream
if (_properties.Version == PpmdVersion.H7Z)
{
int c;
while (size < count && (c = _modelH.NotNull().DecodeChar(_decoder.NotNull())) >= 0)
while (size < count && (c = _modelH.DecodeChar(_decoder)) >= 0)
{
buffer[offset++] = (byte)c;
size++;
@@ -245,7 +247,6 @@ public class PpmdStream : Stream
if (_properties.Version == PpmdVersion.I1)
{
size = await _model
.NotNull()
.DecodeBlockAsync(_stream, buffer, offset, count, cancellationToken)
.ConfigureAwait(false);
}
@@ -254,12 +255,7 @@ public class PpmdStream : Stream
int c;
while (
size < count
&& (
c = await _modelH
.NotNull()
.DecodeCharAsync(cancellationToken)
.ConfigureAwait(false)
) >= 0
&& (c = await _modelH.DecodeCharAsync(cancellationToken).ConfigureAwait(false)) >= 0
)
{
buffer[offset++] = (byte)c;
@@ -273,8 +269,7 @@ public class PpmdStream : Stream
size < count
&& (
c = await _modelH
.NotNull()
.DecodeCharAsync(_decoder.NotNull(), cancellationToken)
.DecodeCharAsync(_decoder, cancellationToken)
.ConfigureAwait(false)
) >= 0
)
@@ -309,7 +304,6 @@ public class PpmdStream : Stream
// Need to use a temporary buffer since DecodeBlockAsync works with byte[]
var tempBuffer = new byte[count];
size = await _model
.NotNull()
.DecodeBlockAsync(_stream, tempBuffer, 0, count, cancellationToken)
.ConfigureAwait(false);
tempBuffer.AsMemory(0, size).CopyTo(buffer);
@@ -319,12 +313,7 @@ public class PpmdStream : Stream
int c;
while (
size < count
&& (
c = await _modelH
.NotNull()
.DecodeCharAsync(cancellationToken)
.ConfigureAwait(false)
) >= 0
&& (c = await _modelH.DecodeCharAsync(cancellationToken).ConfigureAwait(false)) >= 0
)
{
buffer.Span[offset++] = (byte)c;
@@ -338,8 +327,7 @@ public class PpmdStream : Stream
size < count
&& (
c = await _modelH
.NotNull()
.DecodeCharAsync(_decoder.NotNull(), cancellationToken)
.DecodeCharAsync(_decoder, cancellationToken)
.ConfigureAwait(false)
) >= 0
)
@@ -357,7 +345,7 @@ public class PpmdStream : Stream
{
if (_compress)
{
_model.NotNull().EncodeBlock(_stream, new MemoryStream(buffer, offset, count), false);
_model.EncodeBlock(_stream, new MemoryStream(buffer, offset, count), false);
}
}
}

View File

@@ -1,3 +1,5 @@
#nullable disable
using System;
using System.Buffers;
using System.IO;
@@ -45,7 +47,7 @@ internal partial class RarStream
if (tmpCount > 0)
{
var toCopy = tmpCount < count ? tmpCount : count;
Buffer.BlockCopy(tmpBuffer.NotNull(), tmpOffset, buffer, offset, toCopy);
Buffer.BlockCopy(tmpBuffer, tmpOffset, buffer, offset, toCopy);
tmpOffset += toCopy;
tmpCount -= toCopy;
offset += toCopy;

View File

@@ -1,3 +1,5 @@
#nullable disable
using System;
using System.Buffers;
using System.IO;
@@ -13,11 +15,11 @@ internal partial class RarStream : Stream
private bool fetch;
private byte[]? tmpBuffer = ArrayPool<byte>.Shared.Rent(65536);
private byte[] tmpBuffer = ArrayPool<byte>.Shared.Rent(65536);
private int tmpOffset;
private int tmpCount;
private byte[]? outBuffer;
private byte[] outBuffer;
private int outOffset;
private int outCount;
private int outTotal;
@@ -45,11 +47,8 @@ internal partial class RarStream : Stream
{
if (disposing)
{
if (this.tmpBuffer is not null)
{
ArrayPool<byte>.Shared.Return(this.tmpBuffer);
this.tmpBuffer = null;
}
ArrayPool<byte>.Shared.Return(this.tmpBuffer);
this.tmpBuffer = null;
readStream.Dispose();
}
isDisposed = true;
@@ -80,7 +79,7 @@ internal partial class RarStream : Stream
if (tmpCount > 0)
{
var toCopy = tmpCount < count ? tmpCount : count;
Buffer.BlockCopy(tmpBuffer.NotNull(), tmpOffset, buffer, offset, toCopy);
Buffer.BlockCopy(tmpBuffer, tmpOffset, buffer, offset, toCopy);
tmpOffset += toCopy;
tmpCount -= toCopy;
offset += toCopy;
@@ -120,7 +119,7 @@ internal partial class RarStream : Stream
if (outCount > 0)
{
var toCopy = outCount < count ? outCount : count;
Buffer.BlockCopy(buffer, offset, outBuffer.NotNull(), outOffset, toCopy);
Buffer.BlockCopy(buffer, offset, outBuffer, outOffset, toCopy);
outOffset += toCopy;
outCount -= toCopy;
offset += toCopy;
@@ -130,7 +129,7 @@ internal partial class RarStream : Stream
if (count > 0)
{
EnsureBufferCapacity(count);
Buffer.BlockCopy(buffer, offset, tmpBuffer.NotNull(), tmpCount, count);
Buffer.BlockCopy(buffer, offset, tmpBuffer, tmpCount, count);
tmpCount += count;
tmpOffset = 0;
unpack.Suspended = true;
@@ -143,16 +142,15 @@ internal partial class RarStream : Stream
private void EnsureBufferCapacity(int count)
{
var buffer = this.tmpBuffer.NotNull();
if (buffer.Length < this.tmpCount + count)
if (this.tmpBuffer.Length < this.tmpCount + count)
{
var newLength =
buffer.Length * 2 > this.tmpCount + count
? buffer.Length * 2
this.tmpBuffer.Length * 2 > this.tmpCount + count
? this.tmpBuffer.Length * 2
: this.tmpCount + count;
var newBuffer = ArrayPool<byte>.Shared.Rent(newLength);
Buffer.BlockCopy(buffer, 0, newBuffer, 0, this.tmpCount);
var oldBuffer = buffer;
Buffer.BlockCopy(this.tmpBuffer, 0, newBuffer, 0, this.tmpCount);
var oldBuffer = this.tmpBuffer;
this.tmpBuffer = newBuffer;
ArrayPool<byte>.Shared.Return(oldBuffer);
}
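For context, a minimal standalone sketch of the grow-and-copy pattern EnsureBufferCapacity follows now that tmpBuffer is non-nullable: rent a larger pooled array, copy the live bytes, return the old array. Names here are illustrative, not the RarStream fields.

using System;
using System.Buffers;

internal sealed class PooledBufferSketch
{
    private byte[] _buffer = ArrayPool<byte>.Shared.Rent(65536);
    private int _count;

    public void EnsureCapacity(int extra)
    {
        if (_buffer.Length >= _count + extra)
        {
            return;
        }

        // Grow to at least double the current size, or to the exact requirement if larger.
        var newLength = Math.Max(_buffer.Length * 2, _count + extra);
        var newBuffer = ArrayPool<byte>.Shared.Rent(newLength);

        Buffer.BlockCopy(_buffer, 0, newBuffer, 0, _count);

        var oldBuffer = _buffer;
        _buffer = newBuffer;
        ArrayPool<byte>.Shared.Return(oldBuffer);
    }
}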

View File

@@ -1,3 +1,5 @@
#nullable disable
using System;
namespace SharpCompress.Compressors.Xz;
@@ -8,7 +10,7 @@ public static class Crc32
public const uint DefaultPolynomial = 0xedb88320u;
public const uint DefaultSeed = 0xffffffffu;
private static uint[]? defaultTable;
private static uint[] defaultTable;
public static uint Compute(byte[] buffer) => Compute(DefaultSeed, buffer);

View File

@@ -1,3 +1,5 @@
#nullable disable
using System;
namespace SharpCompress.Compressors.Xz;
@@ -7,7 +9,7 @@ public static class Crc64
{
public const ulong DefaultSeed = 0x0;
internal static ulong[]? Table;
internal static ulong[] Table;
public const ulong Iso3309Polynomial = 0xD800000000000000;

View File

@@ -1,3 +1,5 @@
#nullable disable
using System;
using System.IO;
using System.Threading;
@@ -34,7 +36,7 @@ public sealed partial class XZStream : XZReadOnlyStream
private void AssertBlockCheckTypeIsSupported()
{
switch (Header.NotNull().BlockCheckType)
switch (Header.BlockCheckType)
{
case CheckType.NONE:
case CheckType.CRC32:
@@ -47,11 +49,11 @@ public sealed partial class XZStream : XZReadOnlyStream
}
private readonly Stream _baseStream;
public XZHeader? Header { get; private set; }
public XZIndex? Index { get; private set; }
public XZFooter? Footer { get; private set; }
public XZHeader Header { get; private set; }
public XZIndex Index { get; private set; }
public XZFooter Footer { get; private set; }
public bool HeaderIsRead { get; private set; }
private XZBlock? _currentBlock;
private XZBlock _currentBlock;
private bool _endOfStream;
@@ -111,7 +113,7 @@ public sealed partial class XZStream : XZReadOnlyStream
var remaining = count - bytesRead;
var newOffset = offset + bytesRead;
var justRead = _currentBlock.NotNull().Read(buffer, newOffset, remaining);
var justRead = _currentBlock.Read(buffer, newOffset, remaining);
if (justRead < remaining)
{
NextBlock();
@@ -128,9 +130,5 @@ public sealed partial class XZStream : XZReadOnlyStream
}
private void NextBlock() =>
_currentBlock = new XZBlock(
BaseStream,
Header.NotNull().BlockCheckType,
Header.NotNull().BlockCheckSize
);
_currentBlock = new XZBlock(BaseStream, Header.BlockCheckType, Header.BlockCheckSize);
}

View File

@@ -1,3 +1,5 @@
#nullable disable
using System;
using System.Security.Cryptography;

View File

@@ -1,3 +1,5 @@
#nullable disable
using System;
using System.IO;
@@ -13,7 +15,7 @@ public sealed class Crc32Stream : Stream
public const uint DEFAULT_POLYNOMIAL = 0xedb88320u;
public const uint DEFAULT_SEED = 0xffffffffu;
private static uint[]? _defaultTable;
private static uint[] _defaultTable;
public Crc32Stream(Stream stream)
: this(stream, DEFAULT_POLYNOMIAL, DEFAULT_SEED) { }

View File

@@ -1,3 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
@@ -7,6 +8,7 @@ using SharpCompress.Archives;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
@@ -156,27 +158,33 @@ public class GZipFactory
#region IWriterFactory
/// <inheritdoc/>
public IWriter OpenWriter(Stream stream, WriterOptions writerOptions)
public IWriter OpenWriter(Stream stream, IWriterOptions writerOptions)
{
if (writerOptions.CompressionType != CompressionType.GZip)
{
throw new InvalidFormatException("GZip archives only support GZip compression type.");
}
return new GZipWriter(stream, new GZipWriterOptions(writerOptions));
GZipWriterOptions gzipOptions = writerOptions switch
{
GZipWriterOptions gwo => gwo,
WriterOptions wo => new GZipWriterOptions(wo),
_ => throw new ArgumentException(
$"Expected WriterOptions or GZipWriterOptions, got {writerOptions.GetType().Name}",
nameof(writerOptions)
),
};
return new GZipWriter(stream, gzipOptions);
}
/// <inheritdoc/>
public IAsyncWriter OpenAsyncWriter(
Stream stream,
WriterOptions writerOptions,
IWriterOptions writerOptions,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
if (writerOptions.CompressionType != CompressionType.GZip)
{
throw new InvalidFormatException("GZip archives only support GZip compression type.");
}
return (IAsyncWriter)OpenWriter(stream, writerOptions);
}
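The same IWriterOptions coercion reappears in the TarFactory and ZipFactory hunks below. A hedged caller-side sketch of the new shape follows; how the IWriterFactory instance is obtained is left out, and the WriterOptions(CompressionType) constructor and the Write(string, Stream) extension are assumed, not shown in this diff.

using System.IO;
using SharpCompress.Common;
using SharpCompress.Factories;   // assumed namespace for IWriterFactory
using SharpCompress.Writers;

internal static class FactoryWriterSketch
{
    public static void WriteOneEntry(IWriterFactory factory, Stream output, Stream payload)
    {
        // A plain WriterOptions is wrapped into the format-specific options type
        // (GZipWriterOptions in the hunk above); a GZipWriterOptions instance is used
        // as-is, and any other IWriterOptions implementation throws ArgumentException.
        using var writer = factory.OpenWriter(output, new WriterOptions(CompressionType.GZip));
        writer.Write("payload.txt", payload);
    }
}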

View File

@@ -7,6 +7,7 @@ using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Tar;
@@ -216,13 +217,24 @@ public class TarFactory
#region IWriterFactory
/// <inheritdoc/>
public IWriter OpenWriter(Stream stream, WriterOptions writerOptions) =>
new TarWriter(stream, new TarWriterOptions(writerOptions));
public IWriter OpenWriter(Stream stream, IWriterOptions writerOptions)
{
TarWriterOptions tarOptions = writerOptions switch
{
TarWriterOptions two => two,
WriterOptions wo => new TarWriterOptions(wo),
_ => throw new ArgumentException(
$"Expected WriterOptions or TarWriterOptions, got {writerOptions.GetType().Name}",
nameof(writerOptions)
),
};
return new TarWriter(stream, tarOptions);
}
/// <inheritdoc/>
public IAsyncWriter OpenAsyncWriter(
Stream stream,
WriterOptions writerOptions,
IWriterOptions writerOptions,
CancellationToken cancellationToken = default
)
{

View File

@@ -1,3 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
@@ -5,6 +6,7 @@ using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Zip;
@@ -185,13 +187,24 @@ public class ZipFactory
#region IWriterFactory
/// <inheritdoc/>
public IWriter OpenWriter(Stream stream, WriterOptions writerOptions) =>
new ZipWriter(stream, new ZipWriterOptions(writerOptions));
public IWriter OpenWriter(Stream stream, IWriterOptions writerOptions)
{
ZipWriterOptions zipOptions = writerOptions switch
{
ZipWriterOptions zwo => zwo,
WriterOptions wo => new ZipWriterOptions(wo),
_ => throw new ArgumentException(
$"Expected WriterOptions or ZipWriterOptions, got {writerOptions.GetType().Name}",
nameof(writerOptions)
),
};
return new ZipWriter(stream, zipOptions);
}
/// <inheritdoc/>
public IAsyncWriter OpenAsyncWriter(
Stream stream,
WriterOptions writerOptions,
IWriterOptions writerOptions,
CancellationToken cancellationToken = default
)
{

View File

@@ -1,3 +1,4 @@
#nullable disable
using System;
using System.Collections;
using System.Collections.Generic;

View File

@@ -1,3 +1,5 @@
#nullable disable
using System;
using System.Collections;
using System.Collections.Generic;
@@ -41,7 +43,7 @@ internal sealed class LazyReadOnlyCollection<T> : ICollection<T>
#region IEnumerator Members
object? IEnumerator.Current => Current;
object IEnumerator.Current => Current;
public bool MoveNext()
{

View File

@@ -74,7 +74,7 @@ public abstract partial class AceReader : AbstractReader<AceEntry, AceVolume>
break;
}
yield return new AceEntry(new AceFilePart((AceFileHeader)localHeader, stream));
yield return new AceEntry(new AceFilePart((AceFileHeader)localHeader, stream), Options);
}
}
@@ -114,7 +114,7 @@ public abstract partial class AceReader : AbstractReader<AceEntry, AceVolume>
break;
}
yield return new AceEntry(new AceFilePart((AceFileHeader)localHeader, stream));
yield return new AceEntry(new AceFilePart((AceFileHeader)localHeader, stream), Options);
}
}

View File

@@ -1,3 +1,5 @@
#nullable disable
using System;
using System.Collections;
using System.Collections.Generic;
@@ -13,7 +15,7 @@ namespace SharpCompress.Readers.Ace;
internal class MultiVolumeAceReader : AceReader
{
private readonly IEnumerator<Stream> streams;
private Stream? tempStream;
private Stream tempStream;
internal MultiVolumeAceReader(IEnumerable<Stream> streams, ReaderOptions options)
: base(options) => this.streams = streams.GetEnumerator();
@@ -52,13 +54,13 @@ internal class MultiVolumeAceReader : AceReader
{
private readonly MultiVolumeAceReader reader;
private readonly IEnumerator<Stream> nextReadableStreams;
private Stream? tempStream;
private Stream tempStream;
private bool isFirst = true;
internal MultiVolumeStreamEnumerator(
MultiVolumeAceReader r,
IEnumerator<Stream> nextReadableStreams,
Stream? tempStream
Stream tempStream
)
{
reader = r;
@@ -70,12 +72,7 @@ internal class MultiVolumeAceReader : AceReader
IEnumerator IEnumerable.GetEnumerator() => this;
private FilePart? _current;
public FilePart Current
{
get => _current.NotNull();
private set => _current = value;
}
public FilePart Current { get; private set; }
public void Dispose() { }
@@ -85,7 +82,7 @@ internal class MultiVolumeAceReader : AceReader
{
if (isFirst)
{
_current = reader.Entry.Parts.First();
Current = reader.Entry.Parts.First();
isFirst = false; //first stream already to go
return true;
}
@@ -96,7 +93,7 @@ internal class MultiVolumeAceReader : AceReader
}
if (tempStream != null)
{
reader.LoadStreamForReading(tempStream.NotNull());
reader.LoadStreamForReading(tempStream);
tempStream = null;
}
else if (!nextReadableStreams.MoveNext())
@@ -110,7 +107,7 @@ internal class MultiVolumeAceReader : AceReader
reader.LoadStreamForReading(nextReadableStreams.Current);
}
_current = reader.Entry.Parts.First();
Current = reader.Entry.Parts.First();
return true;
}

View File

@@ -15,7 +15,7 @@ public partial class ArcReader
(header = await headerReader.ReadHeaderAsync(stream, CancellationToken.None)) != null
)
{
yield return new ArcEntry(new ArcFilePart(header, stream));
yield return new ArcEntry(new ArcFilePart(header, stream), Options);
}
}
}

View File

@@ -34,7 +34,7 @@ public partial class ArcReader : AbstractReader<ArcEntry, ArcVolume>
ArcEntryHeader? header;
while ((header = headerReader.ReadHeader(stream)) != null)
{
yield return new ArcEntry(new ArcFilePart(header, stream));
yield return new ArcEntry(new ArcFilePart(header, stream), Options);
}
}
}

View File

@@ -90,7 +90,10 @@ public abstract partial class ArjReader : AbstractReader<ArjEntry, ArjVolume>
continue;
}
yield return new ArjEntry(new ArjFilePart((ArjLocalHeader)localHeader, stream));
yield return new ArjEntry(
new ArjFilePart((ArjLocalHeader)localHeader, stream),
Options
);
}
}
@@ -135,7 +138,10 @@ public abstract partial class ArjReader : AbstractReader<ArjEntry, ArjVolume>
continue;
}
yield return new ArjEntry(new ArjFilePart((ArjLocalHeader)localHeader, stream));
yield return new ArjEntry(
new ArjFilePart((ArjLocalHeader)localHeader, stream),
Options
);
}
}

View File

@@ -1,3 +1,5 @@
#nullable disable
using System;
using System.Collections;
using System.Collections.Generic;
@@ -14,7 +16,7 @@ namespace SharpCompress.Readers.Arj;
internal class MultiVolumeArjReader : ArjReader
{
private readonly IEnumerator<Stream> streams;
private Stream? tempStream;
private Stream tempStream;
internal MultiVolumeArjReader(IEnumerable<Stream> streams, ReaderOptions options)
: base(options) => this.streams = streams.GetEnumerator();
@@ -53,13 +55,13 @@ internal class MultiVolumeArjReader : ArjReader
{
private readonly MultiVolumeArjReader reader;
private readonly IEnumerator<Stream> nextReadableStreams;
private Stream? tempStream;
private Stream tempStream;
private bool isFirst = true;
internal MultiVolumeStreamEnumerator(
MultiVolumeArjReader r,
IEnumerator<Stream> nextReadableStreams,
Stream? tempStream
Stream tempStream
)
{
reader = r;
@@ -71,12 +73,7 @@ internal class MultiVolumeArjReader : ArjReader
IEnumerator IEnumerable.GetEnumerator() => this;
private FilePart? _current;
public FilePart Current
{
get => _current.NotNull();
private set => _current = value;
}
public FilePart Current { get; private set; }
public void Dispose() { }
@@ -97,7 +94,7 @@ internal class MultiVolumeArjReader : ArjReader
}
if (tempStream != null)
{
reader.LoadStreamForReading(tempStream.NotNull());
reader.LoadStreamForReading(tempStream);
tempStream = null;
}
else if (!nextReadableStreams.MoveNext())

View File

@@ -14,15 +14,14 @@ public static class IAsyncReaderExtensions
/// </summary>
public async ValueTask WriteEntryToDirectoryAsync(
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
) =>
await ExtractionMethods
.WriteEntryToDirectoryAsync(
reader.Entry,
destinationDirectory,
options,
reader.WriteEntryToFileAsync,
async (path, ct) =>
await reader.WriteEntryToFileAsync(path, ct).ConfigureAwait(false),
cancellationToken
)
.ConfigureAwait(false);
@@ -32,14 +31,12 @@ public static class IAsyncReaderExtensions
/// </summary>
public async ValueTask WriteEntryToFileAsync(
string destinationFileName,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
) =>
await ExtractionMethods
.WriteEntryToFileAsync(
reader.Entry,
destinationFileName,
options,
async (x, fm, ct) =>
{
using var fs = File.Open(destinationFileName, fm);
@@ -54,28 +51,25 @@ public static class IAsyncReaderExtensions
/// </summary>
public async ValueTask WriteAllToDirectoryAsync(
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
)
{
while (await reader.MoveToNextEntryAsync(cancellationToken))
{
await reader
.WriteEntryToDirectoryAsync(destinationDirectory, options, cancellationToken)
.WriteEntryToDirectoryAsync(destinationDirectory, cancellationToken)
.ConfigureAwait(false);
}
}
public async ValueTask WriteEntryToAsync(
string destinationFileName,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
) =>
await ExtractionMethods
.WriteEntryToFileAsync(
reader.Entry,
destinationFileName,
options,
async (x, fm, ct) =>
{
using var fs = File.Open(destinationFileName, fm);
@@ -87,11 +81,10 @@ public static class IAsyncReaderExtensions
public async ValueTask WriteEntryToAsync(
FileInfo destinationFileInfo,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
) =>
await reader
.WriteEntryToAsync(destinationFileInfo.FullName, options, cancellationToken)
.WriteEntryToAsync(destinationFileInfo.FullName, cancellationToken)
.ConfigureAwait(false);
}
}
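With the ExtractionOptions parameters removed from these async extensions, a hedged sketch of the trimmed-down call shape: the IAsyncReader receiver type is inferred from the extension class name, and per-entry extraction behaviour is assumed to come from the ReaderOptions the reader was opened with (an assumption, not shown in this hunk).

using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Readers;

internal static class AsyncExtractSketch
{
    public static async ValueTask ExtractAllAsync(
        IAsyncReader reader,
        string targetDirectory,
        CancellationToken ct)
    {
        // Destination plus token only; the options argument no longer exists.
        await reader.WriteAllToDirectoryAsync(targetDirectory, ct);
    }
}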

View File

@@ -1,4 +1,4 @@
using System.IO;
using System.IO;
using SharpCompress.Common;
namespace SharpCompress.Readers;
@@ -22,42 +22,31 @@ public static class IReaderExtensions
/// <summary>
/// Extract all remaining unread entries to specific directory, retaining filename
/// </summary>
public void WriteAllToDirectory(
string destinationDirectory,
ExtractionOptions? options = null
)
public void WriteAllToDirectory(string destinationDirectory)
{
while (reader.MoveToNextEntry())
{
reader.WriteEntryToDirectory(destinationDirectory, options);
reader.WriteEntryToDirectory(destinationDirectory);
}
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public void WriteEntryToDirectory(
string destinationDirectory,
ExtractionOptions? options = null
) =>
public void WriteEntryToDirectory(string destinationDirectory) =>
ExtractionMethods.WriteEntryToDirectory(
reader.Entry,
destinationDirectory,
options,
reader.WriteEntryToFile
(path) => reader.WriteEntryToFile(path)
);
/// <summary>
/// Extract to specific file
/// </summary>
public void WriteEntryToFile(
string destinationFileName,
ExtractionOptions? options = null
) =>
public void WriteEntryToFile(string destinationFileName) =>
ExtractionMethods.WriteEntryToFile(
reader.Entry,
destinationFileName,
options,
(x, fm) =>
{
using var fs = File.Open(destinationFileName, fm);
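And the synchronous counterpart, equally hedged (IReader inferred as the receiver type): WriteAllToDirectory and WriteEntryToDirectory now take only the destination path.

using SharpCompress.Readers;

internal static class SyncExtractSketch
{
    public static void ExtractAll(IReader reader, string targetDirectory) =>
        // Loops MoveToNextEntry/WriteEntryToDirectory internally; no ExtractionOptions.
        reader.WriteAllToDirectory(targetDirectory);
}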

View File

@@ -1,3 +1,5 @@
#nullable disable
using System.Collections;
using System.Collections.Generic;
using System.IO;
@@ -24,13 +26,13 @@ internal partial class MultiVolumeRarReader : RarReader
{
private readonly MultiVolumeRarReader reader;
private readonly IEnumerator<Stream> nextReadableStreams;
private Stream? tempStream;
private Stream tempStream;
private bool isFirst = true;
internal MultiVolumeStreamAsyncEnumerator(
MultiVolumeRarReader r,
IEnumerator<Stream> nextReadableStreams,
Stream? tempStream
Stream tempStream
)
{
reader = r;
@@ -38,12 +40,7 @@ internal partial class MultiVolumeRarReader : RarReader
this.tempStream = tempStream;
}
private FilePart? _current;
public FilePart Current
{
get => _current.NotNull();
private set => _current = value;
}
public FilePart Current { get; private set; }
public async ValueTask<bool> MoveNextAsync()
{
@@ -60,7 +57,7 @@ internal partial class MultiVolumeRarReader : RarReader
}
if (tempStream != null)
{
await reader.LoadStreamForReadingAsync(tempStream.NotNull());
await reader.LoadStreamForReadingAsync(tempStream);
tempStream = null;
}
else if (!nextReadableStreams.MoveNext())

View File

@@ -1,3 +1,5 @@
#nullable disable
using System.Collections;
using System.Collections.Generic;
using System.IO;
@@ -12,7 +14,7 @@ namespace SharpCompress.Readers.Rar;
internal partial class MultiVolumeRarReader : RarReader
{
private readonly IEnumerator<Stream> streams;
private Stream? tempStream;
private Stream tempStream;
internal MultiVolumeRarReader(IEnumerable<Stream> streams, ReaderOptions options)
: base(options) => this.streams = streams.GetEnumerator();
@@ -53,13 +55,13 @@ internal partial class MultiVolumeRarReader : RarReader
{
private readonly MultiVolumeRarReader reader;
private readonly IEnumerator<Stream> nextReadableStreams;
private Stream? tempStream;
private Stream tempStream;
private bool isFirst = true;
internal MultiVolumeStreamEnumerator(
MultiVolumeRarReader r,
IEnumerator<Stream> nextReadableStreams,
Stream? tempStream
Stream tempStream
)
{
reader = r;
@@ -71,12 +73,7 @@ internal partial class MultiVolumeRarReader : RarReader
IEnumerator IEnumerable.GetEnumerator() => this;
private FilePart? _current;
public FilePart Current
{
get => _current.NotNull();
private set => _current = value;
}
public FilePart Current { get; private set; }
public void Dispose() { }
@@ -97,7 +94,7 @@ internal partial class MultiVolumeRarReader : RarReader
}
if (tempStream != null)
{
reader.LoadStreamForReading(tempStream.NotNull());
reader.LoadStreamForReading(tempStream);
tempStream = null;
}
else if (!nextReadableStreams.MoveNext())

View File

@@ -93,7 +93,7 @@ public abstract partial class RarReader : AbstractReader<RarReaderEntry, RarVolu
foreach (var fp in volume.ReadFileParts())
{
ValidateArchive(volume);
yield return new RarReaderEntry(volume.IsSolidArchive, fp);
yield return new RarReaderEntry(volume.IsSolidArchive, fp, Options);
}
}
@@ -103,7 +103,7 @@ public abstract partial class RarReader : AbstractReader<RarReaderEntry, RarVolu
await foreach (var fp in volume.ReadFilePartsAsync())
{
ValidateArchive(volume);
yield return new RarReaderEntry(volume.IsSolidArchive, fp);
yield return new RarReaderEntry(volume.IsSolidArchive, fp, Options);
}
}

View File

@@ -1,5 +1,6 @@
using System.Collections.Generic;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
@@ -7,7 +8,8 @@ namespace SharpCompress.Readers.Rar;
public class RarReaderEntry : RarEntry
{
internal RarReaderEntry(bool solid, RarFilePart part)
internal RarReaderEntry(bool solid, RarFilePart part, IReaderOptions readerOptions)
: base(readerOptions)
{
Part = part;
IsSolid = solid;

Some files were not shown because too many files have changed in this diff.