Compare commits

..

10 Commits

Author SHA1 Message Date
Adam Hathcock
a34f5a855c Mark for 0.28.2 2021-04-25 09:29:56 +01:00
Adam Hathcock
6474741af1 Merge pull request #593 from adamhathcock/fix-pkware-encryption
ReadFully used by pkware encryption didn’t like spans
2021-04-25 09:29:02 +01:00
Adam Hathcock
c10bd840c5 ReadFully used by pkware encryption didn’t like spans 2021-04-25 09:25:51 +01:00
Adam Hathcock
8a022c4b18 Update FORMATS.md
remove LZipArchive/Reader/Writer mention
2021-03-28 08:58:11 +01:00
Adam Hathcock
cfef228afc Merge pull request #579 from Looooong/fix/do-not-place-extention-classes-in-common-namespace
Do not place extension classes in common namespace
2021-03-18 13:52:40 +00:00
Nguyễn Đức Long
237ff9f055 Do not place extension classes in common namespace 2021-03-18 20:44:04 +07:00
Adam Hathcock
020f862814 Bug fix for recursive call introduced in 0.28 2021-02-18 08:31:50 +00:00
Adam Hathcock
fa6107200d Merge pull request #572 from Erior/feature/521
Not so elegant perhaps for checking 7z encryption
2021-02-16 08:05:08 +00:00
Adam Hathcock
eb81f972c4 Merge branch 'master' into feature/521 2021-02-16 08:01:32 +00:00
Lars Vahlenberg
93c1ff396e Not so elegant perhaps 2021-02-14 16:29:01 +01:00
126 changed files with 3001 additions and 3079 deletions

View File

@@ -19,7 +19,6 @@
| Tar.XZ | LZMA2 | Decompress | TarArchive | TarReader | TarWriter (3) |
| GZip (single file) | DEFLATE | Both | GZipArchive | GZipReader | GZipWriter |
| 7Zip (4) | LZMA, LZMA2, BZip2, PPMd, BCJ, BCJ2, Deflate | Decompress | SevenZipArchive | N/A | N/A |
| LZip (single file) (5) | LZip (LZMA) | Both | LZipArchive | LZipReader | LZipWriter |
1. SOLID Rars are only supported in the RarReader API.
2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading/writing is supported but only with seekable streams as the Zip spec doesn't support Zip64 data in post data descriptors. Deflate64 is only supported for reading.

View File

@@ -2,25 +2,29 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives
{
public abstract class AbstractArchive<TEntry, TVolume> : IArchive
public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtractionListener
where TEntry : IArchiveEntry
where TVolume : IVolume
{
private readonly LazyReadOnlyCollection<TVolume> lazyVolumes;
private readonly LazyReadOnlyCollection<TEntry> lazyEntries;
protected ReaderOptions ReaderOptions { get; } = new ();
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionBegin;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionEnd;
public event EventHandler<CompressedBytesReadEventArgs>? CompressedBytesRead;
public event EventHandler<FilePartExtractionBeginEventArgs>? FilePartExtractionBegin;
protected ReaderOptions ReaderOptions { get; }
private bool disposed;
internal AbstractArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerOptions, CancellationToken cancellationToken)
internal AbstractArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerOptions)
{
Type = type;
if (!fileInfo.Exists)
@@ -29,30 +33,42 @@ namespace SharpCompress.Archives
}
ReaderOptions = readerOptions;
readerOptions.LeaveStreamOpen = false;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(fileInfo, cancellationToken));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes, cancellationToken));
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(fileInfo));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
}
protected abstract IAsyncEnumerable<TVolume> LoadVolumes(FileInfo file, CancellationToken cancellationToken);
protected abstract IEnumerable<TVolume> LoadVolumes(FileInfo file);
internal AbstractArchive(ArchiveType type, IAsyncEnumerable<Stream> streams, ReaderOptions readerOptions, CancellationToken cancellationToken)
internal AbstractArchive(ArchiveType type, IEnumerable<Stream> streams, ReaderOptions readerOptions)
{
Type = type;
ReaderOptions = readerOptions;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(streams.Select(CheckStreams), cancellationToken));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes, cancellationToken));
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(streams.Select(CheckStreams)));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
}
#nullable disable
internal AbstractArchive(ArchiveType type)
{
Type = type;
lazyVolumes = new LazyReadOnlyCollection<TVolume>( AsyncEnumerable.Empty<TVolume>());
lazyEntries = new LazyReadOnlyCollection<TEntry>(AsyncEnumerable.Empty<TEntry>());
lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
}
#nullable enable
public ArchiveType Type { get; }
void IArchiveExtractionListener.FireEntryExtractionBegin(IArchiveEntry entry)
{
EntryExtractionBegin?.Invoke(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));
}
void IArchiveExtractionListener.FireEntryExtractionEnd(IArchiveEntry entry)
{
EntryExtractionEnd?.Invoke(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));
}
private static Stream CheckStreams(Stream stream)
{
if (!stream.CanSeek || !stream.CanRead)
@@ -65,48 +81,63 @@ namespace SharpCompress.Archives
/// <summary>
/// Returns a ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
/// </summary>
public virtual IAsyncEnumerable<TEntry> Entries => lazyEntries;
public virtual ICollection<TEntry> Entries => lazyEntries;
/// <summary>
/// Returns a ReadOnlyCollection of all the RarArchiveVolumes across the one or many parts of the RarArchive.
/// </summary>
public IAsyncEnumerable<TVolume> Volumes => lazyVolumes;
public ICollection<TVolume> Volumes => lazyVolumes;
/// <summary>
/// The total size of the files compressed in the archive.
/// </summary>
public virtual async ValueTask<long> TotalSizeAsync()
{
await EnsureEntriesLoaded();
return await Entries.AggregateAsync(0L, (total, cf) => total + cf.CompressedSize);
}
public virtual long TotalSize => Entries.Aggregate(0L, (total, cf) => total + cf.CompressedSize);
/// <summary>
/// The total size of the files as uncompressed in the archive.
/// </summary>
public virtual async ValueTask<long> TotalUncompressedSizeAsync()
{
await EnsureEntriesLoaded();
return await Entries.AggregateAsync(0L, (total, cf) => total + cf.Size);
}
public virtual long TotalUncompressSize => Entries.Aggregate(0L, (total, cf) => total + cf.Size);
protected abstract IAsyncEnumerable<TVolume> LoadVolumes(IAsyncEnumerable<Stream> streams, CancellationToken cancellationToken);
protected abstract IAsyncEnumerable<TEntry> LoadEntries(IAsyncEnumerable<TVolume> volumes, CancellationToken cancellationToken);
protected abstract IEnumerable<TVolume> LoadVolumes(IEnumerable<Stream> streams);
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);
IAsyncEnumerable<IArchiveEntry> IArchive.Entries => Entries.Select(x => (IArchiveEntry)x);
IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();
IAsyncEnumerable<IVolume> IArchive.Volumes => lazyVolumes.Select(x => (IVolume)x);
IEnumerable<IVolume> IArchive.Volumes => lazyVolumes.Cast<IVolume>();
public virtual async ValueTask DisposeAsync()
public virtual void Dispose()
{
if (!disposed)
{
await lazyVolumes.ForEachAsync(async v => await v.DisposeAsync());
await lazyEntries.GetLoaded().Cast<Entry>().ForEachAsync(async x => await x.CloseAsync());
lazyVolumes.ForEach(v => v.Dispose());
lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
disposed = true;
}
}
void IArchiveExtractionListener.EnsureEntriesLoaded()
{
lazyEntries.EnsureFullyLoaded();
lazyVolumes.EnsureFullyLoaded();
}
void IExtractionListener.FireCompressedBytesRead(long currentPartCompressedBytes, long compressedReadBytes)
{
CompressedBytesRead?.Invoke(this, new CompressedBytesReadEventArgs(
currentFilePartCompressedBytesRead: currentPartCompressedBytes,
compressedBytesRead: compressedReadBytes
));
}
void IExtractionListener.FireFilePartExtractionBegin(string name, long size, long compressedSize)
{
FilePartExtractionBegin?.Invoke(this, new FilePartExtractionBeginEventArgs(
compressedSize: compressedSize,
size: size,
name: name
));
}
/// <summary>
/// Use this method to extract all entries in an archive in order.
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
@@ -118,32 +149,29 @@ namespace SharpCompress.Archives
/// occur if this is used at the same time as other extraction methods on this instance.
/// </summary>
/// <returns></returns>
public async ValueTask<IReader> ExtractAllEntries()
public IReader ExtractAllEntries()
{
await EnsureEntriesLoaded();
return await CreateReaderForSolidExtraction();
}
public async ValueTask EnsureEntriesLoaded()
{
await lazyEntries.EnsureFullyLoaded();
await lazyVolumes.EnsureFullyLoaded();
((IArchiveExtractionListener)this).EnsureEntriesLoaded();
return CreateReaderForSolidExtraction();
}
protected abstract ValueTask<IReader> CreateReaderForSolidExtraction();
protected abstract IReader CreateReaderForSolidExtraction();
/// <summary>
/// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
/// </summary>
public virtual ValueTask<bool> IsSolidAsync() => new(false);
public virtual bool IsSolid => false;
/// <summary>
/// The archive can find all the parts of the archive needed to fully extract the archive. This forces the parsing of the entire archive.
/// </summary>
public async ValueTask<bool> IsCompleteAsync()
public bool IsComplete
{
await EnsureEntriesLoaded();
return await Entries.AllAsync(x => x.IsComplete);
get
{
((IArchiveExtractionListener)this).EnsureEntriesLoaded();
return Entries.All(x => x.IsComplete);
}
}
}
}

View File

@@ -2,8 +2,6 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Writers;
@@ -14,7 +12,7 @@ namespace SharpCompress.Archives
where TEntry : IArchiveEntry
where TVolume : IVolume
{
private class RebuildPauseDisposable : IAsyncDisposable
private class RebuildPauseDisposable : IDisposable
{
private readonly AbstractWritableArchive<TEntry, TVolume> archive;
@@ -24,16 +22,16 @@ namespace SharpCompress.Archives
archive.pauseRebuilding = true;
}
public async ValueTask DisposeAsync()
public void Dispose()
{
archive.pauseRebuilding = false;
await archive.RebuildModifiedCollection();
archive.RebuildModifiedCollection();
}
}
private readonly List<TEntry> newEntries = new();
private readonly List<TEntry> removedEntries = new();
private readonly List<TEntry> newEntries = new List<TEntry>();
private readonly List<TEntry> removedEntries = new List<TEntry>();
private readonly List<TEntry> modifiedEntries = new();
private readonly List<TEntry> modifiedEntries = new List<TEntry>();
private bool hasModifications;
private bool pauseRebuilding;
@@ -42,36 +40,34 @@ namespace SharpCompress.Archives
{
}
internal AbstractWritableArchive(ArchiveType type, Stream stream, ReaderOptions readerFactoryOptions,
CancellationToken cancellationToken)
: base(type, stream.AsAsyncEnumerable(), readerFactoryOptions, cancellationToken)
internal AbstractWritableArchive(ArchiveType type, Stream stream, ReaderOptions readerFactoryOptions)
: base(type, stream.AsEnumerable(), readerFactoryOptions)
{
}
internal AbstractWritableArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerFactoryOptions,
CancellationToken cancellationToken)
: base(type, fileInfo, readerFactoryOptions, cancellationToken)
internal AbstractWritableArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerFactoryOptions)
: base(type, fileInfo, readerFactoryOptions)
{
}
public override IAsyncEnumerable<TEntry> Entries
public override ICollection<TEntry> Entries
{
get
{
if (hasModifications)
{
return modifiedEntries.ToAsyncEnumerable();
return modifiedEntries;
}
return base.Entries;
}
}
public IAsyncDisposable PauseEntryRebuilding()
public IDisposable PauseEntryRebuilding()
{
return new RebuildPauseDisposable(this);
}
private async ValueTask RebuildModifiedCollection()
private void RebuildModifiedCollection()
{
if (pauseRebuilding)
{
@@ -80,57 +76,56 @@ namespace SharpCompress.Archives
hasModifications = true;
newEntries.RemoveAll(v => removedEntries.Contains(v));
modifiedEntries.Clear();
modifiedEntries.AddRange(await OldEntries.Concat(newEntries.ToAsyncEnumerable()).ToListAsync());
modifiedEntries.AddRange(OldEntries.Concat(newEntries));
}
private IAsyncEnumerable<TEntry> OldEntries { get { return base.Entries.Where(x => !removedEntries.Contains(x)); } }
private IEnumerable<TEntry> OldEntries { get { return base.Entries.Where(x => !removedEntries.Contains(x)); } }
public async ValueTask RemoveEntryAsync(TEntry entry)
public void RemoveEntry(TEntry entry)
{
if (!removedEntries.Contains(entry))
{
removedEntries.Add(entry);
await RebuildModifiedCollection();
RebuildModifiedCollection();
}
}
ValueTask IWritableArchive.RemoveEntryAsync(IArchiveEntry entry, CancellationToken cancellationToken)
void IWritableArchive.RemoveEntry(IArchiveEntry entry)
{
return RemoveEntryAsync((TEntry)entry);
RemoveEntry((TEntry)entry);
}
public ValueTask<TEntry> AddEntryAsync(string key, Stream source,
long size = 0, DateTime? modified = null,
CancellationToken cancellationToken = default)
public TEntry AddEntry(string key, Stream source,
long size = 0, DateTime? modified = null)
{
return AddEntryAsync(key, source, false, size, modified, cancellationToken);
return AddEntry(key, source, false, size, modified);
}
async ValueTask<IArchiveEntry> IWritableArchive.AddEntryAsync(string key, Stream source, bool closeStream, long size, DateTime? modified, CancellationToken cancellationToken)
IArchiveEntry IWritableArchive.AddEntry(string key, Stream source, bool closeStream, long size, DateTime? modified)
{
return await AddEntryAsync(key, source, closeStream, size, modified, cancellationToken);
return AddEntry(key, source, closeStream, size, modified);
}
public async ValueTask<TEntry> AddEntryAsync(string key, Stream source, bool closeStream,
long size = 0, DateTime? modified = null, CancellationToken cancellationToken = default)
public TEntry AddEntry(string key, Stream source, bool closeStream,
long size = 0, DateTime? modified = null)
{
if (key.Length > 0 && key[0] is '/' or '\\')
{
key = key.Substring(1);
}
if (await DoesKeyMatchExisting(key))
if (DoesKeyMatchExisting(key))
{
throw new ArchiveException("Cannot add entry with duplicate key: " + key);
}
var entry = await CreateEntry(key, source, size, modified, closeStream, cancellationToken);
var entry = CreateEntry(key, source, size, modified, closeStream);
newEntries.Add(entry);
await RebuildModifiedCollection();
RebuildModifiedCollection();
return entry;
}
private async ValueTask<bool> DoesKeyMatchExisting(string key)
private bool DoesKeyMatchExisting(string key)
{
await foreach (var path in Entries.Select(x => x.Key))
foreach (var path in Entries.Select(x => x.Key))
{
var p = path.Replace('/', '\\');
if (p.Length > 0 && p[0] == '\\')
@@ -142,35 +137,34 @@ namespace SharpCompress.Archives
return false;
}
public async ValueTask SaveToAsync(Stream stream, WriterOptions options, CancellationToken cancellationToken = default)
public void SaveTo(Stream stream, WriterOptions options)
{
//reset streams of new entries
newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
await SaveToAsync(stream, options, OldEntries, newEntries.ToAsyncEnumerable(), cancellationToken);
SaveTo(stream, options, OldEntries, newEntries);
}
protected ValueTask<TEntry> CreateEntry(string key, Stream source, long size, DateTime? modified,
bool closeStream, CancellationToken cancellationToken)
protected TEntry CreateEntry(string key, Stream source, long size, DateTime? modified,
bool closeStream)
{
if (!source.CanRead || !source.CanSeek)
{
throw new ArgumentException("Streams must be readable and seekable to use the Writing Archive API");
}
return CreateEntryInternal(key, source, size, modified, closeStream, cancellationToken);
return CreateEntryInternal(key, source, size, modified, closeStream);
}
protected abstract ValueTask<TEntry> CreateEntryInternal(string key, Stream source, long size, DateTime? modified,
bool closeStream, CancellationToken cancellationToken);
protected abstract TEntry CreateEntryInternal(string key, Stream source, long size, DateTime? modified,
bool closeStream);
protected abstract ValueTask SaveToAsync(Stream stream, WriterOptions options, IAsyncEnumerable<TEntry> oldEntries, IAsyncEnumerable<TEntry> newEntries,
CancellationToken cancellationToken = default);
protected abstract void SaveTo(Stream stream, WriterOptions options, IEnumerable<TEntry> oldEntries, IEnumerable<TEntry> newEntries);
public override async ValueTask DisposeAsync()
public override void Dispose()
{
await base.DisposeAsync();
await newEntries.Cast<Entry>().ForEachAsync(async x => await x.CloseAsync());
await removedEntries.Cast<Entry>().ForEachAsync(async x => await x.CloseAsync());
await modifiedEntries.Cast<Entry>().ForEachAsync(async x => await x.CloseAsync());
base.Dispose();
newEntries.Cast<Entry>().ForEach(x => x.Close());
removedEntries.Cast<Entry>().ForEach(x => x.Close());
modifiedEntries.Cast<Entry>().ForEach(x => x.Close());
}
}
}

View File

@@ -1,10 +1,8 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives.GZip;
//using SharpCompress.Archives.Rar;
//using SharpCompress.Archives.SevenZip;
using SharpCompress.Archives.Rar;
using SharpCompress.Archives.SevenZip;
using SharpCompress.Archives.Tar;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
@@ -20,7 +18,7 @@ namespace SharpCompress.Archives
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <returns></returns>
public static async ValueTask<IArchive> OpenAsync(Stream stream, ReaderOptions? readerOptions = null, CancellationToken cancellationToken = default)
public static IArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
@@ -28,35 +26,35 @@ namespace SharpCompress.Archives
throw new ArgumentException("Stream should be readable and seekable");
}
readerOptions ??= new ReaderOptions();
if (await ZipArchive.IsZipFileAsync(stream, null, cancellationToken))
if (ZipArchive.IsZipFile(stream, null))
{
stream.Seek(0, SeekOrigin.Begin);
return ZipArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
/*if (SevenZipArchive.IsSevenZipFile(stream))
if (SevenZipArchive.IsSevenZipFile(stream))
{
stream.Seek(0, SeekOrigin.Begin);
return SevenZipArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin); */
if (await GZipArchive.IsGZipFileAsync(stream, cancellationToken))
stream.Seek(0, SeekOrigin.Begin);
if (GZipArchive.IsGZipFile(stream))
{
stream.Seek(0, SeekOrigin.Begin);
return GZipArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
/* if (RarArchive.IsRarFile(stream, readerOptions))
if (RarArchive.IsRarFile(stream, readerOptions))
{
stream.Seek(0, SeekOrigin.Begin);
return RarArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin); */
if (await TarArchive.IsTarFileAsync(stream, cancellationToken))
stream.Seek(0, SeekOrigin.Begin);
if (TarArchive.IsTarFile(stream))
{
stream.Seek(0, SeekOrigin.Begin);
return TarArchive.Open(stream, readerOptions);
}
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip, LZip");
}
@@ -65,7 +63,7 @@ namespace SharpCompress.Archives
return type switch
{
ArchiveType.Zip => ZipArchive.Create(),
//ArchiveType.Tar => TarArchive.Create(),
ArchiveType.Tar => TarArchive.Create(),
ArchiveType.GZip => GZipArchive.Create(),
_ => throw new NotSupportedException("Cannot create Archives of type: " + type)
};
@@ -76,10 +74,10 @@ namespace SharpCompress.Archives
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
public static ValueTask<IArchive> OpenAsync(string filePath, ReaderOptions? options = null)
public static IArchive Open(string filePath, ReaderOptions? options = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
return OpenAsync(new FileInfo(filePath), options);
return Open(new FileInfo(filePath), options);
}
/// <summary>
@@ -87,28 +85,28 @@ namespace SharpCompress.Archives
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
public static async ValueTask<IArchive> OpenAsync(FileInfo fileInfo, ReaderOptions? options = null, CancellationToken cancellationToken = default)
public static IArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
await using var stream = fileInfo.OpenRead();
if (await ZipArchive.IsZipFileAsync(stream, null, cancellationToken))
using var stream = fileInfo.OpenRead();
if (ZipArchive.IsZipFile(stream, null))
{
return ZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
/*if (SevenZipArchive.IsSevenZipFile(stream))
if (SevenZipArchive.IsSevenZipFile(stream))
{
return SevenZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin); */
if (await GZipArchive.IsGZipFileAsync(stream, cancellationToken))
stream.Seek(0, SeekOrigin.Begin);
if (GZipArchive.IsGZipFile(stream))
{
return GZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
/*if (RarArchive.IsRarFile(stream, options))
if (RarArchive.IsRarFile(stream, options))
{
return RarArchive.Open(fileInfo, options);
}
@@ -116,22 +114,20 @@ namespace SharpCompress.Archives
if (TarArchive.IsTarFile(stream))
{
return TarArchive.Open(fileInfo, options);
} */
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static async ValueTask WriteToDirectory(string sourceArchive,
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default)
public static void WriteToDirectory(string sourceArchive, string destinationDirectory,
ExtractionOptions? options = null)
{
await using IArchive archive = await OpenAsync(sourceArchive);
await foreach (IArchiveEntry entry in archive.Entries.WithCancellation(cancellationToken))
using IArchive archive = Open(sourceArchive);
foreach (IArchiveEntry entry in archive.Entries)
{
await entry.WriteEntryToDirectoryAsync(destinationDirectory, options, cancellationToken);
entry.WriteToDirectory(destinationDirectory, options);
}
}
}

View File

@@ -1,11 +1,7 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
using SharpCompress.Readers;
@@ -33,11 +29,10 @@ namespace SharpCompress.Archives.GZip
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default)
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new GZipArchive(fileInfo, readerOptions ?? new ReaderOptions(), cancellationToken);
return new GZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
/// <summary>
@@ -45,11 +40,10 @@ namespace SharpCompress.Archives.GZip
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(Stream stream, ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default)
public static GZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
return new GZipArchive(stream, readerOptions ?? new ReaderOptions(), cancellationToken);
return new GZipArchive(stream, readerOptions ?? new ReaderOptions());
}
public static GZipArchive Create()
@@ -62,58 +56,57 @@ namespace SharpCompress.Archives.GZip
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
internal GZipArchive(FileInfo fileInfo, ReaderOptions options,
CancellationToken cancellationToken)
: base(ArchiveType.GZip, fileInfo, options, cancellationToken)
internal GZipArchive(FileInfo fileInfo, ReaderOptions options)
: base(ArchiveType.GZip, fileInfo, options)
{
}
protected override IAsyncEnumerable<GZipVolume> LoadVolumes(FileInfo file,
CancellationToken cancellationToken)
protected override IEnumerable<GZipVolume> LoadVolumes(FileInfo file)
{
return new GZipVolume(file, ReaderOptions).AsAsyncEnumerable();
return new GZipVolume(file, ReaderOptions).AsEnumerable();
}
public static ValueTask<bool> IsGZipFileAsync(string filePath, CancellationToken cancellationToken = default)
public static bool IsGZipFile(string filePath)
{
return IsGZipFileAsync(new FileInfo(filePath), cancellationToken);
return IsGZipFile(new FileInfo(filePath));
}
public static async ValueTask<bool> IsGZipFileAsync(FileInfo fileInfo, CancellationToken cancellationToken = default)
public static bool IsGZipFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
return false;
}
await using Stream stream = fileInfo.OpenRead();
return await IsGZipFileAsync(stream, cancellationToken);
using Stream stream = fileInfo.OpenRead();
return IsGZipFile(stream);
}
public Task SaveToAsync(string filePath, CancellationToken cancellationToken = default)
public void SaveTo(string filePath)
{
return SaveToAsync(new FileInfo(filePath), cancellationToken);
SaveTo(new FileInfo(filePath));
}
public async Task SaveToAsync(FileInfo fileInfo, CancellationToken cancellationToken = default)
public void SaveTo(FileInfo fileInfo)
{
await using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
await SaveToAsync(stream, new WriterOptions(CompressionType.GZip), cancellationToken);
using (var stream = fileInfo.Open(FileMode.Create, FileAccess.Write))
{
SaveTo(stream, new WriterOptions(CompressionType.GZip));
}
}
public static async ValueTask<bool> IsGZipFileAsync(Stream stream, CancellationToken cancellationToken = default)
public static bool IsGZipFile(Stream stream)
{
// read the header on the first read
using var header = MemoryPool<byte>.Shared.Rent(10);
var slice = header.Memory.Slice(0, 10);
Span<byte> header = stackalloc byte[10];
// workitem 8501: handle edge case (decompress empty stream)
if (await stream.ReadAsync(slice, cancellationToken) != 10)
if (!stream.ReadFully(header))
{
return false;
}
if (slice.Span[0] != 0x1F || slice.Span[1] != 0x8B || slice.Span[2] != 8)
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
return false;
}
@@ -126,9 +119,8 @@ namespace SharpCompress.Archives.GZip
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
internal GZipArchive(Stream stream, ReaderOptions options,
CancellationToken cancellationToken)
: base(ArchiveType.GZip, stream, options, cancellationToken)
internal GZipArchive(Stream stream, ReaderOptions options)
: base(ArchiveType.GZip, stream, options)
{
}
@@ -137,54 +129,51 @@ namespace SharpCompress.Archives.GZip
{
}
protected override async ValueTask<GZipArchiveEntry> CreateEntryInternal(string filePath, Stream source, long size, DateTime? modified,
bool closeStream, CancellationToken cancellationToken = default)
protected override GZipArchiveEntry CreateEntryInternal(string filePath, Stream source, long size, DateTime? modified,
bool closeStream)
{
if (await Entries.AnyAsync(cancellationToken: cancellationToken))
if (Entries.Any())
{
throw new InvalidOperationException("Only one entry is allowed in a GZip Archive");
}
return new GZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
}
protected override async ValueTask SaveToAsync(Stream stream, WriterOptions options,
IAsyncEnumerable<GZipArchiveEntry> oldEntries,
IAsyncEnumerable<GZipArchiveEntry> newEntries,
CancellationToken cancellationToken = default)
protected override void SaveTo(Stream stream, WriterOptions options,
IEnumerable<GZipArchiveEntry> oldEntries,
IEnumerable<GZipArchiveEntry> newEntries)
{
if (await Entries.CountAsync(cancellationToken: cancellationToken) > 1)
if (Entries.Count > 1)
{
throw new InvalidOperationException("Only one entry is allowed in a GZip Archive");
}
await using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
await foreach (var entry in oldEntries.Concat(newEntries)
.Where(x => !x.IsDirectory)
.WithCancellation(cancellationToken))
using (var writer = new GZipWriter(stream, new GZipWriterOptions(options)))
{
await using var entryStream = await entry.OpenEntryStreamAsync(cancellationToken);
await writer.WriteAsync(entry.Key, entryStream, entry.LastModifiedTime, cancellationToken);
foreach (var entry in oldEntries.Concat(newEntries)
.Where(x => !x.IsDirectory))
{
using (var entryStream = entry.OpenEntryStream())
{
writer.Write(entry.Key, entryStream, entry.LastModifiedTime);
}
}
}
}
protected override async IAsyncEnumerable<GZipVolume> LoadVolumes(IAsyncEnumerable<Stream> streams,
[EnumeratorCancellation]CancellationToken cancellationToken)
protected override IEnumerable<GZipVolume> LoadVolumes(IEnumerable<Stream> streams)
{
yield return new GZipVolume(await streams.FirstAsync(cancellationToken: cancellationToken), ReaderOptions);
return new GZipVolume(streams.First(), ReaderOptions).AsEnumerable();
}
protected override async IAsyncEnumerable<GZipArchiveEntry> LoadEntries(IAsyncEnumerable<GZipVolume> volumes,
[EnumeratorCancellation]CancellationToken cancellationToken)
protected override IEnumerable<GZipArchiveEntry> LoadEntries(IEnumerable<GZipVolume> volumes)
{
Stream stream = (await volumes.SingleAsync(cancellationToken: cancellationToken)).Stream;
var part = new GZipFilePart(ReaderOptions.ArchiveEncoding);
await part.Initialize(stream, cancellationToken);
yield return new GZipArchiveEntry(this, part);
Stream stream = volumes.Single().Stream;
yield return new GZipArchiveEntry(this, new GZipFilePart(stream, ReaderOptions.ArchiveEncoding));
}
protected override async ValueTask<IReader> CreateReaderForSolidExtraction()
protected override IReader CreateReaderForSolidExtraction()
{
var stream = (await Volumes.SingleAsync()).Stream;
var stream = Volumes.Single().Stream;
stream.Position = 0;
return GZipReader.Open(stream);
}

View File

@@ -1,7 +1,5 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.GZip;
namespace SharpCompress.Archives.GZip
@@ -14,7 +12,7 @@ namespace SharpCompress.Archives.GZip
Archive = archive;
}
public virtual async ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
public virtual Stream OpenEntryStream()
{
//this is to reset the stream to be read multiple times
var part = (GZipFilePart)Parts.Single();
@@ -22,7 +20,7 @@ namespace SharpCompress.Archives.GZip
{
part.GetRawStream().Position = part.EntryStartPosition;
}
return await Parts.Single().GetCompressedStreamAsync(cancellationToken);
return Parts.Single().GetCompressedStream();
}
#region IArchiveEntry Members

View File

@@ -3,8 +3,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
@@ -52,18 +50,18 @@ namespace SharpCompress.Archives.GZip
Stream IWritableArchiveEntry.Stream => stream;
public override ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
public override Stream OpenEntryStream()
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return new(new NonDisposingStream(stream));
return new NonDisposingStream(stream);
}
internal override async ValueTask CloseAsync()
internal override void Close()
{
if (closeStream)
{
await stream.DisposeAsync();
stream.Dispose();
}
}
}

View File

@@ -1,44 +1,49 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives
{
public interface IArchive : IAsyncDisposable
public interface IArchive : IDisposable
{
IAsyncEnumerable<IArchiveEntry> Entries { get; }
IAsyncEnumerable<IVolume> Volumes { get; }
event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionBegin;
event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionEnd;
event EventHandler<CompressedBytesReadEventArgs> CompressedBytesRead;
event EventHandler<FilePartExtractionBeginEventArgs> FilePartExtractionBegin;
IEnumerable<IArchiveEntry> Entries { get; }
IEnumerable<IVolume> Volumes { get; }
ArchiveType Type { get; }
ValueTask EnsureEntriesLoaded();
/// <summary>
/// Use this method to extract all entries in an archive in order.
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
/// extracted sequentially for the best performance.
/// </summary>
ValueTask<IReader> ExtractAllEntries();
IReader ExtractAllEntries();
/// <summary>
/// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
/// Rar Archives can be SOLID while all 7Zip archives are considered SOLID.
/// </summary>
ValueTask<bool> IsSolidAsync();
bool IsSolid { get; }
/// <summary>
/// This checks to see if all the known entries have IsComplete = true
/// </summary>
ValueTask<bool> IsCompleteAsync();
bool IsComplete { get; }
/// <summary>
/// The total size of the files compressed in the archive.
/// </summary>
ValueTask<long> TotalSizeAsync();
long TotalSize { get; }
/// <summary>
/// The total size of the files as uncompressed in the archive.
/// </summary>
ValueTask<long> TotalUncompressedSizeAsync();
long TotalUncompressSize { get; }
}
}

View File

@@ -1,6 +1,4 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Archives
@@ -11,7 +9,7 @@ namespace SharpCompress.Archives
/// Opens the current entry as a stream that will decompress as it is read.
/// Read the entire stream or use SkipEntry on EntryStream.
/// </summary>
ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default);
Stream OpenEntryStream();
/// <summary>
/// The archive can find all the parts of the archive needed to extract this entry.

View File

@@ -1,6 +1,4 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
@@ -8,53 +6,58 @@ namespace SharpCompress.Archives
{
public static class IArchiveEntryExtensions
{
public static async ValueTask WriteToAsync(this IArchiveEntry archiveEntry, Stream streamToWriteTo, CancellationToken cancellationToken = default)
public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWriteTo)
{
if (archiveEntry.IsDirectory)
{
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
}
var archive = archiveEntry.Archive;
await archive.EnsureEntriesLoaded();
var entryStream = await archiveEntry.OpenEntryStreamAsync(cancellationToken);
var streamListener = (IArchiveExtractionListener)archiveEntry.Archive;
streamListener.EnsureEntriesLoaded();
streamListener.FireEntryExtractionBegin(archiveEntry);
streamListener.FireFilePartExtractionBegin(archiveEntry.Key, archiveEntry.Size, archiveEntry.CompressedSize);
var entryStream = archiveEntry.OpenEntryStream();
if (entryStream is null)
{
return;
}
await using (entryStream)
using (entryStream)
{
await entryStream.TransferToAsync(streamToWriteTo, cancellationToken);
using (Stream s = new ListeningStream(streamListener, entryStream))
{
s.TransferTo(streamToWriteTo);
}
}
streamListener.FireEntryExtractionEnd(archiveEntry);
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static ValueTask WriteEntryToDirectoryAsync(this IArchiveEntry entry,
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default)
public static void WriteToDirectory(this IArchiveEntry entry, string destinationDirectory,
ExtractionOptions? options = null)
{
return ExtractionMethods.WriteEntryToDirectoryAsync(entry, destinationDirectory, options,
entry.WriteToFileAsync, cancellationToken);
ExtractionMethods.WriteEntryToDirectory(entry, destinationDirectory, options,
entry.WriteToFile);
}
/// <summary>
/// Extract to specific file
/// </summary>
public static ValueTask WriteToFileAsync(this IArchiveEntry entry,
public static void WriteToFile(this IArchiveEntry entry,
string destinationFileName,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default)
ExtractionOptions? options = null)
{
return ExtractionMethods.WriteEntryToFileAsync(entry, destinationFileName, options,
async (x, fm, ct) =>
ExtractionMethods.WriteEntryToFile(entry, destinationFileName, options,
(x, fm) =>
{
await using FileStream fs = File.Open(x, fm);
await entry.WriteToAsync(fs, ct);
}, cancellationToken);
using (FileStream fs = File.Open(destinationFileName, fm))
{
entry.WriteTo(fs);
}
});
}
}
}

View File

@@ -1,6 +1,4 @@
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Archives
@@ -10,14 +8,12 @@ namespace SharpCompress.Archives
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static async ValueTask WriteToDirectoryAsync(this IArchive archive,
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default)
public static void WriteToDirectory(this IArchive archive, string destinationDirectory,
ExtractionOptions? options = null)
{
await foreach (IArchiveEntry entry in archive.Entries.Where(x => !x.IsDirectory).WithCancellation(cancellationToken))
foreach (IArchiveEntry entry in archive.Entries.Where(x => !x.IsDirectory))
{
await entry.WriteEntryToDirectoryAsync(destinationDirectory, options, cancellationToken);
entry.WriteToDirectory(destinationDirectory, options);
}
}
}

View File

@@ -0,0 +1,11 @@
using SharpCompress.Common;
namespace SharpCompress.Archives
{
internal interface IArchiveExtractionListener : IExtractionListener
{
void EnsureEntriesLoaded();
void FireEntryExtractionBegin(IArchiveEntry entry);
void FireEntryExtractionEnd(IArchiveEntry entry);
}
}

View File

@@ -1,23 +1,21 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Writers;
namespace SharpCompress.Archives
{
public interface IWritableArchive : IArchive
{
ValueTask RemoveEntryAsync(IArchiveEntry entry, CancellationToken cancellationToken = default);
void RemoveEntry(IArchiveEntry entry);
ValueTask<IArchiveEntry> AddEntryAsync(string key, Stream source, bool closeStream, long size = 0, DateTime? modified = null, CancellationToken cancellationToken = default);
IArchiveEntry AddEntry(string key, Stream source, bool closeStream, long size = 0, DateTime? modified = null);
ValueTask SaveToAsync(Stream stream, WriterOptions options, CancellationToken cancellationToken = default);
void SaveTo(Stream stream, WriterOptions options);
/// <summary>
/// Use this to pause entry rebuilding when adding large collections of entries. Dispose when complete. A using statement is recommended.
/// </summary>
/// <returns>IDisposeable to resume entry rebuilding</returns>
IAsyncDisposable PauseEntryRebuilding();
IDisposable PauseEntryRebuilding();
}
}

View File

@@ -1,62 +1,57 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Writers;
namespace SharpCompress.Archives
{
public static class IWritableArchiveExtensions
{
public static async ValueTask AddEntryAsync(this IWritableArchive writableArchive,
string entryPath, string filePath,
CancellationToken cancellationToken = default)
public static void AddEntry(this IWritableArchive writableArchive,
string entryPath, string filePath)
{
var fileInfo = new FileInfo(filePath);
if (!fileInfo.Exists)
{
throw new FileNotFoundException("Could not AddEntry: " + filePath);
}
await writableArchive.AddEntryAsync(entryPath, new FileInfo(filePath).OpenRead(), true, fileInfo.Length,
fileInfo.LastWriteTime, cancellationToken);
writableArchive.AddEntry(entryPath, new FileInfo(filePath).OpenRead(), true, fileInfo.Length,
fileInfo.LastWriteTime);
}
public static Task SaveToAsync(this IWritableArchive writableArchive, string filePath, WriterOptions options, CancellationToken cancellationToken = default)
public static void SaveTo(this IWritableArchive writableArchive, string filePath, WriterOptions options)
{
return writableArchive.SaveToAsync(new FileInfo(filePath), options, cancellationToken);
writableArchive.SaveTo(new FileInfo(filePath), options);
}
public static async Task SaveToAsync(this IWritableArchive writableArchive, FileInfo fileInfo, WriterOptions options, CancellationToken cancellationToken = default)
public static void SaveTo(this IWritableArchive writableArchive, FileInfo fileInfo, WriterOptions options)
{
await using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
await writableArchive.SaveToAsync(stream, options, cancellationToken);
using (var stream = fileInfo.Open(FileMode.Create, FileAccess.Write))
{
writableArchive.SaveTo(stream, options);
}
}
public static async ValueTask AddAllFromDirectoryAsync(
public static void AddAllFromDirectory(
this IWritableArchive writableArchive,
string filePath, string searchPattern = "*.*",
SearchOption searchOption = SearchOption.AllDirectories,
CancellationToken cancellationToken = default)
string filePath, string searchPattern = "*.*", SearchOption searchOption = SearchOption.AllDirectories)
{
await using (writableArchive.PauseEntryRebuilding())
using (writableArchive.PauseEntryRebuilding())
{
foreach (var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption))
{
var fileInfo = new FileInfo(path);
await writableArchive.AddEntryAsync(path.Substring(filePath.Length), fileInfo.OpenRead(), true, fileInfo.Length,
fileInfo.LastWriteTime,
cancellationToken);
writableArchive.AddEntry(path.Substring(filePath.Length), fileInfo.OpenRead(), true, fileInfo.Length,
fileInfo.LastWriteTime);
}
}
}
public static ValueTask<IArchiveEntry> AddEntryAsync(this IWritableArchive writableArchive, string key, FileInfo fileInfo,
CancellationToken cancellationToken = default)
public static IArchiveEntry AddEntry(this IWritableArchive writableArchive, string key, FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
throw new ArgumentException("FileInfo does not exist.");
}
return writableArchive.AddEntryAsync(key, fileInfo.OpenRead(), true, fileInfo.Length, fileInfo.LastWriteTime, cancellationToken);
return writableArchive.AddEntry(key, fileInfo.OpenRead(), true, fileInfo.Length, fileInfo.LastWriteTime);
}
}
}

View File

@@ -2,9 +2,6 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
@@ -34,11 +31,10 @@ namespace SharpCompress.Archives.Tar
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default)
public static TarArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new TarArchive(fileInfo, readerOptions ?? new ReaderOptions(), cancellationToken);
return new TarArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
/// <summary>
@@ -46,35 +42,35 @@ namespace SharpCompress.Archives.Tar
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(Stream stream, ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default)
public static TarArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
return new TarArchive(stream, readerOptions ?? new ReaderOptions(), cancellationToken);
return new TarArchive(stream, readerOptions ?? new ReaderOptions());
}
public static ValueTask<bool> IsTarFileAsync(string filePath, CancellationToken cancellationToken = default)
public static bool IsTarFile(string filePath)
{
return IsTarFileAsync(new FileInfo(filePath), cancellationToken);
return IsTarFile(new FileInfo(filePath));
}
public static async ValueTask<bool> IsTarFileAsync(FileInfo fileInfo, CancellationToken cancellationToken = default)
public static bool IsTarFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
return false;
}
await using Stream stream = fileInfo.OpenRead();
return await IsTarFileAsync(stream, cancellationToken);
using (Stream stream = fileInfo.OpenRead())
{
return IsTarFile(stream);
}
}
public static async ValueTask<bool> IsTarFileAsync(Stream stream, CancellationToken cancellationToken = default)
public static bool IsTarFile(Stream stream)
{
try
{
TarHeader tarHeader = new(new ArchiveEncoding());
bool readSucceeded = await tarHeader.Read(stream, cancellationToken);
TarHeader tarHeader = new TarHeader(new ArchiveEncoding());
bool readSucceeded = tarHeader.Read(new BinaryReader(stream));
bool isEmptyArchive = tarHeader.Name.Length == 0 && tarHeader.Size == 0 && Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;
}
@@ -89,15 +85,14 @@ namespace SharpCompress.Archives.Tar
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
internal TarArchive(FileInfo fileInfo, ReaderOptions readerOptions,
CancellationToken cancellationToken)
: base(ArchiveType.Tar, fileInfo, readerOptions, cancellationToken)
internal TarArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.Tar, fileInfo, readerOptions)
{
}
protected override IAsyncEnumerable<TarVolume> LoadVolumes(FileInfo file, CancellationToken cancellationToken)
protected override IEnumerable<TarVolume> LoadVolumes(FileInfo file)
{
return new TarVolume(file.OpenRead(), ReaderOptions).AsAsyncEnumerable();
return new TarVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
}
/// <summary>
@@ -105,9 +100,8 @@ namespace SharpCompress.Archives.Tar
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
internal TarArchive(Stream stream, ReaderOptions readerOptions,
CancellationToken cancellationToken)
: base(ArchiveType.Tar, stream, readerOptions, cancellationToken)
internal TarArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.Tar, stream, readerOptions)
{
}
@@ -116,18 +110,16 @@ namespace SharpCompress.Archives.Tar
{
}
protected override async IAsyncEnumerable<TarVolume> LoadVolumes(IAsyncEnumerable<Stream> streams,
[EnumeratorCancellation]CancellationToken cancellationToken)
protected override IEnumerable<TarVolume> LoadVolumes(IEnumerable<Stream> streams)
{
yield return new TarVolume(await streams.FirstAsync(cancellationToken: cancellationToken), ReaderOptions);
return new TarVolume(streams.First(), ReaderOptions).AsEnumerable();
}
protected override async IAsyncEnumerable<TarArchiveEntry> LoadEntries(IAsyncEnumerable<TarVolume> volumes,
[EnumeratorCancellation]CancellationToken cancellationToken)
protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
{
Stream stream = (await volumes.SingleAsync(cancellationToken: cancellationToken)).Stream;
Stream stream = volumes.Single().Stream;
TarHeader? previousHeader = null;
await foreach (TarHeader? header in TarHeaderFactory.ReadHeader(StreamingMode.Seekable, stream, ReaderOptions.ArchiveEncoding, cancellationToken))
foreach (TarHeader? header in TarHeaderFactory.ReadHeader(StreamingMode.Seekable, stream, ReaderOptions.ArchiveEncoding))
{
if (header != null)
{
@@ -144,11 +136,11 @@ namespace SharpCompress.Archives.Tar
var oldStreamPos = stream.Position;
await using (var entryStream = await entry.OpenEntryStreamAsync(cancellationToken))
using (var entryStream = entry.OpenEntryStream())
{
await using (var memoryStream = new MemoryStream())
using (var memoryStream = new MemoryStream())
{
await entryStream.TransferToAsync(memoryStream, cancellationToken);
entryStream.TransferTo(memoryStream);
memoryStream.Position = 0;
var bytes = memoryStream.ToArray();
@@ -168,37 +160,38 @@ namespace SharpCompress.Archives.Tar
public static TarArchive Create()
{
return new();
return new TarArchive();
}
protected override ValueTask<TarArchiveEntry> CreateEntryInternal(string filePath, Stream source,
long size, DateTime? modified, bool closeStream,
CancellationToken cancellationToken)
protected override TarArchiveEntry CreateEntryInternal(string filePath, Stream source,
long size, DateTime? modified, bool closeStream)
{
return new (new TarWritableArchiveEntry(this, source, CompressionType.Unknown, filePath, size, modified,
closeStream));
return new TarWritableArchiveEntry(this, source, CompressionType.Unknown, filePath, size, modified,
closeStream);
}
protected override async ValueTask SaveToAsync(Stream stream, WriterOptions options,
IAsyncEnumerable<TarArchiveEntry> oldEntries,
IAsyncEnumerable<TarArchiveEntry> newEntries,
CancellationToken cancellationToken = default)
protected override void SaveTo(Stream stream, WriterOptions options,
IEnumerable<TarArchiveEntry> oldEntries,
IEnumerable<TarArchiveEntry> newEntries)
{
await using var writer = await TarWriter.CreateAsync(stream, new TarWriterOptions(options), cancellationToken);
await foreach (var entry in oldEntries.Concat(newEntries)
.Where(x => !x.IsDirectory)
.WithCancellation(cancellationToken))
using (var writer = new TarWriter(stream, new TarWriterOptions(options)))
{
await using var entryStream = await entry.OpenEntryStreamAsync(cancellationToken);
await writer.WriteAsync(entry.Key, entryStream, entry.LastModifiedTime, entry.Size, cancellationToken);
foreach (var entry in oldEntries.Concat(newEntries)
.Where(x => !x.IsDirectory))
{
using (var entryStream = entry.OpenEntryStream())
{
writer.Write(entry.Key, entryStream, entry.LastModifiedTime, entry.Size);
}
}
}
}
protected override async ValueTask<IReader> CreateReaderForSolidExtraction()
protected override IReader CreateReaderForSolidExtraction()
{
var stream = (await Volumes.SingleAsync()).Stream;
var stream = Volumes.Single().Stream;
stream.Position = 0;
return await TarReader.OpenAsync(stream);
return TarReader.Open(stream);
}
}
}

View File

@@ -1,7 +1,5 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar;
@@ -15,9 +13,9 @@ namespace SharpCompress.Archives.Tar
Archive = archive;
}
public virtual async ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
public virtual Stream OpenEntryStream()
{
return await Parts.Single().GetCompressedStreamAsync(cancellationToken);
return Parts.Single().GetCompressedStream();
}
#region IArchiveEntry Members

View File

@@ -3,8 +3,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
@@ -51,18 +49,18 @@ namespace SharpCompress.Archives.Tar
internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();
Stream IWritableArchiveEntry.Stream => stream;
public override ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
public override Stream OpenEntryStream()
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return new(new NonDisposingStream(stream));
return new NonDisposingStream(stream);
}
internal override async ValueTask CloseAsync()
internal override void Close()
{
if (closeStream)
{
await stream.DisposeAsync();
stream.Dispose();
}
}
}

View File

@@ -2,14 +2,10 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Deflate;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Zip;
using SharpCompress.Writers;
@@ -45,11 +41,10 @@ namespace SharpCompress.Archives.Zip
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default)
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new ZipArchive(fileInfo, readerOptions ?? new ReaderOptions(), cancellationToken);
return new ZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
/// <summary>
@@ -57,45 +52,35 @@ namespace SharpCompress.Archives.Zip
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(Stream stream, ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default)
public static ZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
return new ZipArchive(stream, readerOptions ?? new ReaderOptions(), cancellationToken);
return new ZipArchive(stream, readerOptions ?? new ReaderOptions());
}
public static ValueTask<bool> IsZipFile(string filePath, string? password = null)
public static bool IsZipFile(string filePath, string? password = null)
{
return IsZipFileAsync(new FileInfo(filePath), password);
return IsZipFile(new FileInfo(filePath), password);
}
public static async ValueTask<bool> IsZipFileAsync(FileInfo fileInfo, string? password = null)
public static bool IsZipFile(FileInfo fileInfo, string? password = null)
{
if (!fileInfo.Exists)
{
return false;
}
await using Stream stream = fileInfo.OpenRead();
return await IsZipFileAsync(stream, password);
using (Stream stream = fileInfo.OpenRead())
{
return IsZipFile(stream, password);
}
}
public static async ValueTask<bool> IsZipFileAsync(Stream stream, string? password = null, CancellationToken cancellationToken = default)
public static bool IsZipFile(Stream stream, string? password = null)
{
StreamingZipHeaderFactory headerFactory = new(password, new ArchiveEncoding());
StreamingZipHeaderFactory headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding());
try
{
RewindableStream rewindableStream;
if (stream is RewindableStream rs)
{
rewindableStream = rs;
}
else
{
rewindableStream = new RewindableStream(stream);
}
ZipHeader? header = await headerFactory.ReadStreamHeader(rewindableStream, cancellationToken)
.FirstOrDefaultAsync(x => x.ZipHeaderType != ZipHeaderType.Split, cancellationToken: cancellationToken);
ZipHeader? header = headerFactory.ReadStreamHeader(stream).FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
return false;
@@ -117,17 +102,15 @@ namespace SharpCompress.Archives.Zip
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
internal ZipArchive(FileInfo fileInfo, ReaderOptions readerOptions,
CancellationToken cancellationToken)
: base(ArchiveType.Zip, fileInfo, readerOptions, cancellationToken)
internal ZipArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.Zip, fileInfo, readerOptions)
{
headerFactory = new SeekableZipHeaderFactory(readerOptions.Password, readerOptions.ArchiveEncoding);
}
protected override IAsyncEnumerable<ZipVolume> LoadVolumes(FileInfo file,
CancellationToken cancellationToken)
protected override IEnumerable<ZipVolume> LoadVolumes(FileInfo file)
{
return new ZipVolume(file.OpenRead(), ReaderOptions).AsAsyncEnumerable();
return new ZipVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
}
internal ZipArchive()
@@ -140,86 +123,82 @@ namespace SharpCompress.Archives.Zip
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
internal ZipArchive(Stream stream, ReaderOptions readerOptions,
CancellationToken cancellationToken)
: base(ArchiveType.Zip, stream, readerOptions, cancellationToken)
internal ZipArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.Zip, stream, readerOptions)
{
headerFactory = new SeekableZipHeaderFactory(readerOptions.Password, readerOptions.ArchiveEncoding);
}
protected override async IAsyncEnumerable<ZipVolume> LoadVolumes(IAsyncEnumerable<Stream> streams,
[EnumeratorCancellation]CancellationToken cancellationToken)
protected override IEnumerable<ZipVolume> LoadVolumes(IEnumerable<Stream> streams)
{
yield return new ZipVolume(await streams.FirstAsync(cancellationToken: cancellationToken), ReaderOptions);
return new ZipVolume(streams.First(), ReaderOptions).AsEnumerable();
}
protected override async IAsyncEnumerable<ZipArchiveEntry> LoadEntries(IAsyncEnumerable<ZipVolume> volumes,
[EnumeratorCancellation]CancellationToken cancellationToken)
protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
{
await Task.CompletedTask;
var volume = await volumes.SingleAsync(cancellationToken: cancellationToken);
var volume = volumes.Single();
Stream stream = volume.Stream;
await foreach (ZipHeader h in headerFactory.ReadSeekableHeader(stream, cancellationToken))
foreach (ZipHeader h in headerFactory.ReadSeekableHeader(stream))
{
if (h != null)
{
switch (h.ZipHeaderType)
{
case ZipHeaderType.DirectoryEntry:
{
yield return new ZipArchiveEntry(this,
new SeekableZipFilePart(headerFactory,
(DirectoryEntryHeader)h,
stream));
}
{
yield return new ZipArchiveEntry(this,
new SeekableZipFilePart(headerFactory,
(DirectoryEntryHeader)h,
stream));
}
break;
case ZipHeaderType.DirectoryEnd:
{
byte[] bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
volume.Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
yield break;
}
{
byte[] bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
volume.Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
yield break;
}
}
}
}
}
public ValueTask SaveToAsync(Stream stream, CancellationToken cancellationToken = default)
public void SaveTo(Stream stream)
{
return SaveToAsync(stream, new WriterOptions(CompressionType.Deflate), cancellationToken);
SaveTo(stream, new WriterOptions(CompressionType.Deflate));
}
protected override async ValueTask SaveToAsync(Stream stream, WriterOptions options,
IAsyncEnumerable<ZipArchiveEntry> oldEntries,
IAsyncEnumerable<ZipArchiveEntry> newEntries,
CancellationToken cancellationToken = default)
protected override void SaveTo(Stream stream, WriterOptions options,
IEnumerable<ZipArchiveEntry> oldEntries,
IEnumerable<ZipArchiveEntry> newEntries)
{
await using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
await foreach (var entry in oldEntries.Concat(newEntries)
.Where(x => !x.IsDirectory)
.WithCancellation(cancellationToken))
using (var writer = new ZipWriter(stream, new ZipWriterOptions(options)))
{
await using (var entryStream = await entry.OpenEntryStreamAsync(cancellationToken))
foreach (var entry in oldEntries.Concat(newEntries)
.Where(x => !x.IsDirectory))
{
await writer.WriteAsync(entry.Key, entryStream, entry.LastModifiedTime, cancellationToken);
using (var entryStream = entry.OpenEntryStream())
{
writer.Write(entry.Key, entryStream, entry.LastModifiedTime);
}
}
}
}
protected override ValueTask<ZipArchiveEntry> CreateEntryInternal(string filePath, Stream source, long size, DateTime? modified,
bool closeStream, CancellationToken cancellationToken = default)
protected override ZipArchiveEntry CreateEntryInternal(string filePath, Stream source, long size, DateTime? modified,
bool closeStream)
{
return new(new ZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream));
return new ZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
}
public static ZipArchive Create()
{
return new();
return new ZipArchive();
}
protected override async ValueTask<IReader> CreateReaderForSolidExtraction()
protected override IReader CreateReaderForSolidExtraction()
{
var stream = (await Volumes.SingleAsync()).Stream;
var stream = Volumes.Single().Stream;
stream.Position = 0;
return ZipReader.Open(stream, ReaderOptions);
}

View File

@@ -1,7 +1,5 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip;
namespace SharpCompress.Archives.Zip
@@ -14,9 +12,9 @@ namespace SharpCompress.Archives.Zip
Archive = archive;
}
public virtual ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
public virtual Stream OpenEntryStream()
{
return Parts.Single().GetCompressedStreamAsync(cancellationToken);
return Parts.Single().GetCompressedStream();
}
#region IArchiveEntry Members

View File

@@ -1,8 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
@@ -51,18 +49,18 @@ namespace SharpCompress.Archives.Zip
Stream IWritableArchiveEntry.Stream => stream;
public override ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
public override Stream OpenEntryStream()
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return new(new NonDisposingStream(stream));
return new NonDisposingStream(stream);
}
internal override async ValueTask CloseAsync()
internal override void Close()
{
if (closeStream && !isDisposed)
{
await stream.DisposeAsync();
stream.Dispose();
isDisposed = true;
}
}

View File

@@ -1,25 +0,0 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress
{
public static class AsyncEnumerable
{
public static IAsyncEnumerable<T> Empty<T>() => EmptyAsyncEnumerable<T>.Instance;
private class EmptyAsyncEnumerable<T> : IAsyncEnumerator<T>, IAsyncEnumerable<T>
{
public static readonly EmptyAsyncEnumerable<T> Instance =
new();
public T Current => default!;
public ValueTask DisposeAsync() => default;
public ValueTask<bool> MoveNextAsync() => new(false);
public IAsyncEnumerator<T> GetAsyncEnumerator(CancellationToken cancellationToken = new CancellationToken())
{
return this;
}
}
}
}

View File

@@ -24,7 +24,7 @@ namespace SharpCompress.Common
/// Set this when you want to use a custom method for all decoding operations.
/// </summary>
/// <returns>string Func(bytes, index, length)</returns>
//public Func<byte[], int, int, string>? CustomDecoder { get; set; }
public Func<byte[], int, int, string>? CustomDecoder { get; set; }
public ArchiveEncoding()
: this(Encoding.Default, Encoding.Default)
@@ -50,12 +50,7 @@ namespace SharpCompress.Common
public string Decode(byte[] bytes, int start, int length)
{
return GetEncoding().GetString(bytes, start, length);
}
public string Decode(ReadOnlySpan<byte> span)
{
return GetEncoding().GetString(span);
return GetDecoder().Invoke(bytes, start, length);
}
public string DecodeUTF8(byte[] bytes)
@@ -72,5 +67,10 @@ namespace SharpCompress.Common
{
return Forced ?? Default ?? Encoding.UTF8;
}
public Func<byte[], int, int, string> GetDecoder()
{
return CustomDecoder ?? ((bytes, index, count) => GetEncoding().GetString(bytes, index, count));
}
}
}

View File

@@ -1,6 +1,5 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
namespace SharpCompress.Common
{
@@ -78,9 +77,8 @@ namespace SharpCompress.Common
internal bool IsSolid { get; set; }
internal virtual ValueTask CloseAsync()
internal virtual void Close()
{
return new ();
}
/// <summary>

View File

@@ -1,13 +1,10 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common
{
public class EntryStream : AsyncStream
public class EntryStream : Stream
{
private readonly IReader _reader;
private readonly Stream _stream;
@@ -23,24 +20,25 @@ namespace SharpCompress.Common
/// <summary>
/// When reading a stream from OpenEntryStream, the stream must be completed so use this to finish reading the entire entry.
/// </summary>
public async ValueTask SkipEntryAsync(CancellationToken cancellationToken = default)
public void SkipEntry()
{
await this.SkipAsync(cancellationToken);
this.Skip();
_completed = true;
}
public override async ValueTask DisposeAsync()
protected override void Dispose(bool disposing)
{
if (!(_completed || _reader.Cancelled))
{
await SkipEntryAsync();
SkipEntry();
}
if (_isDisposed)
{
return;
}
_isDisposed = true;
await _stream.DisposeAsync();
base.Dispose(disposing);
_stream.Dispose();
}
public override bool CanRead => true;
@@ -48,13 +46,18 @@ namespace SharpCompress.Common
public override bool CanSeek => false;
public override bool CanWrite => false;
public override void Flush()
{
}
public override long Length => _stream.Length;
public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
public override async ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken = default)
public override int Read(byte[] buffer, int offset, int count)
{
int read = await _stream.ReadAsync(buffer, cancellationToken);
int read = _stream.Read(buffer, offset, count);
if (read <= 0)
{
_completed = true;
@@ -62,14 +65,14 @@ namespace SharpCompress.Common
return read;
}
public override ValueTask WriteAsync(ReadOnlyMemory<byte> buffer, CancellationToken cancellationToken = default)
public override int ReadByte()
{
throw new NotSupportedException();
}
public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
throw new NotSupportedException();
int value = _stream.ReadByte();
if (value == -1)
{
_completed = true;
}
return value;
}
public override long Seek(long offset, SeekOrigin origin)
@@ -81,5 +84,10 @@ namespace SharpCompress.Common
{
throw new NotSupportedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotSupportedException();
}
}
}

View File

@@ -1,7 +1,5 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common
{
@@ -10,11 +8,10 @@ namespace SharpCompress.Common
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static async ValueTask WriteEntryToDirectoryAsync(IEntry entry,
public static void WriteEntryToDirectory(IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Func<string, ExtractionOptions?, CancellationToken, ValueTask> write,
CancellationToken cancellationToken = default)
Action<string, ExtractionOptions?> write)
{
string destinationFileName;
string file = Path.GetFileName(entry.Key);
@@ -55,7 +52,7 @@ namespace SharpCompress.Common
{
throw new ExtractionException("Entry is trying to write a file outside of the destination directory.");
}
await write(destinationFileName, options, cancellationToken);
write(destinationFileName, options);
}
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
{
@@ -63,12 +60,11 @@ namespace SharpCompress.Common
}
}
public static async ValueTask WriteEntryToFileAsync(IEntry entry, string destinationFileName,
public static void WriteEntryToFile(IEntry entry, string destinationFileName,
ExtractionOptions? options,
Func<string, FileMode, CancellationToken, ValueTask> openAndWrite,
CancellationToken cancellationToken = default)
Action<string, FileMode> openAndWrite)
{
if (entry.LinkTarget is not null)
if (entry.LinkTarget != null)
{
if (options?.WriteSymbolicLink is null)
{
@@ -89,7 +85,7 @@ namespace SharpCompress.Common
fm = FileMode.CreateNew;
}
await openAndWrite(destinationFileName, fm, cancellationToken);
openAndWrite(destinationFileName, fm);
entry.PreserveExtractionOptions(destinationFileName, options);
}
}

View File

@@ -1,6 +1,4 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common
{
@@ -13,9 +11,9 @@ namespace SharpCompress.Common
internal ArchiveEncoding ArchiveEncoding { get; }
internal abstract string? FilePartName { get; }
internal abstract string FilePartName { get; }
internal abstract ValueTask<Stream> GetCompressedStreamAsync(CancellationToken cancellationToken);
internal abstract Stream GetCompressedStream();
internal abstract Stream? GetRawStream();
internal bool Skipped { get; set; }
}

View File

@@ -1,8 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.CompilerServices;
using System.Threading;
namespace SharpCompress.Common.GZip
{
@@ -19,7 +17,7 @@ namespace SharpCompress.Common.GZip
public override long Crc => _filePart.Crc ?? 0;
public override string Key => _filePart.FilePartName ?? string.Empty;
public override string Key => _filePart.FilePartName;
public override string? LinkTarget => null;
@@ -43,12 +41,9 @@ namespace SharpCompress.Common.GZip
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
internal static async IAsyncEnumerable<GZipEntry> GetEntries(Stream stream, OptionsBase options,
[EnumeratorCancellation] CancellationToken cancellationToken)
internal static IEnumerable<GZipEntry> GetEntries(Stream stream, OptionsBase options)
{
var part = new GZipFilePart(options.ArchiveEncoding);
await part.Initialize(stream, cancellationToken);
yield return new GZipEntry(part);
yield return new GZipEntry(new GZipFilePart(stream, options.ArchiveEncoding));
}
}
}

View File

@@ -1,10 +1,7 @@
using System;
using System.Buffers;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
@@ -14,44 +11,34 @@ namespace SharpCompress.Common.GZip
internal sealed class GZipFilePart : FilePart
{
private string? _name;
//init only
#nullable disable
private Stream _stream;
#nullable enable
private readonly Stream _stream;
internal GZipFilePart(ArchiveEncoding archiveEncoding)
internal GZipFilePart(Stream stream, ArchiveEncoding archiveEncoding)
: base(archiveEncoding)
{
}
internal async ValueTask Initialize(Stream stream, CancellationToken cancellationToken)
{
_stream = stream;
ReadAndValidateGzipHeader();
if (stream.CanSeek)
{
long position = stream.Position;
stream.Position = stream.Length - 8;
await ReadTrailerAsync(cancellationToken);
ReadTrailer();
stream.Position = position;
}
EntryStartPosition = stream.Position;
}
internal long EntryStartPosition { get; private set; }
internal long EntryStartPosition { get; }
internal DateTime? DateModified { get; private set; }
internal int? Crc { get; private set; }
internal int? UncompressedSize { get; private set; }
internal override string? FilePartName => _name;
internal override string FilePartName => _name!;
internal override async ValueTask<Stream> GetCompressedStreamAsync(CancellationToken cancellationToken)
internal override Stream GetCompressedStream()
{
var stream = new GZipStream(_stream, CompressionMode.Decompress, CompressionLevel.Default);
await stream.ReadAsync(Array.Empty<byte>(), 0, 0, cancellationToken);
_name = stream.FileName;
DateModified = stream.LastModified;
return stream;
return new DeflateStream(_stream, CompressionMode.Decompress, CompressionLevel.Default);
}
internal override Stream GetRawStream()
@@ -59,12 +46,93 @@ namespace SharpCompress.Common.GZip
return _stream;
}
private async ValueTask ReadTrailerAsync(CancellationToken cancellationToken)
private void ReadTrailer()
{
// Read and potentially verify the GZIP trailer: CRC32 and size mod 2^32
Span<byte> trailer = stackalloc byte[8];
int n = _stream.Read(trailer);
Crc = await _stream.ReadInt32(cancellationToken);
UncompressedSize = await _stream.ReadInt32(cancellationToken);
Crc = BinaryPrimitives.ReadInt32LittleEndian(trailer);
UncompressedSize = BinaryPrimitives.ReadInt32LittleEndian(trailer.Slice(4));
}
private void ReadAndValidateGzipHeader()
{
// read the header on the first read
Span<byte> header = stackalloc byte[10];
int n = _stream.Read(header);
// workitem 8501: handle edge case (decompress empty stream)
if (n == 0)
{
return;
}
if (n != 10)
{
throw new ZlibException("Not a valid GZIP stream.");
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
throw new ZlibException("Bad GZIP header.");
}
int timet = BinaryPrimitives.ReadInt32LittleEndian(header.Slice(4));
DateModified = TarHeader.EPOCH.AddSeconds(timet);
if ((header[3] & 0x04) == 0x04)
{
// read and discard extra field
n = _stream.Read(header.Slice(0, 2)); // 2-byte length field
short extraLength = (short)(header[0] + header[1] * 256);
byte[] extra = new byte[extraLength];
if (!_stream.ReadFully(extra))
{
throw new ZlibException("Unexpected end-of-file reading GZIP header.");
}
n = extraLength;
}
if ((header[3] & 0x08) == 0x08)
{
_name = ReadZeroTerminatedString(_stream);
}
if ((header[3] & 0x10) == 0x010)
{
ReadZeroTerminatedString(_stream);
}
if ((header[3] & 0x02) == 0x02)
{
_stream.ReadByte(); // CRC16, ignore
}
}
private string ReadZeroTerminatedString(Stream stream)
{
Span<byte> buf1 = stackalloc byte[1];
var list = new List<byte>();
bool done = false;
do
{
// workitem 7740
int n = stream.Read(buf1);
if (n != 1)
{
throw new ZlibException("Unexpected EOF reading GZIP header.");
}
if (buf1[0] == 0)
{
done = true;
}
else
{
list.Add(buf1[0]);
}
}
while (!done);
byte[] buffer = list.ToArray();
return ArchiveEncoding.Decode(buffer);
}
}
}

View File

@@ -2,7 +2,7 @@
namespace SharpCompress.Common
{
public interface IVolume : IAsyncDisposable
public interface IVolume : IDisposable
{
}
}

View File

@@ -5,8 +5,6 @@ using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
@@ -785,7 +783,7 @@ namespace SharpCompress.Common.SevenZip
}
}
private async ValueTask<List<byte[]>> ReadAndDecodePackedStreams(long baseOffset, IPasswordProvider pass, CancellationToken cancellationToken)
private List<byte[]> ReadAndDecodePackedStreams(long baseOffset, IPasswordProvider pass)
{
#if DEBUG
Log.WriteLine("-- ReadAndDecodePackedStreams --");
@@ -817,8 +815,8 @@ namespace SharpCompress.Common.SevenZip
dataStartPos += packSize;
}
var outStream = await DecoderStreamHelper.CreateDecoderStream(_stream, oldDataStartPos, myPackSizes,
folder, pass, cancellationToken);
var outStream = DecoderStreamHelper.CreateDecoderStream(_stream, oldDataStartPos, myPackSizes,
folder, pass);
int unpackSize = checked((int)folder.GetUnpackSize());
byte[] data = new byte[unpackSize];
@@ -847,7 +845,7 @@ namespace SharpCompress.Common.SevenZip
}
}
private async ValueTask ReadHeader(ArchiveDatabase db, IPasswordProvider getTextPassword, CancellationToken cancellationToken)
private void ReadHeader(ArchiveDatabase db, IPasswordProvider getTextPassword)
{
#if DEBUG
Log.WriteLine("-- ReadHeader --");
@@ -866,7 +864,7 @@ namespace SharpCompress.Common.SevenZip
List<byte[]> dataVector = null;
if (type == BlockType.AdditionalStreamsInfo)
{
dataVector = await ReadAndDecodePackedStreams(db._startPositionAfterHeader, getTextPassword, cancellationToken);
dataVector = ReadAndDecodePackedStreams(db._startPositionAfterHeader, getTextPassword);
type = ReadId();
}

View File

@@ -32,7 +32,7 @@ namespace SharpCompress.Common.SevenZip
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted => false;
public override bool IsEncrypted => FilePart.IsEncrypted;
public override bool IsDirectory => FilePart.Header.IsDir;

View File

@@ -1,8 +1,6 @@
using System;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Common.SevenZip
@@ -37,11 +35,11 @@ namespace SharpCompress.Common.SevenZip
return null;
}
internal override async ValueTask<Stream> GetCompressedStreamAsync(CancellationToken cancellationToken)
internal override Stream GetCompressedStream()
{
if (!Header.HasStream)
{
return Stream.Null;
return null!;
}
var folderStream = _database.GetFolderStream(_stream, Folder!, _database.PasswordProvider);
@@ -54,7 +52,7 @@ namespace SharpCompress.Common.SevenZip
}
if (skipSize > 0)
{
await folderStream.SkipAsync(skipSize, cancellationToken);
folderStream.Skip(skipSize);
}
return new ReadOnlySubStream(folderStream, Header.Size);
}
@@ -104,5 +102,7 @@ namespace SharpCompress.Common.SevenZip
throw new NotImplementedException();
}
}
internal bool IsEncrypted => Folder!._coders.FindIndex(c => c._methodId._id == CMethodId.K_AES_ID) != -1;
}
}

View File

@@ -1,12 +1,9 @@
#nullable disable
using System;
using System.Buffers;
using System.Buffers.Binary;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Tar.Headers
{
@@ -35,48 +32,48 @@ namespace SharpCompress.Common.Tar.Headers
internal const int BLOCK_SIZE = 512;
internal async Task WriteAsync(Stream output)
internal void Write(Stream output)
{
using var buffer = MemoryPool<byte>.Shared.Rent(BLOCK_SIZE);
byte[] buffer = new byte[BLOCK_SIZE];
WriteOctalBytes(511, buffer.Memory.Span, 100, 8); // file mode
WriteOctalBytes(0, buffer.Memory.Span, 108, 8); // owner ID
WriteOctalBytes(0, buffer.Memory.Span, 116, 8); // group ID
WriteOctalBytes(511, buffer, 100, 8); // file mode
WriteOctalBytes(0, buffer, 108, 8); // owner ID
WriteOctalBytes(0, buffer, 116, 8); // group ID
//ArchiveEncoding.UTF8.GetBytes("magic").CopyTo(buffer, 257);
var nameByteCount = ArchiveEncoding.GetEncoding().GetByteCount(Name);
if (nameByteCount > 100)
{
// Set mock filename and filetype to indicate the next block is the actual name of the file
WriteStringBytes("././@LongLink", buffer.Memory.Span, 0, 100);
buffer.Memory.Span[156] = (byte)EntryType.LongName;
WriteOctalBytes(nameByteCount + 1, buffer.Memory.Span, 124, 12);
WriteStringBytes("././@LongLink", buffer, 0, 100);
buffer[156] = (byte)EntryType.LongName;
WriteOctalBytes(nameByteCount + 1, buffer, 124, 12);
}
else
{
WriteStringBytes(ArchiveEncoding.Encode(Name), buffer.Memory, 100);
WriteOctalBytes(Size, buffer.Memory.Span, 124, 12);
WriteStringBytes(ArchiveEncoding.Encode(Name), buffer, 100);
WriteOctalBytes(Size, buffer, 124, 12);
var time = (long)(LastModifiedTime.ToUniversalTime() - EPOCH).TotalSeconds;
WriteOctalBytes(time, buffer.Memory.Span, 136, 12);
buffer.Memory.Span[156] = (byte)EntryType;
WriteOctalBytes(time, buffer, 136, 12);
buffer[156] = (byte)EntryType;
if (Size >= 0x1FFFFFFFF)
{
using var bytes12 = MemoryPool<byte>.Shared.Rent(12);
BinaryPrimitives.WriteInt64BigEndian(bytes12.Memory.Span.Slice(4), Size);
bytes12.Memory.Span[0] |= 0x80;
bytes12.Memory.CopyTo(buffer.Memory.Slice(124));
Span<byte> bytes12 = stackalloc byte[12];
BinaryPrimitives.WriteInt64BigEndian(bytes12.Slice(4), Size);
bytes12[0] |= 0x80;
bytes12.CopyTo(buffer.AsSpan(124));
}
}
int crc = RecalculateChecksum(buffer.Memory);
WriteOctalBytes(crc, buffer.Memory.Span, 148, 8);
int crc = RecalculateChecksum(buffer);
WriteOctalBytes(crc, buffer, 148, 8);
await output.WriteAsync(buffer.Memory.Slice(0, BLOCK_SIZE));
output.Write(buffer, 0, buffer.Length);
if (nameByteCount > 100)
{
await WriteLongFilenameHeaderAsync(output);
WriteLongFilenameHeader(output);
// update to short name lower than 100 - [max bytes of one character].
// subtracting bytes is needed because preventing infinite loop(example code is here).
//
@@ -85,14 +82,14 @@ namespace SharpCompress.Common.Tar.Headers
//
// and then infinite recursion is occured in WriteLongFilenameHeader because truncated.Length is 102.
Name = ArchiveEncoding.Decode(ArchiveEncoding.Encode(Name), 0, 100 - ArchiveEncoding.GetEncoding().GetMaxByteCount(1));
await WriteAsync(output);
Write(output);
}
}
private async Task WriteLongFilenameHeaderAsync(Stream output)
private void WriteLongFilenameHeader(Stream output)
{
byte[] nameBytes = ArchiveEncoding.Encode(Name);
await output.WriteAsync(nameBytes.AsMemory());
output.Write(nameBytes, 0, nameBytes.Length);
// pad to multiple of BlockSize bytes, and make sure a terminating null is added
int numPaddingBytes = BLOCK_SIZE - (nameBytes.Length % BLOCK_SIZE);
@@ -100,56 +97,48 @@ namespace SharpCompress.Common.Tar.Headers
{
numPaddingBytes = BLOCK_SIZE;
}
using var padding = MemoryPool<byte>.Shared.Rent(numPaddingBytes);
padding.Memory.Span.Clear();
await output.WriteAsync(padding.Memory.Slice(0, numPaddingBytes));
output.Write(stackalloc byte[numPaddingBytes]);
}
internal async ValueTask<bool> Read(Stream stream, CancellationToken cancellationToken)
internal bool Read(BinaryReader reader)
{
var block = MemoryPool<byte>.Shared.Rent(BLOCK_SIZE);
bool readFullyAsync = await stream.ReadAsync(block.Memory.Slice(0, BLOCK_SIZE), cancellationToken) == BLOCK_SIZE;
if (readFullyAsync is false)
var buffer = ReadBlock(reader);
if (buffer.Length == 0)
{
return false;
}
// for symlinks, additionally read the linkname
if (ReadEntryType(block.Memory.Span) == EntryType.SymLink)
if (ReadEntryType(buffer) == EntryType.SymLink)
{
LinkName = ArchiveEncoding.Decode(block.Memory.Span.Slice(157, 100)).TrimNulls();
LinkName = ArchiveEncoding.Decode(buffer, 157, 100).TrimNulls();
}
if (ReadEntryType(block.Memory.Span) == EntryType.LongName)
if (ReadEntryType(buffer) == EntryType.LongName)
{
Name = await ReadLongName(stream, block.Memory.Slice(0,BLOCK_SIZE), cancellationToken);
readFullyAsync = await stream.ReadAsync(block.Memory.Slice(0, BLOCK_SIZE), cancellationToken) == BLOCK_SIZE;
if (readFullyAsync is false)
{
return false;
}
Name = ReadLongName(reader, buffer);
buffer = ReadBlock(reader);
}
else
{
Name = ArchiveEncoding.Decode(block.Memory.Span.Slice( 0, 100)).TrimNulls();
Name = ArchiveEncoding.Decode(buffer, 0, 100).TrimNulls();
}
EntryType = ReadEntryType(block.Memory.Span);
Size = ReadSize(block.Memory.Slice(0, BLOCK_SIZE));
EntryType = ReadEntryType(buffer);
Size = ReadSize(buffer);
//Mode = ReadASCIIInt32Base8(buffer, 100, 7);
//UserId = ReadASCIIInt32Base8(buffer, 108, 7);
//GroupId = ReadASCIIInt32Base8(buffer, 116, 7);
long unixTimeStamp = ReadAsciiInt64Base8(block.Memory.Span.Slice(136, 11));
long unixTimeStamp = ReadAsciiInt64Base8(buffer, 136, 11);
LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime();
Magic = ArchiveEncoding.Decode(block.Memory.Span.Slice( 257, 6)).TrimNulls();
Magic = ArchiveEncoding.Decode(buffer, 257, 6).TrimNulls();
if (!string.IsNullOrEmpty(Magic)
&& "ustar".Equals(Magic))
{
string namePrefix = ArchiveEncoding.Decode(block.Memory.Span.Slice( 345, 157));
string namePrefix = ArchiveEncoding.Decode(buffer, 345, 157);
namePrefix = namePrefix.TrimNulls();
if (!string.IsNullOrEmpty(namePrefix))
{
@@ -164,46 +153,55 @@ namespace SharpCompress.Common.Tar.Headers
return true;
}
private async ValueTask<string> ReadLongName(Stream reader, ReadOnlyMemory<byte> buffer, CancellationToken cancellationToken)
private string ReadLongName(BinaryReader reader, byte[] buffer)
{
var size = ReadSize(buffer);
var nameLength = (int)size;
using var rented = MemoryPool<byte>.Shared.Rent(nameLength);
var nameBytes = rented.Memory.Slice(0, nameLength);
await reader.ReadAsync(nameBytes, cancellationToken);
var nameBytes = reader.ReadBytes(nameLength);
var remainingBytesToRead = BLOCK_SIZE - (nameLength % BLOCK_SIZE);
// Read the rest of the block and discard the data
if (remainingBytesToRead < BLOCK_SIZE)
{
using var remaining = MemoryPool<byte>.Shared.Rent(remainingBytesToRead);
await reader.ReadAsync(remaining.Memory.Slice(0, remainingBytesToRead), cancellationToken);
reader.ReadBytes(remainingBytesToRead);
}
return ArchiveEncoding.Decode(nameBytes.Span).TrimNulls();
return ArchiveEncoding.Decode(nameBytes, 0, nameBytes.Length).TrimNulls();
}
private static EntryType ReadEntryType(Span<byte> buffer)
private static EntryType ReadEntryType(byte[] buffer)
{
return (EntryType)buffer[156];
}
private long ReadSize(ReadOnlyMemory<byte> buffer)
private long ReadSize(byte[] buffer)
{
if ((buffer.Span[124] & 0x80) == 0x80) // if size in binary
if ((buffer[124] & 0x80) == 0x80) // if size in binary
{
return BinaryPrimitives.ReadInt64BigEndian(buffer.Span.Slice(0x80));
return BinaryPrimitives.ReadInt64BigEndian(buffer.AsSpan(0x80));
}
return ReadAsciiInt64Base8(buffer.Span.Slice(124, 11));
}
private static void WriteStringBytes(ReadOnlySpan<byte> name, Memory<byte> buffer, int length)
{
name.CopyTo(buffer.Span.Slice(0));
int i = Math.Min(length, name.Length);
buffer.Slice(i, length - i).Span.Clear();
return ReadAsciiInt64Base8(buffer, 124, 11);
}
private static void WriteStringBytes(string name, Span<byte> buffer, int offset, int length)
private static byte[] ReadBlock(BinaryReader reader)
{
byte[] buffer = reader.ReadBytes(BLOCK_SIZE);
if (buffer.Length != 0 && buffer.Length < BLOCK_SIZE)
{
throw new InvalidOperationException("Buffer is invalid size");
}
return buffer;
}
private static void WriteStringBytes(ReadOnlySpan<byte> name, Span<byte> buffer, int length)
{
name.CopyTo(buffer);
int i = Math.Min(length, name.Length);
buffer.Slice(i, length - i).Clear();
}
private static void WriteStringBytes(string name, byte[] buffer, int offset, int length)
{
int i;
@@ -218,7 +216,7 @@ namespace SharpCompress.Common.Tar.Headers
}
}
private static void WriteOctalBytes(long value, Span<byte> buffer, int offset, int length)
private static void WriteOctalBytes(long value, byte[] buffer, int offset, int length)
{
string val = Convert.ToString(value, 8);
int shift = length - val.Length - 1;
@@ -232,9 +230,19 @@ namespace SharpCompress.Common.Tar.Headers
}
}
private static long ReadAsciiInt64Base8(ReadOnlySpan<byte> buffer)
private static int ReadAsciiInt32Base8(byte[] buffer, int offset, int count)
{
string s = Encoding.UTF8.GetString(buffer).TrimNulls();
string s = Encoding.UTF8.GetString(buffer, offset, count).TrimNulls();
if (string.IsNullOrEmpty(s))
{
return 0;
}
return Convert.ToInt32(s, 8);
}
private static long ReadAsciiInt64Base8(byte[] buffer, int offset, int count)
{
string s = Encoding.UTF8.GetString(buffer, offset, count).TrimNulls();
if (string.IsNullOrEmpty(s))
{
return 0;
@@ -258,20 +266,38 @@ namespace SharpCompress.Common.Tar.Headers
(byte)' ', (byte)' ', (byte)' ', (byte)' '
};
private static int RecalculateChecksum(Memory<byte> buf)
internal static int RecalculateChecksum(byte[] buf)
{
// Set default value for checksum. That is 8 spaces.
eightSpaces.CopyTo(buf.Slice(148));
eightSpaces.CopyTo(buf, 148);
// Calculate checksum
int headerChecksum = 0;
foreach (byte b in buf.Span)
foreach (byte b in buf)
{
headerChecksum += b;
}
return headerChecksum;
}
internal static int RecalculateAltChecksum(byte[] buf)
{
eightSpaces.CopyTo(buf, 148);
int headerChecksum = 0;
foreach (byte b in buf)
{
if ((b & 0x80) == 0x80)
{
headerChecksum -= b ^ 0x80;
}
else
{
headerChecksum += b;
}
}
return headerChecksum;
}
public long? DataStartPosition { get; set; }
public string Magic { get; set; }

View File

@@ -3,8 +3,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.CompilerServices;
using System.Threading;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
@@ -48,11 +46,10 @@ namespace SharpCompress.Common.Tar
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
internal static async IAsyncEnumerable<TarEntry> GetEntries(StreamingMode mode, Stream stream,
CompressionType compressionType, ArchiveEncoding archiveEncoding,
[EnumeratorCancellation]CancellationToken cancellationToken)
internal static IEnumerable<TarEntry> GetEntries(StreamingMode mode, Stream stream,
CompressionType compressionType, ArchiveEncoding archiveEncoding)
{
await foreach (TarHeader h in TarHeaderFactory.ReadHeader(mode, stream, archiveEncoding, cancellationToken))
foreach (TarHeader h in TarHeaderFactory.ReadHeader(mode, stream, archiveEncoding))
{
if (h != null)
{

View File

@@ -1,7 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
namespace SharpCompress.Common.Tar
{
@@ -20,14 +19,14 @@ namespace SharpCompress.Common.Tar
internal override string FilePartName => Header.Name;
internal override ValueTask<Stream> GetCompressedStreamAsync(CancellationToken cancellationToken)
internal override Stream GetCompressedStream()
{
if (_seekableStream != null)
{
_seekableStream.Position = Header.DataStartPosition!.Value;
return new(new TarReadOnlySubStream(_seekableStream, Header.Size));
return new TarReadOnlySubStream(_seekableStream, Header.Size);
}
return new(Header.PackedStream);
return Header.PackedStream;
}
internal override Stream? GetRawStream()

View File

@@ -1,7 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Runtime.CompilerServices;
using System.Threading;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
@@ -9,17 +7,17 @@ namespace SharpCompress.Common.Tar
{
internal static class TarHeaderFactory
{
internal static async IAsyncEnumerable<TarHeader?> ReadHeader(StreamingMode mode, Stream stream, ArchiveEncoding archiveEncoding,
[EnumeratorCancellation]CancellationToken cancellationToken)
internal static IEnumerable<TarHeader?> ReadHeader(StreamingMode mode, Stream stream, ArchiveEncoding archiveEncoding)
{
while (true)
{
TarHeader? header = null;
try
{
BinaryReader reader = new BinaryReader(stream);
header = new TarHeader(archiveEncoding);
if (!await header.Read(stream, cancellationToken))
if (!header.Read(reader))
{
yield break;
}
@@ -27,10 +25,10 @@ namespace SharpCompress.Common.Tar
{
case StreamingMode.Seekable:
{
header.DataStartPosition = stream.Position;
header.DataStartPosition = reader.BaseStream.Position;
//skip to nearest 512
stream.Position += PadTo512(header.Size);
reader.BaseStream.Position += PadTo512(header.Size);
}
break;
case StreamingMode.Streaming:

View File

@@ -1,8 +1,6 @@
using SharpCompress.IO;
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Tar
{
@@ -16,7 +14,7 @@ namespace SharpCompress.Common.Tar
BytesLeftToRead = bytesToRead;
}
public override async ValueTask DisposeAsync()
protected override void Dispose(bool disposing)
{
if (_isDisposed)
{
@@ -25,17 +23,22 @@ namespace SharpCompress.Common.Tar
_isDisposed = true;
// Ensure we read all remaining blocks for this entry.
await Stream.SkipAsync(BytesLeftToRead);
_amountRead += BytesLeftToRead;
// If the last block wasn't a full 512 bytes, skip the remaining padding bytes.
var bytesInLastBlock = _amountRead % 512;
if (bytesInLastBlock != 0)
if (disposing)
{
await Stream.SkipAsync(512 - bytesInLastBlock);
// Ensure we read all remaining blocks for this entry.
Stream.Skip(BytesLeftToRead);
_amountRead += BytesLeftToRead;
// If the last block wasn't a full 512 bytes, skip the remaining padding bytes.
var bytesInLastBlock = _amountRead % 512;
if (bytesInLastBlock != 0)
{
Stream.Skip(512 - bytesInLastBlock);
}
}
base.Dispose(disposing);
}
private long BytesLeftToRead { get; set; }
@@ -46,18 +49,22 @@ namespace SharpCompress.Common.Tar
public override bool CanWrite => false;
public override void Flush()
{
throw new NotSupportedException();
}
public override long Length => throw new NotSupportedException();
public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
public override async ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken = default)
public override int Read(byte[] buffer, int offset, int count)
{
var count = buffer.Length;
if (BytesLeftToRead < buffer.Length)
if (BytesLeftToRead < count)
{
count = (int)BytesLeftToRead;
}
int read = await Stream.ReadAsync(buffer.Slice(0, count), cancellationToken);
int read = Stream.Read(buffer, offset, count);
if (read > 0)
{
BytesLeftToRead -= read;
@@ -66,9 +73,20 @@ namespace SharpCompress.Common.Tar
return read;
}
public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
public override int ReadByte()
{
throw new NotSupportedException();
if (BytesLeftToRead <= 0)
{
return -1;
}
int value = Stream.ReadByte();
if (value != -1)
{
--BytesLeftToRead;
++_amountRead;
}
return value;
}
public override long Seek(long offset, SeekOrigin origin)
@@ -80,5 +98,10 @@ namespace SharpCompress.Common.Tar
{
throw new NotSupportedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotSupportedException();
}
}
}

View File

@@ -1,5 +1,5 @@
using System.IO;
using System.Threading.Tasks;
using System;
using System.IO;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -33,10 +33,19 @@ namespace SharpCompress.Common
/// RarArchive is part of a multi-part archive.
/// </summary>
public virtual bool IsMultiVolume => true;
public ValueTask DisposeAsync()
protected virtual void Dispose(bool disposing)
{
return _actualStream.DisposeAsync();
if (disposing)
{
_actualStream.Dispose();
}
}
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
}
}

View File

@@ -1,6 +1,4 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers
{
@@ -11,29 +9,29 @@ namespace SharpCompress.Common.Zip.Headers
{
}
internal override async ValueTask Read(Stream stream, CancellationToken cancellationToken)
internal override void Read(BinaryReader reader)
{
VolumeNumber = await stream.ReadUInt16(cancellationToken);
FirstVolumeWithDirectory = await stream.ReadUInt16(cancellationToken);
TotalNumberOfEntriesInDisk = await stream.ReadUInt16(cancellationToken);
TotalNumberOfEntries = await stream.ReadUInt16(cancellationToken);
DirectorySize = await stream.ReadUInt32(cancellationToken);
DirectoryStartOffsetRelativeToDisk = await stream.ReadUInt32(cancellationToken);
CommentLength = await stream.ReadUInt16(cancellationToken);
Comment = await stream.ReadBytes(CommentLength ?? 0, cancellationToken);
VolumeNumber = reader.ReadUInt16();
FirstVolumeWithDirectory = reader.ReadUInt16();
TotalNumberOfEntriesInDisk = reader.ReadUInt16();
TotalNumberOfEntries = reader.ReadUInt16();
DirectorySize = reader.ReadUInt32();
DirectoryStartOffsetRelativeToDisk = reader.ReadUInt32();
CommentLength = reader.ReadUInt16();
Comment = reader.ReadBytes(CommentLength);
}
public ushort? VolumeNumber { get; private set; }
public ushort VolumeNumber { get; private set; }
public ushort? FirstVolumeWithDirectory { get; private set; }
public ushort FirstVolumeWithDirectory { get; private set; }
public ushort? TotalNumberOfEntriesInDisk { get; private set; }
public ushort TotalNumberOfEntriesInDisk { get; private set; }
public uint? DirectorySize { get; private set; }
public uint DirectorySize { get; private set; }
public uint? DirectoryStartOffsetRelativeToDisk { get; private set; }
public uint DirectoryStartOffsetRelativeToDisk { get; private set; }
public ushort? CommentLength { get; private set; }
public ushort CommentLength { get; private set; }
public byte[]? Comment { get; private set; }

View File

@@ -1,7 +1,5 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers
{
@@ -12,28 +10,28 @@ namespace SharpCompress.Common.Zip.Headers
{
}
internal override async ValueTask Read(Stream stream, CancellationToken cancellationToken)
internal override void Read(BinaryReader reader)
{
Version = await stream.ReadUInt16(cancellationToken);
VersionNeededToExtract = await stream.ReadUInt16(cancellationToken);
Flags = (HeaderFlags)await stream.ReadUInt16(cancellationToken);
CompressionMethod = (ZipCompressionMethod)await stream.ReadUInt16(cancellationToken);
LastModifiedTime = await stream.ReadUInt16(cancellationToken);
LastModifiedDate = await stream.ReadUInt16(cancellationToken);
Crc = await stream.ReadUInt32(cancellationToken);
CompressedSize = await stream.ReadUInt32(cancellationToken);
UncompressedSize = await stream.ReadUInt32(cancellationToken);
ushort nameLength = await stream.ReadUInt16(cancellationToken);
ushort extraLength = await stream.ReadUInt16(cancellationToken);
ushort commentLength = await stream.ReadUInt16(cancellationToken);
DiskNumberStart = await stream.ReadUInt16(cancellationToken);
InternalFileAttributes = await stream.ReadUInt16(cancellationToken);
ExternalFileAttributes = await stream.ReadUInt32(cancellationToken);
RelativeOffsetOfEntryHeader = await stream.ReadUInt32(cancellationToken);
Version = reader.ReadUInt16();
VersionNeededToExtract = reader.ReadUInt16();
Flags = (HeaderFlags)reader.ReadUInt16();
CompressionMethod = (ZipCompressionMethod)reader.ReadUInt16();
LastModifiedTime = reader.ReadUInt16();
LastModifiedDate = reader.ReadUInt16();
Crc = reader.ReadUInt32();
CompressedSize = reader.ReadUInt32();
UncompressedSize = reader.ReadUInt32();
ushort nameLength = reader.ReadUInt16();
ushort extraLength = reader.ReadUInt16();
ushort commentLength = reader.ReadUInt16();
DiskNumberStart = reader.ReadUInt16();
InternalFileAttributes = reader.ReadUInt16();
ExternalFileAttributes = reader.ReadUInt32();
RelativeOffsetOfEntryHeader = reader.ReadUInt32();
byte[] name = await stream.ReadBytes(nameLength, cancellationToken);
byte[] extra = await stream.ReadBytes(extraLength, cancellationToken);
byte[] comment = await stream.ReadBytes(commentLength, cancellationToken);
byte[] name = reader.ReadBytes(nameLength);
byte[] extra = reader.ReadBytes(extraLength);
byte[] comment = reader.ReadBytes(commentLength);
// According to .ZIP File Format Specification
//

View File

@@ -1,6 +1,4 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers
{
@@ -11,9 +9,8 @@ namespace SharpCompress.Common.Zip.Headers
{
}
internal override ValueTask Read(Stream stream, CancellationToken cancellationToken)
internal override void Read(BinaryReader reader)
{
return new();
}
}
}

View File

@@ -1,7 +1,5 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers
{
@@ -12,20 +10,20 @@ namespace SharpCompress.Common.Zip.Headers
{
}
internal override async ValueTask Read(Stream stream, CancellationToken cancellationToken)
internal override void Read(BinaryReader reader)
{
Version = await stream.ReadUInt16(cancellationToken);
Flags = (HeaderFlags)await stream.ReadUInt16(cancellationToken);
CompressionMethod = (ZipCompressionMethod)await stream.ReadUInt16(cancellationToken);
LastModifiedTime = await stream.ReadUInt16(cancellationToken);
LastModifiedDate = await stream.ReadUInt16(cancellationToken);
Crc = await stream.ReadUInt32(cancellationToken);
CompressedSize = await stream.ReadUInt32(cancellationToken);
UncompressedSize = await stream.ReadUInt32(cancellationToken);
ushort nameLength = await stream.ReadUInt16(cancellationToken);
ushort extraLength = await stream.ReadUInt16(cancellationToken);
byte[] name = await stream.ReadBytes(nameLength, cancellationToken);
byte[] extra = await stream.ReadBytes(extraLength, cancellationToken);
Version = reader.ReadUInt16();
Flags = (HeaderFlags)reader.ReadUInt16();
CompressionMethod = (ZipCompressionMethod)reader.ReadUInt16();
LastModifiedTime = reader.ReadUInt16();
LastModifiedDate = reader.ReadUInt16();
Crc = reader.ReadUInt32();
CompressedSize = reader.ReadUInt32();
UncompressedSize = reader.ReadUInt32();
ushort nameLength = reader.ReadUInt16();
ushort extraLength = reader.ReadUInt16();
byte[] name = reader.ReadBytes(nameLength);
byte[] extra = reader.ReadBytes(extraLength);
// According to .ZIP File Format Specification
//

View File

@@ -1,7 +1,5 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers
{
@@ -12,7 +10,7 @@ namespace SharpCompress.Common.Zip.Headers
{
}
internal override ValueTask Read(Stream stream, CancellationToken cancellationToken)
internal override void Read(BinaryReader reader)
{
throw new NotImplementedException();
}

View File

@@ -1,6 +1,4 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers
{
@@ -11,18 +9,18 @@ namespace SharpCompress.Common.Zip.Headers
{
}
internal override async ValueTask Read(Stream stream, CancellationToken cancellationToken)
internal override void Read(BinaryReader reader)
{
SizeOfDirectoryEndRecord = (long)await stream.ReadUInt64(cancellationToken);
VersionMadeBy = await stream.ReadUInt16(cancellationToken);
VersionNeededToExtract = await stream.ReadUInt16(cancellationToken);
VolumeNumber = await stream.ReadUInt32(cancellationToken);
FirstVolumeWithDirectory = await stream.ReadUInt32(cancellationToken);
TotalNumberOfEntriesInDisk = (long)await stream.ReadUInt64(cancellationToken);
TotalNumberOfEntries = (long)await stream.ReadUInt64(cancellationToken);
DirectorySize = (long)await stream.ReadUInt64(cancellationToken);
DirectoryStartOffsetRelativeToDisk = (long)await stream.ReadUInt64(cancellationToken);
DataSector = await stream.ReadBytes((int)(SizeOfDirectoryEndRecord - SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS), cancellationToken);
SizeOfDirectoryEndRecord = (long)reader.ReadUInt64();
VersionMadeBy = reader.ReadUInt16();
VersionNeededToExtract = reader.ReadUInt16();
VolumeNumber = reader.ReadUInt32();
FirstVolumeWithDirectory = reader.ReadUInt32();
TotalNumberOfEntriesInDisk = (long)reader.ReadUInt64();
TotalNumberOfEntries = (long)reader.ReadUInt64();
DirectorySize = (long)reader.ReadUInt64();
DirectoryStartOffsetRelativeToDisk = (long)reader.ReadUInt64();
DataSector = reader.ReadBytes((int)(SizeOfDirectoryEndRecord - SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS));
}
private const int SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS = 44;

View File

@@ -1,6 +1,4 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers
{
@@ -11,11 +9,11 @@ namespace SharpCompress.Common.Zip.Headers
{
}
internal override async ValueTask Read(Stream stream, CancellationToken cancellationToken)
internal override void Read(BinaryReader reader)
{
FirstVolumeWithDirectory = await stream.ReadUInt32(cancellationToken);
RelativeOffsetOfTheEndOfDirectoryRecord = (long)await stream.ReadUInt64(cancellationToken);
TotalNumberOfVolumes = await stream.ReadUInt32(cancellationToken);
FirstVolumeWithDirectory = reader.ReadUInt32();
RelativeOffsetOfTheEndOfDirectoryRecord = (long)reader.ReadUInt64();
TotalNumberOfVolumes = reader.ReadUInt32();
}
public uint FirstVolumeWithDirectory { get; private set; }

View File

@@ -1,6 +1,4 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers
{
@@ -14,7 +12,7 @@ namespace SharpCompress.Common.Zip.Headers
internal ZipHeaderType ZipHeaderType { get; }
internal abstract ValueTask Read(Stream stream, CancellationToken cancellationToken);
internal abstract void Read(BinaryReader reader);
internal bool HasData { get; set; }
}

View File

@@ -1,6 +1,4 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
@@ -19,22 +17,22 @@ namespace SharpCompress.Common.Zip
_directoryEntryHeader = header;
}
internal override async ValueTask<Stream> GetCompressedStreamAsync(CancellationToken cancellationToken)
internal override Stream GetCompressedStream()
{
if (!_isLocalHeaderLoaded)
{
await LoadLocalHeader(cancellationToken);
LoadLocalHeader();
_isLocalHeaderLoaded = true;
}
return await base.GetCompressedStreamAsync(cancellationToken);
return base.GetCompressedStream();
}
internal string? Comment => ((DirectoryEntryHeader)Header).Comment;
private async ValueTask LoadLocalHeader(CancellationToken cancellationToken)
private void LoadLocalHeader()
{
bool hasData = Header.HasData;
Header = await _headerFactory.GetLocalHeader(BaseStream, (DirectoryEntryHeader)Header, cancellationToken);
Header = _headerFactory.GetLocalHeader(BaseStream, ((DirectoryEntryHeader)Header));
Header.HasData = hasData;
}

View File

@@ -1,12 +1,7 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Xz;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip
@@ -24,13 +19,15 @@ namespace SharpCompress.Common.Zip
{
}
internal async IAsyncEnumerable<ZipHeader> ReadSeekableHeader(Stream stream, [EnumeratorCancellation]CancellationToken cancellationToken)
internal IEnumerable<ZipHeader> ReadSeekableHeader(Stream stream)
{
await SeekBackToHeaderAsync(stream);
var reader = new BinaryReader(stream);
SeekBackToHeader(stream, reader);
var eocd_location = stream.Position;
var entry = new DirectoryEndHeader();
await entry.Read(stream, cancellationToken);
entry.Read(reader);
if (entry.IsZip64)
{
@@ -38,37 +35,37 @@ namespace SharpCompress.Common.Zip
// ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR should be before the EOCD
stream.Seek(eocd_location - ZIP64_EOCD_LENGTH - 4, SeekOrigin.Begin);
uint zip64_locator = await stream.ReadUInt32(cancellationToken);
uint zip64_locator = reader.ReadUInt32();
if( zip64_locator != ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR )
{
throw new ArchiveException("Failed to locate the Zip64 Directory Locator");
}
var zip64Locator = new Zip64DirectoryEndLocatorHeader();
await zip64Locator.Read(stream, cancellationToken);
zip64Locator.Read(reader);
stream.Seek(zip64Locator.RelativeOffsetOfTheEndOfDirectoryRecord, SeekOrigin.Begin);
uint zip64Signature = await stream.ReadUInt32(cancellationToken);
uint zip64Signature = reader.ReadUInt32();
if (zip64Signature != ZIP64_END_OF_CENTRAL_DIRECTORY)
{
throw new ArchiveException("Failed to locate the Zip64 Header");
}
var zip64Entry = new Zip64DirectoryEndHeader();
await zip64Entry.Read(stream, cancellationToken);
zip64Entry.Read(reader);
stream.Seek(zip64Entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin);
}
else
{
stream.Seek(entry.DirectoryStartOffsetRelativeToDisk ?? 0, SeekOrigin.Begin);
stream.Seek(entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin);
}
long position = stream.Position;
while (true)
{
stream.Position = position;
uint signature = await stream.ReadUInt32(cancellationToken);
var nextHeader = await ReadHeader(signature, stream, cancellationToken, _zip64);
uint signature = reader.ReadUInt32();
var nextHeader = ReadHeader(signature, reader, _zip64);
position = stream.Position;
if (nextHeader is null)
@@ -89,7 +86,7 @@ namespace SharpCompress.Common.Zip
}
}
private static bool IsMatch (Span<byte> haystack, int position, byte[] needle)
private static bool IsMatch( byte[] haystack, int position, byte[] needle)
{
for( int i = 0; i < needle.Length; i++ )
{
@@ -101,7 +98,7 @@ namespace SharpCompress.Common.Zip
return true;
}
private static async ValueTask SeekBackToHeaderAsync(Stream stream)
private static void SeekBackToHeader(Stream stream, BinaryReader reader)
{
// Minimum EOCD length
if (stream.Length < MINIMUM_EOCD_LENGTH)
@@ -115,18 +112,16 @@ namespace SharpCompress.Common.Zip
stream.Seek(-len, SeekOrigin.End);
using var rented = MemoryPool<byte>.Shared.Rent(len);
var buffer = rented.Memory.Slice(0, len);
await stream.ReadAsync(buffer);
byte[] seek = reader.ReadBytes(len);
// Search in reverse
buffer.Span.Reverse();
Array.Reverse(seek);
var max_search_area = len - MINIMUM_EOCD_LENGTH;
for( int pos_from_end = 0; pos_from_end < max_search_area; ++pos_from_end)
{
if( IsMatch( buffer.Span, pos_from_end, needle) )
if( IsMatch(seek, pos_from_end, needle) )
{
stream.Seek(-pos_from_end, SeekOrigin.End);
return;
@@ -136,11 +131,12 @@ namespace SharpCompress.Common.Zip
throw new ArchiveException("Failed to locate the Zip Header");
}
internal async ValueTask<LocalEntryHeader> GetLocalHeader(Stream stream, DirectoryEntryHeader directoryEntryHeader, CancellationToken cancellationToken)
internal LocalEntryHeader GetLocalHeader(Stream stream, DirectoryEntryHeader directoryEntryHeader)
{
stream.Seek(directoryEntryHeader.RelativeOffsetOfEntryHeader, SeekOrigin.Begin);
uint signature = await stream.ReadUInt32(cancellationToken);
var localEntryHeader = await ReadHeader(signature, stream, cancellationToken, _zip64) as LocalEntryHeader;
BinaryReader reader = new BinaryReader(stream);
uint signature = reader.ReadUInt32();
var localEntryHeader = ReadHeader(signature, reader, _zip64) as LocalEntryHeader;
if (localEntryHeader is null)
{
throw new InvalidOperationException();

View File

@@ -1,6 +1,4 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Deflate;
using SharpCompress.IO;
@@ -21,13 +19,13 @@ namespace SharpCompress.Common.Zip
return Header.PackedStream;
}
internal override async ValueTask<Stream> GetCompressedStreamAsync(CancellationToken cancellationToken)
internal override Stream GetCompressedStream()
{
if (!Header.HasData)
{
return Stream.Null;
}
_decompressionStream = await CreateDecompressionStream(GetCryptoStream(CreateBaseStream()), Header.CompressionMethod, cancellationToken);
_decompressionStream = CreateDecompressionStream(GetCryptoStream(CreateBaseStream()), Header.CompressionMethod);
if (LeaveStreamOpen)
{
return new NonDisposingStream(_decompressionStream);
@@ -35,17 +33,17 @@ namespace SharpCompress.Common.Zip
return _decompressionStream;
}
internal async ValueTask FixStreamedFileLocation(RewindableStream rewindableStream, CancellationToken cancellationToken)
internal BinaryReader FixStreamedFileLocation(ref RewindableStream rewindableStream)
{
if (Header.IsDirectory)
{
return;
return new BinaryReader(rewindableStream);
}
if (Header.HasData && !Skipped)
{
_decompressionStream ??= await GetCompressedStreamAsync(cancellationToken);
_decompressionStream ??= GetCompressedStream();
await _decompressionStream.SkipAsync(cancellationToken);
_decompressionStream.Skip();
if (_decompressionStream is DeflateStream deflateStream)
{
@@ -53,7 +51,9 @@ namespace SharpCompress.Common.Zip
}
Skipped = true;
}
var reader = new BinaryReader(rewindableStream);
_decompressionStream = null;
return reader;
}
}
}

View File

@@ -1,6 +1,5 @@
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using System.Threading;
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
@@ -13,36 +12,43 @@ namespace SharpCompress.Common.Zip
{
}
internal async IAsyncEnumerable<ZipHeader> ReadStreamHeader(RewindableStream rewindableStream, [EnumeratorCancellation] CancellationToken cancellationToken)
internal IEnumerable<ZipHeader> ReadStreamHeader(Stream stream)
{
RewindableStream rewindableStream;
if (stream is RewindableStream rs)
{
rewindableStream = rs;
}
else
{
rewindableStream = new RewindableStream(stream);
}
while (true)
{
ZipHeader? header;
BinaryReader reader = new BinaryReader(rewindableStream);
if (_lastEntryHeader != null &&
(FlagUtility.HasFlag(_lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor) || _lastEntryHeader.IsZip64))
{
await ((StreamingZipFilePart)_lastEntryHeader.Part).FixStreamedFileLocation(rewindableStream, cancellationToken);
reader = ((StreamingZipFilePart)_lastEntryHeader.Part).FixStreamedFileLocation(ref rewindableStream);
long? pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;
uint crc = await rewindableStream.ReadUInt32(cancellationToken);
uint crc = reader.ReadUInt32();
if (crc == POST_DATA_DESCRIPTOR)
{
crc = await rewindableStream.ReadUInt32(cancellationToken);
crc = reader.ReadUInt32();
}
_lastEntryHeader.Crc = crc;
_lastEntryHeader.CompressedSize = await rewindableStream.ReadUInt32(cancellationToken);
_lastEntryHeader.UncompressedSize = await rewindableStream.ReadUInt32(cancellationToken);
_lastEntryHeader.CompressedSize = reader.ReadUInt32();
_lastEntryHeader.UncompressedSize = reader.ReadUInt32();
if (pos.HasValue)
{
_lastEntryHeader.DataStartPosition = pos - _lastEntryHeader.CompressedSize;
}
}
_lastEntryHeader = null;
var headerBytes = await rewindableStream.ReadUInt32OrNull(cancellationToken);
if (headerBytes is null)
{
yield break;
}
header = await ReadHeader(headerBytes.Value, rewindableStream, cancellationToken);
uint headerBytes = reader.ReadUInt32();
header = ReadHeader(headerBytes, reader);
if (header is null)
{
yield break;
@@ -65,10 +71,10 @@ namespace SharpCompress.Common.Zip
{
rewindableStream.StartRecording();
}
uint nextHeaderBytes = await rewindableStream.ReadUInt32(cancellationToken);
uint nextHeaderBytes = reader.ReadUInt32();
// Check if next data is PostDataDescriptor, streamed file with 0 length
header.HasData = nextHeaderBytes != POST_DATA_DESCRIPTOR;
header.HasData = !IsHeader(nextHeaderBytes);
rewindableStream.Rewind(!isRecording);
}
else // We are not streaming and compressed size is 0, we have no data

View File

@@ -2,15 +2,13 @@
using System.Buffers.Binary;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Compressors.Deflate64;
using SharpCompress.Compressors.LZMA;
//using SharpCompress.Compressors.PPMd;
using SharpCompress.Compressors.PPMd;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip
@@ -30,13 +28,13 @@ namespace SharpCompress.Common.Zip
internal override string FilePartName => Header.Name;
internal override async ValueTask<Stream> GetCompressedStreamAsync(CancellationToken cancellationToken)
internal override Stream GetCompressedStream()
{
if (!Header.HasData)
{
return Stream.Null;
}
Stream decompressionStream = await CreateDecompressionStream(GetCryptoStream(CreateBaseStream()), Header.CompressionMethod, cancellationToken);
Stream decompressionStream = CreateDecompressionStream(GetCryptoStream(CreateBaseStream()), Header.CompressionMethod);
if (LeaveStreamOpen)
{
return new NonDisposingStream(decompressionStream);
@@ -57,7 +55,7 @@ namespace SharpCompress.Common.Zip
protected bool LeaveStreamOpen => FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor) || Header.IsZip64;
protected async ValueTask<Stream> CreateDecompressionStream(Stream stream, ZipCompressionMethod method, CancellationToken cancellationToken)
protected Stream CreateDecompressionStream(Stream stream, ZipCompressionMethod method)
{
switch (method)
{
@@ -75,29 +73,30 @@ namespace SharpCompress.Common.Zip
}
case ZipCompressionMethod.BZip2:
{
return await BZip2Stream.CreateAsync(stream, CompressionMode.Decompress, false, cancellationToken);
}
return new BZip2Stream(stream, CompressionMode.Decompress, false);
}
case ZipCompressionMethod.LZMA:
{
if (FlagUtility.HasFlag(Header.Flags, HeaderFlags.Encrypted))
{
throw new NotSupportedException("LZMA with pkware encryption.");
}
await stream.ReadUInt16(cancellationToken); //LZMA version
var props = new byte[await stream.ReadUInt16(cancellationToken)];
await stream.ReadAsync(props, 0, props.Length, cancellationToken);
return await LzmaStream.CreateAsync(props, stream,
var reader = new BinaryReader(stream);
reader.ReadUInt16(); //LZMA version
var props = new byte[reader.ReadUInt16()];
reader.Read(props, 0, props.Length);
return new LzmaStream(props, stream,
Header.CompressedSize > 0 ? Header.CompressedSize - 4 - props.Length : -1,
FlagUtility.HasFlag(Header.Flags, HeaderFlags.Bit1)
? -1
: (long)Header.UncompressedSize,
cancellationToken: cancellationToken);
: (long)Header.UncompressedSize);
}
/* case ZipCompressionMethod.PPMd:
{
var props = await stream.ReadBytes(2, cancellationToken);
case ZipCompressionMethod.PPMd:
{
Span<byte> props = stackalloc byte[2];
stream.ReadFully(props);
return new PpmdStream(new PpmdProperties(props), stream, false);
} */
}
case ZipCompressionMethod.WinzipAes:
{
ExtraData? data = Header.Extra.SingleOrDefault(x => x.Type == ExtraDataType.WinZipAes);
@@ -121,7 +120,7 @@ namespace SharpCompress.Common.Zip
{
throw new InvalidFormatException("Unexpected vendor ID for WinZip AES metadata");
}
return await CreateDecompressionStream(stream, (ZipCompressionMethod)BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes.AsSpan(5)), cancellationToken);
return CreateDecompressionStream(stream, (ZipCompressionMethod)BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes.AsSpan(5)));
}
default:
{

View File

@@ -1,8 +1,6 @@
using System;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
@@ -32,15 +30,15 @@ namespace SharpCompress.Common.Zip
this._archiveEncoding = archiveEncoding;
}
protected async ValueTask<ZipHeader?> ReadHeader(uint headerBytes, Stream stream, CancellationToken cancellationToken, bool zip64 = false)
protected ZipHeader? ReadHeader(uint headerBytes, BinaryReader reader, bool zip64 = false)
{
switch (headerBytes)
{
case ENTRY_HEADER_BYTES:
{
var entryHeader = new LocalEntryHeader(_archiveEncoding);
await entryHeader.Read(stream, cancellationToken);
await LoadHeader(entryHeader, stream, cancellationToken);
entryHeader.Read(reader);
LoadHeader(entryHeader, reader.BaseStream);
_lastEntryHeader = entryHeader;
return entryHeader;
@@ -48,20 +46,20 @@ namespace SharpCompress.Common.Zip
case DIRECTORY_START_HEADER_BYTES:
{
var entry = new DirectoryEntryHeader(_archiveEncoding);
await entry.Read(stream, cancellationToken);
entry.Read(reader);
return entry;
}
case POST_DATA_DESCRIPTOR:
{
if (FlagUtility.HasFlag(_lastEntryHeader!.Flags, HeaderFlags.UsePostDataDescriptor))
{
_lastEntryHeader.Crc = await stream.ReadUInt32(cancellationToken);
_lastEntryHeader.CompressedSize = zip64 ? (long)await stream.ReadUInt64(cancellationToken) : await stream.ReadUInt32(cancellationToken);
_lastEntryHeader.UncompressedSize = zip64 ? (long)await stream.ReadUInt64(cancellationToken) : await stream.ReadUInt32(cancellationToken);
_lastEntryHeader.Crc = reader.ReadUInt32();
_lastEntryHeader.CompressedSize = zip64 ? (long)reader.ReadUInt64() : reader.ReadUInt32();
_lastEntryHeader.UncompressedSize = zip64 ? (long)reader.ReadUInt64() : reader.ReadUInt32();
}
else
{
await stream.ReadBytes(zip64 ? 20 : 12, cancellationToken);
reader.ReadBytes(zip64 ? 20 : 12);
}
return null;
}
@@ -70,7 +68,7 @@ namespace SharpCompress.Common.Zip
case DIRECTORY_END_HEADER_BYTES:
{
var entry = new DirectoryEndHeader();
await entry.Read(stream, cancellationToken);
entry.Read(reader);
return entry;
}
case SPLIT_ARCHIVE_HEADER_BYTES:
@@ -80,13 +78,13 @@ namespace SharpCompress.Common.Zip
case ZIP64_END_OF_CENTRAL_DIRECTORY:
{
var entry = new Zip64DirectoryEndHeader();
await entry.Read(stream, cancellationToken);
entry.Read(reader);
return entry;
}
case ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR:
{
var entry = new Zip64DirectoryEndLocatorHeader();
await entry.Read(stream, cancellationToken);
entry.Read(reader);
return entry;
}
default:
@@ -112,7 +110,7 @@ namespace SharpCompress.Common.Zip
}
}
private async ValueTask LoadHeader(ZipFileEntry entryHeader, Stream stream, CancellationToken cancellationToken)
private void LoadHeader(ZipFileEntry entryHeader, Stream stream)
{
if (FlagUtility.HasFlag(entryHeader.Flags, HeaderFlags.Encrypted))
{
@@ -136,8 +134,10 @@ namespace SharpCompress.Common.Zip
{
var keySize = (WinzipAesKeySize)data.DataBytes[4];
var salt = await stream.ReadBytes(WinzipAesEncryptionData.KeyLengthInBytes(keySize) / 2, cancellationToken);
var passwordVerifyValue = await stream.ReadBytes(2, cancellationToken);
var salt = new byte[WinzipAesEncryptionData.KeyLengthInBytes(keySize) / 2];
var passwordVerifyValue = new byte[2];
stream.Read(salt, 0, salt.Length);
stream.Read(passwordVerifyValue, 0, 2);
entryHeader.WinzipAesEncryptionData =
new WinzipAesEncryptionData(keySize, salt, passwordVerifyValue, _password);

View File

@@ -1,13 +1,9 @@
using System;
using System.Buffers;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Compressors.BZip2
{
public sealed class BZip2Stream : AsyncStream
public sealed class BZip2Stream : Stream
{
private readonly Stream stream;
private bool isDisposed;
@@ -37,14 +33,17 @@ namespace SharpCompress.Compressors.BZip2
(stream as CBZip2OutputStream)?.Finish();
}
public override async ValueTask DisposeAsync()
protected override void Dispose(bool disposing)
{
if (isDisposed)
{
return;
}
isDisposed = true;
await stream.DisposeAsync();
if (disposing)
{
stream.Dispose();
}
}
public CompressionMode Mode { get; }
@@ -55,18 +54,23 @@ namespace SharpCompress.Compressors.BZip2
public override bool CanWrite => stream.CanWrite;
public override Task FlushAsync(CancellationToken cancellationToken)
public override void Flush()
{
return stream.FlushAsync(cancellationToken);
stream.Flush();
}
public override long Length => stream.Length;
public override long Position { get => stream.Position; set => stream.Position = value; }
public override ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken = default)
public override int Read(byte[] buffer, int offset, int count)
{
return stream.ReadAsync(buffer, cancellationToken);
return stream.Read(buffer, offset, count);
}
public override int ReadByte()
{
return stream.ReadByte();
}
public override long Seek(long offset, SeekOrigin origin)
@@ -79,14 +83,28 @@ namespace SharpCompress.Compressors.BZip2
stream.SetLength(value);
}
public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
#if !NET461 && !NETSTANDARD2_0
public override int Read(Span<byte> buffer)
{
return stream.WriteAsync(buffer, offset, count, cancellationToken);
return stream.Read(buffer);
}
public override ValueTask WriteAsync(ReadOnlyMemory<byte> buffer, CancellationToken cancellationToken = new CancellationToken())
public override void Write(ReadOnlySpan<byte> buffer)
{
return stream.WriteAsync(buffer, cancellationToken);
stream.Write(buffer);
}
#endif
public override void Write(byte[] buffer, int offset, int count)
{
stream.Write(buffer, offset, count);
}
public override void WriteByte(byte value)
{
stream.WriteByte(value);
}
/// <summary>
@@ -94,12 +112,11 @@ namespace SharpCompress.Compressors.BZip2
/// </summary>
/// <param name="stream"></param>
/// <returns></returns>
public static async ValueTask<bool> IsBZip2Async(Stream stream, CancellationToken cancellationToken)
public static bool IsBZip2(Stream stream)
{
using var rented = MemoryPool<byte>.Shared.Rent(2);
var chars = rented.Memory.Slice(0, 2);
await stream.ReadAsync(chars, cancellationToken);
if (chars.Length < 2 || chars.Span[0] != 'B' || chars.Span[1] != 'Z')
BinaryReader br = new BinaryReader(stream);
byte[] chars = br.ReadBytes(2);
if (chars.Length < 2 || chars[0] != 'B' || chars[1] != 'Z')
{
return false;
}

View File

@@ -27,13 +27,10 @@
using System;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Deflate
{
public class DeflateStream : AsyncStream
public class DeflateStream : Stream
{
private readonly ZlibBaseStream _baseStream;
private bool _disposed;
@@ -219,25 +216,35 @@ namespace SharpCompress.Compressors.Deflate
/// <remarks>
/// This may or may not result in a <c>Close()</c> call on the captive stream.
/// </remarks>
public override async ValueTask DisposeAsync()
protected override void Dispose(bool disposing)
{
if (!_disposed)
try
{
await _baseStream.DisposeAsync();
_disposed = true;
if (!_disposed)
{
if (disposing)
{
_baseStream?.Dispose();
}
_disposed = true;
}
}
finally
{
base.Dispose(disposing);
}
}
/// <summary>
/// Flush the stream.
/// </summary>
public override async Task FlushAsync(CancellationToken cancellationToken)
public override void Flush()
{
if (_disposed)
{
throw new ObjectDisposedException("DeflateStream");
}
await _baseStream.FlushAsync(cancellationToken);
_baseStream.Flush();
}
/// <summary>
@@ -266,14 +273,24 @@ namespace SharpCompress.Compressors.Deflate
/// <param name="offset">the offset within that data array to put the first byte read.</param>
/// <param name="count">the number of bytes to read.</param>
/// <returns>the number of bytes actually read</returns>
public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
public override int Read(byte[] buffer, int offset, int count)
{
if (_disposed)
{
throw new ObjectDisposedException("DeflateStream");
}
return await _baseStream.ReadAsync(buffer, offset, count, cancellationToken);
return _baseStream.Read(buffer, offset, count);
}
public override int ReadByte()
{
if (_disposed)
{
throw new ObjectDisposedException("DeflateStream");
}
return _baseStream.ReadByte();
}
/// <summary>
/// Calling this method always throws a <see cref="NotImplementedException"/>.
/// </summary>
@@ -323,13 +340,22 @@ namespace SharpCompress.Compressors.Deflate
/// <param name="buffer">The buffer holding data to write to the stream.</param>
/// <param name="offset">the offset within that data array to find the first byte to write.</param>
/// <param name="count">the number of bytes to write.</param>
public override async Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
public override void Write(byte[] buffer, int offset, int count)
{
if (_disposed)
{
throw new ObjectDisposedException("DeflateStream");
}
await _baseStream.WriteAsync(buffer, offset, count, cancellationToken);
_baseStream.Write(buffer, offset, count);
}
public override void WriteByte(byte value)
{
if (_disposed)
{
throw new ObjectDisposedException("DeflateStream");
}
_baseStream.WriteByte(value);
}
#endregion

View File

@@ -27,25 +27,21 @@
// ------------------------------------------------------------------
using System;
using System.Buffers;
using System.Buffers.Binary;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Deflate
{
public class GZipStream : AsyncStream
public class GZipStream : Stream
{
private static readonly DateTime UNIX_EPOCH = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
internal static readonly DateTime UNIX_EPOCH = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
private string? _comment;
private string? _fileName;
private DateTime? _lastModified;
private readonly ZlibBaseStream _baseStream;
internal ZlibBaseStream BaseStream;
private bool _disposed;
private bool _firstReadDone;
private int _headerByteCount;
@@ -64,7 +60,7 @@ namespace SharpCompress.Compressors.Deflate
public GZipStream(Stream stream, CompressionMode mode, CompressionLevel level, Encoding encoding)
{
_baseStream = new ZlibBaseStream(stream, mode, level, ZlibStreamFlavor.GZIP, encoding);
BaseStream = new ZlibBaseStream(stream, mode, level, ZlibStreamFlavor.GZIP, encoding);
_encoding = encoding;
}
@@ -72,27 +68,27 @@ namespace SharpCompress.Compressors.Deflate
public virtual FlushType FlushMode
{
get => (_baseStream._flushMode);
get => (BaseStream._flushMode);
set
{
if (_disposed)
{
throw new ObjectDisposedException("GZipStream");
}
_baseStream._flushMode = value;
BaseStream._flushMode = value;
}
}
public int BufferSize
{
get => _baseStream._bufferSize;
get => BaseStream._bufferSize;
set
{
if (_disposed)
{
throw new ObjectDisposedException("GZipStream");
}
if (_baseStream._workingBuffer != null)
if (BaseStream._workingBuffer != null)
{
throw new ZlibException("The working buffer is already set.");
}
@@ -102,13 +98,13 @@ namespace SharpCompress.Compressors.Deflate
String.Format("Don't be silly. {0} bytes?? Use a bigger buffer, at least {1}.", value,
ZlibConstants.WorkingBufferSizeMin));
}
_baseStream._bufferSize = value;
BaseStream._bufferSize = value;
}
}
internal virtual long TotalIn => _baseStream._z.TotalBytesIn;
internal virtual long TotalIn => BaseStream._z.TotalBytesIn;
internal virtual long TotalOut => _baseStream._z.TotalBytesOut;
internal virtual long TotalOut => BaseStream._z.TotalBytesOut;
#endregion
@@ -128,7 +124,7 @@ namespace SharpCompress.Compressors.Deflate
{
throw new ObjectDisposedException("GZipStream");
}
return _baseStream._stream.CanRead;
return BaseStream._stream.CanRead;
}
}
@@ -154,7 +150,7 @@ namespace SharpCompress.Compressors.Deflate
{
throw new ObjectDisposedException("GZipStream");
}
return _baseStream._stream.CanWrite;
return BaseStream._stream.CanWrite;
}
}
@@ -178,13 +174,13 @@ namespace SharpCompress.Compressors.Deflate
{
get
{
if (_baseStream._streamMode == ZlibBaseStream.StreamMode.Writer)
if (BaseStream._streamMode == ZlibBaseStream.StreamMode.Writer)
{
return _baseStream._z.TotalBytesOut + _headerByteCount;
return BaseStream._z.TotalBytesOut + _headerByteCount;
}
if (_baseStream._streamMode == ZlibBaseStream.StreamMode.Reader)
if (BaseStream._streamMode == ZlibBaseStream.StreamMode.Reader)
{
return _baseStream._z.TotalBytesIn + _baseStream._gzipHeaderByteCount;
return BaseStream._z.TotalBytesIn + BaseStream._gzipHeaderByteCount;
}
return 0;
}
@@ -198,29 +194,36 @@ namespace SharpCompress.Compressors.Deflate
/// <remarks>
/// This may or may not result in a <c>Close()</c> call on the captive stream.
/// </remarks>
public override async ValueTask DisposeAsync()
protected override void Dispose(bool disposing)
{
try
{
if (!_disposed)
{
if (_baseStream is not null)
if (disposing && (BaseStream != null))
{
await _baseStream.DisposeAsync();
Crc32 = _baseStream.Crc32;
BaseStream.Dispose();
Crc32 = BaseStream.Crc32;
}
_disposed = true;
}
}
finally
{
base.Dispose(disposing);
}
}
/// <summary>
/// Flush the stream.
/// </summary>
public override Task FlushAsync(CancellationToken cancellationToken)
public override void Flush()
{
if (_disposed)
{
throw new ObjectDisposedException("GZipStream");
}
return _baseStream.FlushAsync(cancellationToken);
BaseStream.Flush();
}
/// <summary>
@@ -254,13 +257,13 @@ namespace SharpCompress.Compressors.Deflate
/// <param name="offset">the offset within that data array to put the first byte read.</param>
/// <param name="count">the number of bytes to read.</param>
/// <returns>the number of bytes actually read</returns>
public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
public override int Read(byte[] buffer, int offset, int count)
{
if (_disposed)
{
throw new ObjectDisposedException("GZipStream");
}
int n = await _baseStream.ReadAsync(buffer, offset, count, cancellationToken);
int n = BaseStream.Read(buffer, offset, count);
// Console.WriteLine("GZipStream::Read(buffer, off({0}), c({1}) = {2}", offset, count, n);
// Console.WriteLine( Util.FormatByteArray(buffer, offset, n) );
@@ -268,9 +271,9 @@ namespace SharpCompress.Compressors.Deflate
if (!_firstReadDone)
{
_firstReadDone = true;
FileName = _baseStream._GzipFileName;
Comment = _baseStream._GzipComment;
LastModified = _baseStream._GzipMtime;
FileName = BaseStream._GzipFileName;
Comment = BaseStream._GzipComment;
LastModified = BaseStream._GzipMtime;
}
return n;
}
@@ -317,19 +320,19 @@ namespace SharpCompress.Compressors.Deflate
/// <param name="buffer">The buffer holding data to write to the stream.</param>
/// <param name="offset">the offset within that data array to find the first byte to write.</param>
/// <param name="count">the number of bytes to write.</param>
public override async Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
public override void Write(byte[] buffer, int offset, int count)
{
if (_disposed)
{
throw new ObjectDisposedException("GZipStream");
}
if (_baseStream._streamMode == ZlibBaseStream.StreamMode.Undefined)
if (BaseStream._streamMode == ZlibBaseStream.StreamMode.Undefined)
{
//Console.WriteLine("GZipStream: First write");
if (_baseStream._wantCompress)
if (BaseStream._wantCompress)
{
// first write in compression, therefore, emit the GZIP header
_headerByteCount = await EmitHeaderAsync();
_headerByteCount = EmitHeader();
}
else
{
@@ -337,7 +340,7 @@ namespace SharpCompress.Compressors.Deflate
}
}
await _baseStream.WriteAsync(buffer, offset, count, cancellationToken);
BaseStream.Write(buffer, offset, count);
}
#endregion Stream methods
@@ -402,7 +405,7 @@ namespace SharpCompress.Compressors.Deflate
public int Crc32 { get; private set; }
private async ValueTask<int> EmitHeaderAsync()
private int EmitHeader()
{
byte[]? commentBytes = (Comment is null) ? null
: _encoding.GetBytes(Comment);
@@ -471,7 +474,7 @@ namespace SharpCompress.Compressors.Deflate
header[i++] = 0; // terminate
}
await _baseStream._stream.WriteAsync(header, 0, header.Length);
BaseStream._stream.Write(header, 0, header.Length);
return header.Length; // bytes written
}

View File

@@ -27,15 +27,11 @@
// ------------------------------------------------------------------
using System;
using System.Buffers;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Tar.Headers;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Deflate
{
@@ -46,18 +42,18 @@ namespace SharpCompress.Compressors.Deflate
GZIP = 1952
}
internal class ZlibBaseStream : AsyncStream
internal class ZlibBaseStream : Stream
{
protected internal ZlibCodec _z; // deferred init... new ZlibCodec();
protected internal StreamMode _streamMode = StreamMode.Undefined;
protected internal FlushType _flushMode;
private readonly ZlibStreamFlavor _flavor;
private readonly CompressionMode _compressionMode;
private readonly CompressionLevel _level;
protected internal ZlibStreamFlavor _flavor;
protected internal CompressionMode _compressionMode;
protected internal CompressionLevel _level;
protected internal byte[] _workingBuffer;
protected internal int _bufferSize = ZlibConstants.WorkingBufferSizeDefault;
private readonly byte[] _buf1 = new byte[1];
protected internal byte[] _buf1 = new byte[1];
protected internal Stream _stream;
protected internal CompressionStrategy Strategy = CompressionStrategy.Default;
@@ -120,13 +116,19 @@ namespace SharpCompress.Compressors.Deflate
}
}
private byte[] workingBuffer => _workingBuffer ??= new byte[_bufferSize];
private byte[] workingBuffer
{
get => _workingBuffer ??= new byte[_bufferSize];
}
public override async Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
public override void Write(byte[] buffer, int offset, int count)
{
// workitem 7159
// calculate the CRC on the unccompressed data (before writing)
crc?.SlurpBlock(buffer, offset, count);
if (crc != null)
{
crc.SlurpBlock(buffer, offset, count);
}
if (_streamMode == StreamMode.Undefined)
{
@@ -146,7 +148,7 @@ namespace SharpCompress.Compressors.Deflate
z.InputBuffer = buffer;
_z.NextIn = offset;
_z.AvailableBytesIn = count;
var done = false;
bool done = false;
do
{
_z.OutputBuffer = workingBuffer;
@@ -161,7 +163,7 @@ namespace SharpCompress.Compressors.Deflate
}
//if (_workingBuffer.Length - _z.AvailableBytesOut > 0)
await _stream.WriteAsync(_workingBuffer, 0, _workingBuffer.Length - _z.AvailableBytesOut, cancellationToken);
_stream.Write(_workingBuffer, 0, _workingBuffer.Length - _z.AvailableBytesOut);
done = _z.AvailableBytesIn == 0 && _z.AvailableBytesOut != 0;
@@ -174,7 +176,7 @@ namespace SharpCompress.Compressors.Deflate
while (!done);
}
private async Task FinishAsync()
private void finish()
{
if (_z is null)
{
@@ -183,7 +185,7 @@ namespace SharpCompress.Compressors.Deflate
if (_streamMode == StreamMode.Writer)
{
var done = false;
bool done = false;
do
{
_z.OutputBuffer = workingBuffer;
@@ -198,14 +200,14 @@ namespace SharpCompress.Compressors.Deflate
string verb = (_wantCompress ? "de" : "in") + "flating";
if (_z.Message is null)
{
throw new ZlibException($"{verb}: (rc = {rc})");
throw new ZlibException(String.Format("{0}: (rc = {1})", verb, rc));
}
throw new ZlibException(verb + ": " + _z.Message);
}
if (_workingBuffer.Length - _z.AvailableBytesOut > 0)
{
await _stream.WriteAsync(_workingBuffer, 0, _workingBuffer.Length - _z.AvailableBytesOut);
_stream.Write(_workingBuffer, 0, _workingBuffer.Length - _z.AvailableBytesOut);
}
done = _z.AvailableBytesIn == 0 && _z.AvailableBytesOut != 0;
@@ -218,7 +220,7 @@ namespace SharpCompress.Compressors.Deflate
}
while (!done);
await FlushAsync();
Flush();
// workitem 7159
if (_flavor == ZlibStreamFlavor.GZIP)
@@ -226,13 +228,12 @@ namespace SharpCompress.Compressors.Deflate
if (_wantCompress)
{
// Emit the GZIP trailer: CRC32 and size mod 2^32
using var rented = MemoryPool<byte>.Shared.Rent(4);
var intBuf = rented.Memory.Slice(0, 4);
BinaryPrimitives.WriteInt32LittleEndian(intBuf.Span, crc.Crc32Result);
await _stream.WriteAsync(intBuf, CancellationToken.None);
Span<byte> intBuf = stackalloc byte[4];
BinaryPrimitives.WriteInt32LittleEndian(intBuf, crc.Crc32Result);
_stream.Write(intBuf);
int c2 = (int)(crc.TotalBytesRead & 0x00000000FFFFFFFF);
BinaryPrimitives.WriteInt32LittleEndian(intBuf.Span, c2);
await _stream.WriteAsync(intBuf, CancellationToken.None);
BinaryPrimitives.WriteInt32LittleEndian(intBuf, c2);
_stream.Write(intBuf);
}
else
{
@@ -255,41 +256,44 @@ namespace SharpCompress.Compressors.Deflate
}
// Read and potentially verify the GZIP trailer: CRC32 and size mod 2^32
using var rented = MemoryPool<byte>.Shared.Rent(8);
var trailer = rented.Memory.Slice(0, 8);
Span<byte> trailer = stackalloc byte[8];
// workitem 8679
if (_z.AvailableBytesIn != 8)
{
// Make sure we have read to the end of the stream
_z.InputBuffer.AsSpan(_z.NextIn, _z.AvailableBytesIn).CopyTo(trailer.Span);
_z.InputBuffer.AsSpan(_z.NextIn, _z.AvailableBytesIn).CopyTo(trailer);
int bytesNeeded = 8 - _z.AvailableBytesIn;
int bytesRead = await _stream.ReadAsync(trailer.Slice(_z.AvailableBytesIn, bytesNeeded));
int bytesRead = _stream.Read(trailer.Slice(_z.AvailableBytesIn, bytesNeeded));
if (bytesNeeded != bytesRead)
{
throw new ZlibException($"Protocol error. AvailableBytesIn={_z.AvailableBytesIn + bytesRead}, expected 8");
throw new ZlibException(String.Format(
"Protocol error. AvailableBytesIn={0}, expected 8",
_z.AvailableBytesIn + bytesRead));
}
}
else
{
_z.InputBuffer.AsSpan(_z.NextIn, trailer.Length).CopyTo(trailer.Span);
_z.InputBuffer.AsSpan(_z.NextIn, trailer.Length).CopyTo(trailer);
}
Int32 crc32_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer.Span);
Int32 crc32_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer);
Int32 crc32_actual = crc.Crc32Result;
Int32 isize_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer.Span.Slice(4));
Int32 isize_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer.Slice(4));
Int32 isize_actual = (Int32)(_z.TotalBytesOut & 0x00000000FFFFFFFF);
if (crc32_actual != crc32_expected)
{
throw new ZlibException(
$"Bad CRC32 in GZIP stream. (actual({crc32_actual:X8})!=expected({crc32_expected:X8}))");
String.Format("Bad CRC32 in GZIP stream. (actual({0:X8})!=expected({1:X8}))",
crc32_actual, crc32_expected));
}
if (isize_actual != isize_expected)
{
throw new ZlibException(
$"Bad size in GZIP stream. (actual({isize_actual})!=expected({isize_expected}))");
String.Format("Bad size in GZIP stream. (actual({0})!=expected({1}))", isize_actual,
isize_expected));
}
}
else
@@ -300,7 +304,7 @@ namespace SharpCompress.Compressors.Deflate
}
}
private void End()
private void end()
{
if (z is null)
{
@@ -317,32 +321,36 @@ namespace SharpCompress.Compressors.Deflate
_z = null;
}
public override async ValueTask DisposeAsync()
protected override void Dispose(bool disposing)
{
if (_isDisposed)
if (isDisposed)
{
return;
}
_isDisposed = true;
isDisposed = true;
base.Dispose(disposing);
if (disposing)
{
if (_stream is null)
{
return;
}
try
{
await FinishAsync();
finish();
}
finally
{
End();
_stream?.DisposeAsync();
end();
_stream?.Dispose();
_stream = null;
}
}
}
public override Task FlushAsync(CancellationToken cancellationToken)
public override void Flush()
{
return _stream.FlushAsync(cancellationToken);
_stream.Flush();
}
public override Int64 Seek(Int64 offset, SeekOrigin origin)
@@ -357,7 +365,7 @@ namespace SharpCompress.Compressors.Deflate
_stream.SetLength(value);
}
/*
#if NOT
public int Read()
{
if (Read(_buf1, 0, 1) == 0)
@@ -367,19 +375,19 @@ namespace SharpCompress.Compressors.Deflate
crc.SlurpBlock(_buf1,0,1);
return (_buf1[0] & 0xFF);
}
*/
#endif
private bool _nomoreinput;
private bool _isDisposed;
private bool nomoreinput;
private bool isDisposed;
private async Task<string> ReadZeroTerminatedStringAsync()
private string ReadZeroTerminatedString()
{
var list = new List<byte>();
var done = false;
bool done = false;
do
{
// workitem 7740
int n = await _stream.ReadAsync(_buf1, 0, 1);
int n = _stream.Read(_buf1, 0, 1);
if (n != 1)
{
throw new ZlibException("Unexpected EOF reading GZIP header.");
@@ -398,14 +406,13 @@ namespace SharpCompress.Compressors.Deflate
return _encoding.GetString(buffer, 0, buffer.Length);
}
private async Task<int> ReadAndValidateGzipHeaderAsync(CancellationToken cancellationToken)
private int _ReadAndValidateGzipHeader()
{
var totalBytesRead = 0;
int totalBytesRead = 0;
// read the header on the first read
using var rented = MemoryPool<byte>.Shared.Rent(10);
var header = rented.Memory.Slice(0, 10);
int n = await _stream.ReadAsync(header, cancellationToken);
Span<byte> header = stackalloc byte[10];
int n = _stream.Read(header);
// workitem 8501: handle edge case (decompress empty stream)
if (n == 0)
@@ -418,46 +425,46 @@ namespace SharpCompress.Compressors.Deflate
throw new ZlibException("Not a valid GZIP stream.");
}
if (header.Span[0] != 0x1F || header.Span[1] != 0x8B || header.Span[2] != 8)
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
throw new ZlibException("Bad GZIP header.");
}
int timet = BinaryPrimitives.ReadInt32LittleEndian(header.Span.Slice(4));
int timet = BinaryPrimitives.ReadInt32LittleEndian(header.Slice(4));
_GzipMtime = TarHeader.EPOCH.AddSeconds(timet);
totalBytesRead += n;
if ((header.Span[3] & 0x04) == 0x04)
if ((header[3] & 0x04) == 0x04)
{
// read and discard extra field
n = await _stream.ReadAsync(header.Slice(0, 2), cancellationToken); // 2-byte length field
n = _stream.Read(header.Slice(0, 2)); // 2-byte length field
totalBytesRead += n;
short extraLength = (short)(header.Span[0] + header.Span[1] * 256);
short extraLength = (short)(header[0] + header[1] * 256);
byte[] extra = new byte[extraLength];
n = await _stream.ReadAsync(extra, 0, extra.Length, cancellationToken);
n = _stream.Read(extra, 0, extra.Length);
if (n != extraLength)
{
throw new ZlibException("Unexpected end-of-file reading GZIP header.");
}
totalBytesRead += n;
}
if ((header.Span[3] & 0x08) == 0x08)
if ((header[3] & 0x08) == 0x08)
{
_GzipFileName = await ReadZeroTerminatedStringAsync();
_GzipFileName = ReadZeroTerminatedString();
}
if ((header.Span[3] & 0x10) == 0x010)
if ((header[3] & 0x10) == 0x010)
{
_GzipComment = await ReadZeroTerminatedStringAsync();
_GzipComment = ReadZeroTerminatedString();
}
if ((header.Span[3] & 0x02) == 0x02)
if ((header[3] & 0x02) == 0x02)
{
await ReadAsync(_buf1, 0, 1, cancellationToken); // CRC16, ignore
Read(_buf1, 0, 1); // CRC16, ignore
}
return totalBytesRead;
}
public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
public override Int32 Read(Byte[] buffer, Int32 offset, Int32 count)
{
// According to MS documentation, any implementation of the IO.Stream.Read function must:
// (a) throw an exception if offset & count reference an invalid part of the buffer,
@@ -480,7 +487,7 @@ namespace SharpCompress.Compressors.Deflate
z.AvailableBytesIn = 0;
if (_flavor == ZlibStreamFlavor.GZIP)
{
_gzipHeaderByteCount = await ReadAndValidateGzipHeaderAsync(cancellationToken);
_gzipHeaderByteCount = _ReadAndValidateGzipHeader();
// workitem 8501: handle edge case (decompress empty stream)
if (_gzipHeaderByteCount == 0)
@@ -499,7 +506,7 @@ namespace SharpCompress.Compressors.Deflate
{
return 0;
}
if (_nomoreinput && _wantCompress)
if (nomoreinput && _wantCompress)
{
return 0; // workitem 8557
}
@@ -520,7 +527,7 @@ namespace SharpCompress.Compressors.Deflate
throw new ArgumentOutOfRangeException(nameof(count));
}
var rc = 0;
int rc = 0;
// set up the output of the deflate/inflate codec:
_z.OutputBuffer = buffer;
@@ -535,14 +542,14 @@ namespace SharpCompress.Compressors.Deflate
do
{
// need data in _workingBuffer in order to deflate/inflate. Here, we check if we have any.
if ((_z.AvailableBytesIn == 0) && (!_nomoreinput))
if ((_z.AvailableBytesIn == 0) && (!nomoreinput))
{
// No data available, so try to Read data from the captive stream.
_z.NextIn = 0;
_z.AvailableBytesIn = await _stream.ReadAsync(_workingBuffer, 0, _workingBuffer.Length, cancellationToken);
_z.AvailableBytesIn = _stream.Read(_workingBuffer, 0, _workingBuffer.Length);
if (_z.AvailableBytesIn == 0)
{
_nomoreinput = true;
nomoreinput = true;
}
}
@@ -551,22 +558,23 @@ namespace SharpCompress.Compressors.Deflate
? _z.Deflate(_flushMode)
: _z.Inflate(_flushMode);
if (_nomoreinput && (rc == ZlibConstants.Z_BUF_ERROR))
if (nomoreinput && (rc == ZlibConstants.Z_BUF_ERROR))
{
return 0;
}
if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
{
throw new ZlibException($"{(_wantCompress ? "de" : "in")}flating: rc={rc} msg={_z.Message}");
throw new ZlibException(String.Format("{0}flating: rc={1} msg={2}", (_wantCompress ? "de" : "in"),
rc, _z.Message));
}
if ((_nomoreinput || rc == ZlibConstants.Z_STREAM_END) && (_z.AvailableBytesOut == count))
if ((nomoreinput || rc == ZlibConstants.Z_STREAM_END) && (_z.AvailableBytesOut == count))
{
break; // nothing more to read
}
} //while (_z.AvailableBytesOut == count && rc == ZlibConstants.Z_OK);
while (_z.AvailableBytesOut > 0 && !_nomoreinput && rc == ZlibConstants.Z_OK);
while (_z.AvailableBytesOut > 0 && !nomoreinput && rc == ZlibConstants.Z_OK);
// workitem 8557
// is there more room in output?
@@ -578,7 +586,7 @@ namespace SharpCompress.Compressors.Deflate
}
// are we completely done reading?
if (_nomoreinput)
if (nomoreinput)
{
// and in compression?
if (_wantCompress)
@@ -589,7 +597,7 @@ namespace SharpCompress.Compressors.Deflate
if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
{
throw new ZlibException($"Deflating: rc={rc} msg={_z.Message}");
throw new ZlibException(String.Format("Deflating: rc={0} msg={1}", rc, _z.Message));
}
}
}
@@ -598,7 +606,10 @@ namespace SharpCompress.Compressors.Deflate
rc = (count - _z.AvailableBytesOut);
// calculate CRC after reading
crc?.SlurpBlock(buffer, offset, rc);
if (crc != null)
{
crc.SlurpBlock(buffer, offset, rc);
}
return rc;
}

View File

@@ -28,13 +28,10 @@
using System;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Deflate
{
public class ZlibStream : AsyncStream
public class ZlibStream : Stream
{
private readonly ZlibBaseStream _baseStream;
private bool _disposed;
@@ -207,25 +204,35 @@ namespace SharpCompress.Compressors.Deflate
/// <remarks>
/// This may or may not result in a <c>Close()</c> call on the captive stream.
/// </remarks>
public override async ValueTask DisposeAsync()
protected override void Dispose(bool disposing)
{
if (!_disposed)
try
{
await _baseStream.DisposeAsync();
_disposed = true;
if (!_disposed)
{
if (disposing)
{
_baseStream?.Dispose();
}
_disposed = true;
}
}
finally
{
base.Dispose(disposing);
}
}
/// <summary>
/// Flush the stream.
/// </summary>
public override Task FlushAsync(CancellationToken cancellationToken)
public override void Flush()
{
if (_disposed)
{
throw new ObjectDisposedException("ZlibStream");
}
return _baseStream.FlushAsync(cancellationToken);
_baseStream.Flush();
}
/// <summary>
@@ -254,13 +261,22 @@ namespace SharpCompress.Compressors.Deflate
/// <param name="buffer">The buffer into which the read data should be placed.</param>
/// <param name="offset">the offset within that data array to put the first byte read.</param>
/// <param name="count">the number of bytes to read.</param>
public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
public override int Read(byte[] buffer, int offset, int count)
{
if (_disposed)
{
throw new ObjectDisposedException("ZlibStream");
}
return await _baseStream.ReadAsync(buffer, offset, count, cancellationToken);
return _baseStream.Read(buffer, offset, count);
}
public override int ReadByte()
{
if (_disposed)
{
throw new ObjectDisposedException("ZlibStream");
}
return _baseStream.ReadByte();
}
/// <summary>
@@ -305,14 +321,24 @@ namespace SharpCompress.Compressors.Deflate
/// <param name="buffer">The buffer holding data to write to the stream.</param>
/// <param name="offset">the offset within that data array to find the first byte to write.</param>
/// <param name="count">the number of bytes to write.</param>
public override async Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
public override void Write(byte[] buffer, int offset, int count)
{
if (_disposed)
{
throw new ObjectDisposedException("ZlibStream");
}
await _baseStream.WriteAsync(buffer, offset, count, cancellationToken);
_baseStream.Write(buffer, offset, count);
}
public override void WriteByte(byte value)
{
if (_disposed)
{
throw new ObjectDisposedException("ZlibStream");
}
_baseStream.WriteByte(value);
}
#endregion System.IO.Stream methods
}
}

View File

@@ -1,7 +1,5 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.SevenZip;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
@@ -93,9 +91,8 @@ namespace SharpCompress.Compressors.LZMA
}
}
private static async ValueTask<Stream> CreateDecoderStream(Stream[] packStreams, long[] packSizes, Stream[] outStreams,
CFolder folderInfo, int coderIndex, IPasswordProvider pass,
CancellationToken cancellationToken)
private static Stream CreateDecoderStream(Stream[] packStreams, long[] packSizes, Stream[] outStreams,
CFolder folderInfo, int coderIndex, IPasswordProvider pass)
{
var coderInfo = folderInfo._coders[coderIndex];
if (coderInfo._numOutStreams != 1)
@@ -130,8 +127,8 @@ namespace SharpCompress.Compressors.LZMA
}
int otherCoderIndex = FindCoderIndexForOutStreamIndex(folderInfo, pairedOutIndex);
inStreams[i] = await CreateDecoderStream(packStreams, packSizes, outStreams, folderInfo, otherCoderIndex,
pass, cancellationToken);
inStreams[i] = CreateDecoderStream(packStreams, packSizes, outStreams, folderInfo, otherCoderIndex,
pass);
//inStreamSizes[i] = folderInfo.UnpackSizes[pairedOutIndex];
@@ -157,11 +154,11 @@ namespace SharpCompress.Compressors.LZMA
}
long unpackSize = folderInfo._unpackSizes[outStreamId];
return await DecoderRegistry.CreateDecoderStream(coderInfo._methodId, inStreams, coderInfo._props, pass, unpackSize, cancellationToken);
return DecoderRegistry.CreateDecoderStream(coderInfo._methodId, inStreams, coderInfo._props, pass, unpackSize);
}
internal static async ValueTask<Stream> CreateDecoderStream(Stream inStream, long startPos, long[] packSizes, CFolder folderInfo,
IPasswordProvider pass, CancellationToken cancellationToken)
internal static Stream CreateDecoderStream(Stream inStream, long startPos, long[] packSizes, CFolder folderInfo,
IPasswordProvider pass)
{
if (!folderInfo.CheckStructure())
{
@@ -179,7 +176,7 @@ namespace SharpCompress.Compressors.LZMA
int primaryCoderIndex, primaryOutStreamIndex;
FindPrimaryOutStreamIndex(folderInfo, out primaryCoderIndex, out primaryOutStreamIndex);
return await CreateDecoderStream(inStreams, packSizes, outStreams, folderInfo, primaryCoderIndex, pass, cancellationToken);
return CreateDecoderStream(inStreams, packSizes, outStreams, folderInfo, primaryCoderIndex, pass);
}
}
}

View File

@@ -1,7 +1,5 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.LZMA
{
@@ -61,8 +59,8 @@ namespace SharpCompress.Compressors.LZMA
/// <param name="progress">
/// callback progress reference.
/// </param>
ValueTask CodeAsync(Stream inStream, Stream outStream,
Int64 inSize, Int64 outSize, ICodeProgress progress, CancellationToken cancellationToken);
void Code(Stream inStream, Stream outStream,
Int64 inSize, Int64 outSize, ICodeProgress progress);
}
/*

View File

@@ -1,9 +1,6 @@
using System;
using System.Buffers;
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Crypto;
using SharpCompress.IO;
@@ -17,70 +14,61 @@ namespace SharpCompress.Compressors.LZMA
/// <summary>
/// Stream supporting the LZIP format, as documented at http://www.nongnu.org/lzip/manual/lzip_manual.html
/// </summary>
public sealed class LZipStream : AsyncStream
public sealed class LZipStream : Stream
{
#nullable disable
private Stream _stream;
#nullable enable
private CountingWritableSubStream? _countingWritableSubStream;
private readonly Stream _stream;
private readonly CountingWritableSubStream? _countingWritableSubStream;
private bool _disposed;
private bool _finished;
private long _writeCount;
private LZipStream()
public LZipStream(Stream stream, CompressionMode mode)
{
}
public static async ValueTask<LZipStream> CreateAsync(Stream stream, CompressionMode mode)
{
var lzip = new LZipStream();
lzip.Mode = mode;
Mode = mode;
if (mode == CompressionMode.Decompress)
{
int dSize = await ValidateAndReadSize(stream);
int dSize = ValidateAndReadSize(stream);
if (dSize == 0)
{
throw new IOException("Not an LZip stream");
}
byte[] properties = GetProperties(dSize);
lzip._stream = await LzmaStream.CreateAsync(properties, stream);
_stream = new LzmaStream(properties, stream);
}
else
{
//default
int dSize = 104 * 1024;
await WriteHeaderSizeAsync(stream);
WriteHeaderSize(stream);
lzip._countingWritableSubStream = new CountingWritableSubStream(stream);
lzip._stream = new Crc32Stream(new LzmaStream(new LzmaEncoderProperties(true, dSize), false, lzip._countingWritableSubStream));
_countingWritableSubStream = new CountingWritableSubStream(stream);
_stream = new Crc32Stream(new LzmaStream(new LzmaEncoderProperties(true, dSize), false, _countingWritableSubStream));
}
return lzip;
}
public async ValueTask FinishAsync()
public void Finish()
{
if (!_finished)
{
if (Mode == CompressionMode.Compress)
{
var crc32Stream = (Crc32Stream)_stream;
await crc32Stream.WrappedStream.DisposeAsync();
await crc32Stream.DisposeAsync();
crc32Stream.WrappedStream.Dispose();
crc32Stream.Dispose();
var compressedCount = _countingWritableSubStream!.Count;
byte[] intBuf = new byte[8];
Span<byte> intBuf = stackalloc byte[8];
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, crc32Stream.Crc);
await _countingWritableSubStream.WriteAsync(intBuf, 0, 4);
_countingWritableSubStream.Write(intBuf.Slice(0, 4));
BinaryPrimitives.WriteInt64LittleEndian(intBuf, _writeCount);
await _countingWritableSubStream.WriteAsync(intBuf, 0, 8);
_countingWritableSubStream.Write(intBuf);
//total with headers
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, compressedCount + 6 + 20);
await _countingWritableSubStream.WriteAsync(intBuf, 0, 8);
_countingWritableSubStream.Write(intBuf);
}
_finished = true;
}
@@ -88,18 +76,21 @@ namespace SharpCompress.Compressors.LZMA
#region Stream methods
public override async ValueTask DisposeAsync()
protected override void Dispose(bool disposing)
{
if (_disposed)
{
return;
}
_disposed = true;
await FinishAsync();
await _stream.DisposeAsync();
if (disposing)
{
Finish();
_stream.Dispose();
}
}
public CompressionMode Mode { get; private set; }
public CompressionMode Mode { get; }
public override bool CanRead => Mode == CompressionMode.Decompress;
@@ -107,38 +98,54 @@ namespace SharpCompress.Compressors.LZMA
public override bool CanWrite => Mode == CompressionMode.Compress;
public override Task FlushAsync(CancellationToken cancellationToken)
public override void Flush()
{
return _stream.FlushAsync(cancellationToken);
_stream.Flush();
}
// TODO: Both Length and Position are sometimes feasible, but would require
// reading the output length when we initialize.
public override long Length => throw new NotSupportedException();
public override long Length => throw new NotImplementedException();
public override long Position { get => throw new NotImplementedException(); set => throw new NotSupportedException(); }
public override long Position { get => throw new NotImplementedException(); set => throw new NotImplementedException(); }
public override ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken = new CancellationToken())
{
return _stream.ReadAsync(buffer, cancellationToken);
}
public override int Read(byte[] buffer, int offset, int count) => _stream.Read(buffer, offset, count);
public override int ReadByte() => _stream.ReadByte();
public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
public override void SetLength(long value) => throw new NotSupportedException();
public override void SetLength(long value) => throw new NotImplementedException();
public override async ValueTask WriteAsync(ReadOnlyMemory<byte> buffer, CancellationToken cancellationToken = new CancellationToken())
#if !NET461 && !NETSTANDARD2_0
public override int Read(Span<byte> buffer)
{
await _stream.WriteAsync(buffer, cancellationToken);
return _stream.Read(buffer);
}
public override void Write(ReadOnlySpan<byte> buffer)
{
_stream.Write(buffer);
_writeCount += buffer.Length;
}
public override async Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
#endif
public override void Write(byte[] buffer, int offset, int count)
{
await _stream.WriteAsync(buffer, offset, count, cancellationToken);
_stream.Write(buffer, offset, count);
_writeCount += count;
}
public override void WriteByte(byte value)
{
_stream.WriteByte(value);
++_writeCount;
}
#endregion
/// <summary>
@@ -148,14 +155,14 @@ namespace SharpCompress.Compressors.LZMA
/// </summary>
/// <param name="stream">The stream to read from. Must not be null.</param>
/// <returns><c>true</c> if the given stream is an LZip file, <c>false</c> otherwise.</returns>
public static async ValueTask<bool> IsLZipFileAsync(Stream stream) => await ValidateAndReadSize(stream) != 0;
public static bool IsLZipFile(Stream stream) => ValidateAndReadSize(stream) != 0;
/// <summary>
/// Reads the 6-byte header of the stream, and returns 0 if either the header
/// couldn't be read or it isn't a validate LZIP header, or the dictionary
/// size if it *is* a valid LZIP file.
/// </summary>
private static async ValueTask<int> ValidateAndReadSize(Stream stream)
public static int ValidateAndReadSize(Stream stream)
{
if (stream is null)
{
@@ -163,9 +170,8 @@ namespace SharpCompress.Compressors.LZMA
}
// Read the header
using var buffer = MemoryPool<byte>.Shared.Rent(6);
var header = buffer.Memory.Slice(0,6);
int n = await stream.ReadAsync(header);
Span<byte> header = stackalloc byte[6];
int n = stream.Read(header);
// TODO: Handle reading only part of the header?
@@ -174,18 +180,18 @@ namespace SharpCompress.Compressors.LZMA
return 0;
}
if (header.Span[0] != 'L' || header.Span[1] != 'Z' || header.Span[2] != 'I' || header.Span[3] != 'P' || header.Span[4] != 1 /* version 1 */)
if (header[0] != 'L' || header[1] != 'Z' || header[2] != 'I' || header[3] != 'P' || header[4] != 1 /* version 1 */)
{
return 0;
}
int basePower = header.Span[5] & 0x1F;
int subtractionNumerator = (header.Span[5] & 0xE0) >> 5;
int basePower = header[5] & 0x1F;
int subtractionNumerator = (header[5] & 0xE0) >> 5;
return (1 << basePower) - subtractionNumerator * (1 << (basePower - 4));
}
private static readonly byte[] headerBytes = new byte[6] { (byte)'L', (byte)'Z', (byte)'I', (byte)'P', 1, 113 };
public static async ValueTask WriteHeaderSizeAsync(Stream stream)
public static void WriteHeaderSize(Stream stream)
{
if (stream is null)
{
@@ -193,7 +199,7 @@ namespace SharpCompress.Compressors.LZMA
}
// hard coding the dictionary size encoding
await stream.WriteAsync(headerBytes, 0, 6);
stream.Write(headerBytes, 0, 6);
}
/// <summary>

View File

@@ -1,7 +1,7 @@
#nullable disable
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.LZMA.LZ;
using SharpCompress.Compressors.LZMA.RangeCoder;
@@ -11,11 +11,11 @@ namespace SharpCompress.Compressors.LZMA
{
private class LenDecoder
{
private BitDecoder _choice = new();
private BitDecoder _choice2 = new();
private BitDecoder _choice = new BitDecoder();
private BitDecoder _choice2 = new BitDecoder();
private readonly BitTreeDecoder[] _lowCoder = new BitTreeDecoder[Base.K_NUM_POS_STATES_MAX];
private readonly BitTreeDecoder[] _midCoder = new BitTreeDecoder[Base.K_NUM_POS_STATES_MAX];
private BitTreeDecoder _highCoder = new(Base.K_NUM_HIGH_LEN_BITS);
private BitTreeDecoder _highCoder = new BitTreeDecoder(Base.K_NUM_HIGH_LEN_BITS);
private uint _numPosStates;
public void Create(uint numPosStates)
@@ -40,21 +40,21 @@ namespace SharpCompress.Compressors.LZMA
_highCoder.Init();
}
public async ValueTask<uint> DecodeAsync(RangeCoder.Decoder rangeDecoder, uint posState, CancellationToken cancellationToken)
public uint Decode(RangeCoder.Decoder rangeDecoder, uint posState)
{
if (await _choice.DecodeAsync(rangeDecoder, cancellationToken) == 0)
if (_choice.Decode(rangeDecoder) == 0)
{
return await _lowCoder[posState].DecodeAsync(rangeDecoder, cancellationToken);
return _lowCoder[posState].Decode(rangeDecoder);
}
uint symbol = Base.K_NUM_LOW_LEN_SYMBOLS;
if (await _choice2.DecodeAsync(rangeDecoder, cancellationToken) == 0)
if (_choice2.Decode(rangeDecoder) == 0)
{
symbol += await _midCoder[posState].DecodeAsync(rangeDecoder, cancellationToken);
symbol += _midCoder[posState].Decode(rangeDecoder);
}
else
{
symbol += Base.K_NUM_MID_LEN_SYMBOLS;
symbol += await _highCoder.DecodeAsync(rangeDecoder, cancellationToken);
symbol += _highCoder.Decode(rangeDecoder);
}
return symbol;
}
@@ -79,31 +79,31 @@ namespace SharpCompress.Compressors.LZMA
}
}
public async ValueTask<byte> DecodeNormalAsync(RangeCoder.Decoder rangeDecoder, CancellationToken cancellationToken)
public byte DecodeNormal(RangeCoder.Decoder rangeDecoder)
{
uint symbol = 1;
do
{
symbol = (symbol << 1) | await _decoders[symbol].DecodeAsync(rangeDecoder, cancellationToken);
symbol = (symbol << 1) | _decoders[symbol].Decode(rangeDecoder);
}
while (symbol < 0x100);
return (byte)symbol;
}
public async ValueTask<byte> DecodeWithMatchByteAsync(RangeCoder.Decoder rangeDecoder, byte matchByte, CancellationToken cancellationToken)
public byte DecodeWithMatchByte(RangeCoder.Decoder rangeDecoder, byte matchByte)
{
uint symbol = 1;
do
{
uint matchBit = (uint)(matchByte >> 7) & 1;
matchByte <<= 1;
uint bit = await _decoders[((1 + matchBit) << 8) + symbol].DecodeAsync(rangeDecoder, cancellationToken);
uint bit = _decoders[((1 + matchBit) << 8) + symbol].Decode(rangeDecoder);
symbol = (symbol << 1) | bit;
if (matchBit != bit)
{
while (symbol < 0x100)
{
symbol = (symbol << 1) | await _decoders[symbol].DecodeAsync(rangeDecoder, cancellationToken);
symbol = (symbol << 1) | _decoders[symbol].Decode(rangeDecoder);
}
break;
}
@@ -113,12 +113,12 @@ namespace SharpCompress.Compressors.LZMA
}
}
private readonly Decoder2[]_coders;
private readonly int _numPrevBits;
private readonly int _numPosBits;
private readonly uint _posMask;
public LiteralDecoder(int numPosBits, int numPrevBits)
private Decoder2[] _coders;
private int _numPrevBits;
private int _numPosBits;
private uint _posMask;
public void Create(int numPosBits, int numPrevBits)
{
if (_coders != null && _numPrevBits == numPrevBits &&
_numPosBits == numPosBits)
@@ -150,18 +150,18 @@ namespace SharpCompress.Compressors.LZMA
return ((pos & _posMask) << _numPrevBits) + (uint)(prevByte >> (8 - _numPrevBits));
}
public ValueTask<byte> DecodeNormalAsync(RangeCoder.Decoder rangeDecoder, uint pos, byte prevByte, CancellationToken cancellationToken)
public byte DecodeNormal(RangeCoder.Decoder rangeDecoder, uint pos, byte prevByte)
{
return _coders[GetState(pos, prevByte)].DecodeNormalAsync(rangeDecoder, cancellationToken);
return _coders[GetState(pos, prevByte)].DecodeNormal(rangeDecoder);
}
public ValueTask<byte> DecodeWithMatchByteAsync(RangeCoder.Decoder rangeDecoder, uint pos, byte prevByte, byte matchByte, CancellationToken cancellationToken)
public byte DecodeWithMatchByte(RangeCoder.Decoder rangeDecoder, uint pos, byte prevByte, byte matchByte)
{
return _coders[GetState(pos, prevByte)].DecodeWithMatchByteAsync(rangeDecoder, matchByte, cancellationToken);
return _coders[GetState(pos, prevByte)].DecodeWithMatchByte(rangeDecoder, matchByte);
}
}
private OutWindow? _outWindow;
private OutWindow _outWindow;
private readonly BitDecoder[] _isMatchDecoders = new BitDecoder[Base.K_NUM_STATES << Base.K_NUM_POS_STATES_BITS_MAX];
private readonly BitDecoder[] _isRepDecoders = new BitDecoder[Base.K_NUM_STATES];
@@ -173,18 +173,18 @@ namespace SharpCompress.Compressors.LZMA
private readonly BitTreeDecoder[] _posSlotDecoder = new BitTreeDecoder[Base.K_NUM_LEN_TO_POS_STATES];
private readonly BitDecoder[] _posDecoders = new BitDecoder[Base.K_NUM_FULL_DISTANCES - Base.K_END_POS_MODEL_INDEX];
private BitTreeDecoder _posAlignDecoder = new(Base.K_NUM_ALIGN_BITS);
private BitTreeDecoder _posAlignDecoder = new BitTreeDecoder(Base.K_NUM_ALIGN_BITS);
private readonly LenDecoder _lenDecoder = new();
private readonly LenDecoder _repLenDecoder = new();
private readonly LenDecoder _lenDecoder = new LenDecoder();
private readonly LenDecoder _repLenDecoder = new LenDecoder();
private LiteralDecoder? _literalDecoder;
private readonly LiteralDecoder _literalDecoder = new LiteralDecoder();
private int _dictionarySize;
private uint _posStateMask;
private Base.State _state = new();
private Base.State _state = new Base.State();
private uint _rep0, _rep1, _rep2, _rep3;
public Decoder()
@@ -196,16 +196,15 @@ namespace SharpCompress.Compressors.LZMA
}
}
private OutWindow CreateDictionary()
private void CreateDictionary()
{
if (_dictionarySize < 0)
{
throw new InvalidParamException();
}
var outWindow = new OutWindow();
_outWindow = new OutWindow();
int blockSize = Math.Max(_dictionarySize, (1 << 12));
outWindow.Create(blockSize);
return outWindow;
_outWindow.Create(blockSize);
}
private void SetLiteralProperties(int lp, int lc)
@@ -218,7 +217,7 @@ namespace SharpCompress.Compressors.LZMA
{
throw new InvalidParamException();
}
_literalDecoder = new(lp, lc);
_literalDecoder.Create(lp, lc);
}
private void SetPosBitsProperties(int pb)
@@ -250,7 +249,7 @@ namespace SharpCompress.Compressors.LZMA
_isRepG2Decoders[i].Init();
}
_literalDecoder!.Init();
_literalDecoder.Init();
for (i = 0; i < Base.K_NUM_LEN_TO_POS_STATES; i++)
{
_posSlotDecoder[i].Init();
@@ -273,12 +272,12 @@ namespace SharpCompress.Compressors.LZMA
_rep3 = 0;
}
public async ValueTask CodeAsync(Stream inStream, Stream outStream,
Int64 inSize, Int64 outSize, ICodeProgress progress, CancellationToken cancellationToken)
public void Code(Stream inStream, Stream outStream,
Int64 inSize, Int64 outSize, ICodeProgress progress)
{
if (_outWindow is null)
{
_outWindow = CreateDictionary();
CreateDictionary();
}
_outWindow.Init(outStream);
if (outSize > 0)
@@ -291,9 +290,9 @@ namespace SharpCompress.Compressors.LZMA
}
RangeCoder.Decoder rangeDecoder = new RangeCoder.Decoder();
await rangeDecoder.InitAsync(inStream, cancellationToken);
rangeDecoder.Init(inStream);
await CodeAsync(_dictionarySize, _outWindow, rangeDecoder, cancellationToken);
Code(_dictionarySize, _outWindow, rangeDecoder);
_outWindow.ReleaseStream();
rangeDecoder.ReleaseStream();
@@ -309,9 +308,8 @@ namespace SharpCompress.Compressors.LZMA
_outWindow = null;
}
internal async ValueTask<bool> CodeAsync(int dictionarySize, OutWindow outWindow, RangeCoder.Decoder rangeDecoder, CancellationToken cancellationToken)
internal bool Code(int dictionarySize, OutWindow outWindow, RangeCoder.Decoder rangeDecoder)
{
_literalDecoder ??= _literalDecoder.CheckNotNull(nameof(_literalDecoder));
int dictionarySizeCheck = Math.Max(dictionarySize, 1);
outWindow.CopyPending();
@@ -319,19 +317,19 @@ namespace SharpCompress.Compressors.LZMA
while (outWindow.HasSpace)
{
uint posState = (uint)outWindow._total & _posStateMask;
if (await _isMatchDecoders[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState].DecodeAsync(rangeDecoder, cancellationToken) == 0)
if (_isMatchDecoders[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState].Decode(rangeDecoder) == 0)
{
byte b;
byte prevByte = outWindow.GetByte(0);
if (!_state.IsCharState())
{
b = await _literalDecoder.DecodeWithMatchByteAsync(rangeDecoder,
b = _literalDecoder.DecodeWithMatchByte(rangeDecoder,
(uint)outWindow._total, prevByte,
outWindow.GetByte((int)_rep0), cancellationToken);
outWindow.GetByte((int)_rep0));
}
else
{
b = await _literalDecoder.DecodeNormalAsync(rangeDecoder, (uint)outWindow._total, prevByte, cancellationToken);
b = _literalDecoder.DecodeNormal(rangeDecoder, (uint)outWindow._total, prevByte);
}
outWindow.PutByte(b);
_state.UpdateChar();
@@ -339,13 +337,13 @@ namespace SharpCompress.Compressors.LZMA
else
{
uint len;
if (await _isRepDecoders[_state._index].DecodeAsync(rangeDecoder, cancellationToken) == 1)
if (_isRepDecoders[_state._index].Decode(rangeDecoder) == 1)
{
if (await _isRepG0Decoders[_state._index].DecodeAsync(rangeDecoder, cancellationToken) == 0)
if (_isRepG0Decoders[_state._index].Decode(rangeDecoder) == 0)
{
if (
await _isRep0LongDecoders[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState].DecodeAsync(
rangeDecoder, cancellationToken) == 0)
_isRep0LongDecoders[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState].Decode(
rangeDecoder) == 0)
{
_state.UpdateShortRep();
outWindow.PutByte(outWindow.GetByte((int)_rep0));
@@ -355,13 +353,13 @@ namespace SharpCompress.Compressors.LZMA
else
{
UInt32 distance;
if (await _isRepG1Decoders[_state._index].DecodeAsync(rangeDecoder, cancellationToken) == 0)
if (_isRepG1Decoders[_state._index].Decode(rangeDecoder) == 0)
{
distance = _rep1;
}
else
{
if (await _isRepG2Decoders[_state._index].DecodeAsync(rangeDecoder, cancellationToken) == 0)
if (_isRepG2Decoders[_state._index].Decode(rangeDecoder) == 0)
{
distance = _rep2;
}
@@ -375,7 +373,7 @@ namespace SharpCompress.Compressors.LZMA
_rep1 = _rep0;
_rep0 = distance;
}
len = await _repLenDecoder.DecodeAsync(rangeDecoder, posState, cancellationToken) + Base.K_MATCH_MIN_LEN;
len = _repLenDecoder.Decode(rangeDecoder, posState) + Base.K_MATCH_MIN_LEN;
_state.UpdateRep();
}
else
@@ -383,22 +381,23 @@ namespace SharpCompress.Compressors.LZMA
_rep3 = _rep2;
_rep2 = _rep1;
_rep1 = _rep0;
len = Base.K_MATCH_MIN_LEN + await _lenDecoder.DecodeAsync(rangeDecoder, posState, cancellationToken);
len = Base.K_MATCH_MIN_LEN + _lenDecoder.Decode(rangeDecoder, posState);
_state.UpdateMatch();
uint posSlot = await _posSlotDecoder[Base.GetLenToPosState(len)].DecodeAsync(rangeDecoder, cancellationToken);
uint posSlot = _posSlotDecoder[Base.GetLenToPosState(len)].Decode(rangeDecoder);
if (posSlot >= Base.K_START_POS_MODEL_INDEX)
{
int numDirectBits = (int)((posSlot >> 1) - 1);
_rep0 = ((2 | (posSlot & 1)) << numDirectBits);
if (posSlot < Base.K_END_POS_MODEL_INDEX)
{
_rep0 += await BitTreeDecoder.ReverseDecode(_posDecoders,
_rep0 - posSlot - 1, rangeDecoder, numDirectBits, cancellationToken);
_rep0 += BitTreeDecoder.ReverseDecode(_posDecoders,
_rep0 - posSlot - 1, rangeDecoder, numDirectBits);
}
else
{
_rep0 += (await rangeDecoder.DecodeDirectBitsAsync(numDirectBits - Base.K_NUM_ALIGN_BITS, cancellationToken) << Base.K_NUM_ALIGN_BITS);
_rep0 += await _posAlignDecoder.ReverseDecode(rangeDecoder, cancellationToken);
_rep0 += (rangeDecoder.DecodeDirectBits(
numDirectBits - Base.K_NUM_ALIGN_BITS) << Base.K_NUM_ALIGN_BITS);
_rep0 += _posAlignDecoder.ReverseDecode(rangeDecoder);
}
}
else
@@ -451,7 +450,7 @@ namespace SharpCompress.Compressors.LZMA
{
if (_outWindow is null)
{
_outWindow = CreateDictionary();
CreateDictionary();
}
_outWindow.Train(stream);
}

View File

@@ -2,8 +2,6 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.LZMA.LZ;
using SharpCompress.Compressors.LZMA.RangeCoder;
@@ -63,7 +61,7 @@ namespace SharpCompress.Compressors.LZMA
return (UInt32)(G_FAST_POS[pos >> 26] + 52);
}
private Base.State _state = new();
private Base.State _state = new Base.State();
private Byte _previousByte;
private readonly UInt32[] _repDistances = new UInt32[Base.K_NUM_REP_DISTANCES];
@@ -99,18 +97,18 @@ namespace SharpCompress.Compressors.LZMA
}
}
public async ValueTask EncodeAsync(RangeCoder.Encoder rangeEncoder, byte symbol)
public void Encode(RangeCoder.Encoder rangeEncoder, byte symbol)
{
uint context = 1;
for (int i = 7; i >= 0; i--)
{
uint bit = (uint)((symbol >> i) & 1);
await _encoders[context].EncodeAsync(rangeEncoder, bit);
_encoders[context].Encode(rangeEncoder, bit);
context = (context << 1) | bit;
}
}
public async ValueTask EncodeMatchedAsync(RangeCoder.Encoder rangeEncoder, byte matchByte, byte symbol)
public void EncodeMatched(RangeCoder.Encoder rangeEncoder, byte matchByte, byte symbol)
{
uint context = 1;
bool same = true;
@@ -124,7 +122,7 @@ namespace SharpCompress.Compressors.LZMA
state += ((1 + matchBit) << 8);
same = (matchBit == bit);
}
await _encoders[state].EncodeAsync(rangeEncoder, bit);
_encoders[state].Encode(rangeEncoder, bit);
context = (context << 1) | bit;
}
}
@@ -198,11 +196,11 @@ namespace SharpCompress.Compressors.LZMA
private class LenEncoder
{
private BitEncoder _choice = new();
private BitEncoder _choice2 = new();
private BitEncoder _choice = new BitEncoder();
private BitEncoder _choice2 = new BitEncoder();
private readonly BitTreeEncoder[] _lowCoder = new BitTreeEncoder[Base.K_NUM_POS_STATES_ENCODING_MAX];
private readonly BitTreeEncoder[] _midCoder = new BitTreeEncoder[Base.K_NUM_POS_STATES_ENCODING_MAX];
private BitTreeEncoder _highCoder = new(Base.K_NUM_HIGH_LEN_BITS);
private BitTreeEncoder _highCoder = new BitTreeEncoder(Base.K_NUM_HIGH_LEN_BITS);
public LenEncoder()
{
@@ -225,26 +223,26 @@ namespace SharpCompress.Compressors.LZMA
_highCoder.Init();
}
public async ValueTask EncodeAsync(RangeCoder.Encoder rangeEncoder, UInt32 symbol, UInt32 posState)
public void Encode(RangeCoder.Encoder rangeEncoder, UInt32 symbol, UInt32 posState)
{
if (symbol < Base.K_NUM_LOW_LEN_SYMBOLS)
{
await _choice.EncodeAsync(rangeEncoder, 0);
await _lowCoder[posState].EncodeAsync(rangeEncoder, symbol);
_choice.Encode(rangeEncoder, 0);
_lowCoder[posState].Encode(rangeEncoder, symbol);
}
else
{
symbol -= Base.K_NUM_LOW_LEN_SYMBOLS;
await _choice.EncodeAsync(rangeEncoder, 1);
_choice.Encode(rangeEncoder, 1);
if (symbol < Base.K_NUM_MID_LEN_SYMBOLS)
{
await _choice2.EncodeAsync(rangeEncoder, 0);
await _midCoder[posState].EncodeAsync(rangeEncoder, symbol);
_choice2.Encode(rangeEncoder, 0);
_midCoder[posState].Encode(rangeEncoder, symbol);
}
else
{
await _choice2.EncodeAsync(rangeEncoder, 1);
await _highCoder.EncodeAsync(rangeEncoder, symbol - Base.K_NUM_MID_LEN_SYMBOLS);
_choice2.Encode(rangeEncoder, 1);
_highCoder.Encode(rangeEncoder, symbol - Base.K_NUM_MID_LEN_SYMBOLS);
}
}
}
@@ -311,9 +309,9 @@ namespace SharpCompress.Compressors.LZMA
}
}
public new async ValueTask EncodeAsync(RangeCoder.Encoder rangeEncoder, UInt32 symbol, UInt32 posState)
public new void Encode(RangeCoder.Encoder rangeEncoder, UInt32 symbol, UInt32 posState)
{
await base.EncodeAsync(rangeEncoder, symbol, posState);
base.Encode(rangeEncoder, symbol, posState);
if (--_counters[posState] == 0)
{
UpdateTable(posState);
@@ -363,7 +361,7 @@ namespace SharpCompress.Compressors.LZMA
private readonly Optimal[] _optimum = new Optimal[K_NUM_OPTS];
private BinTree _matchFinder;
private readonly RangeCoder.Encoder _rangeEncoder = new();
private readonly RangeCoder.Encoder _rangeEncoder = new RangeCoder.Encoder();
private readonly BitEncoder[] _isMatch =
new BitEncoder[Base.K_NUM_STATES << Base.K_NUM_POS_STATES_BITS_MAX];
@@ -381,12 +379,12 @@ namespace SharpCompress.Compressors.LZMA
private readonly BitEncoder[] _posEncoders =
new BitEncoder[Base.K_NUM_FULL_DISTANCES - Base.K_END_POS_MODEL_INDEX];
private BitTreeEncoder _posAlignEncoder = new(Base.K_NUM_ALIGN_BITS);
private BitTreeEncoder _posAlignEncoder = new BitTreeEncoder(Base.K_NUM_ALIGN_BITS);
private readonly LenPriceTableEncoder _lenEncoder = new();
private readonly LenPriceTableEncoder _repMatchLenEncoder = new();
private readonly LenPriceTableEncoder _lenEncoder = new LenPriceTableEncoder();
private readonly LenPriceTableEncoder _repMatchLenEncoder = new LenPriceTableEncoder();
private readonly LiteralEncoder _literalEncoder = new();
private readonly LiteralEncoder _literalEncoder = new LiteralEncoder();
private readonly UInt32[] _matchDistances = new UInt32[Base.K_MATCH_MAX_LEN * 2 + 2];
@@ -1191,40 +1189,40 @@ namespace SharpCompress.Compressors.LZMA
return (smallDist < ((UInt32)(1) << (32 - kDif)) && bigDist >= (smallDist << kDif));
}
private async ValueTask WriteEndMarkerAsync(UInt32 posState)
private void WriteEndMarker(UInt32 posState)
{
if (!_writeEndMark)
{
return;
}
await _isMatch[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState].EncodeAsync(_rangeEncoder, 1);
await _isRep[_state._index].EncodeAsync(_rangeEncoder, 0);
_isMatch[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState].Encode(_rangeEncoder, 1);
_isRep[_state._index].Encode(_rangeEncoder, 0);
_state.UpdateMatch();
UInt32 len = Base.K_MATCH_MIN_LEN;
await _lenEncoder.EncodeAsync(_rangeEncoder, len - Base.K_MATCH_MIN_LEN, posState);
_lenEncoder.Encode(_rangeEncoder, len - Base.K_MATCH_MIN_LEN, posState);
UInt32 posSlot = (1 << Base.K_NUM_POS_SLOT_BITS) - 1;
UInt32 lenToPosState = Base.GetLenToPosState(len);
await _posSlotEncoder[lenToPosState].EncodeAsync(_rangeEncoder, posSlot);
_posSlotEncoder[lenToPosState].Encode(_rangeEncoder, posSlot);
int footerBits = 30;
UInt32 posReduced = (((UInt32)1) << footerBits) - 1;
await _rangeEncoder.EncodeDirectBits(posReduced >> Base.K_NUM_ALIGN_BITS, footerBits - Base.K_NUM_ALIGN_BITS);
await _posAlignEncoder.ReverseEncodeAsync(_rangeEncoder, posReduced & Base.K_ALIGN_MASK);
_rangeEncoder.EncodeDirectBits(posReduced >> Base.K_NUM_ALIGN_BITS, footerBits - Base.K_NUM_ALIGN_BITS);
_posAlignEncoder.ReverseEncode(_rangeEncoder, posReduced & Base.K_ALIGN_MASK);
}
private async ValueTask FlushAsync(UInt32 nowPos)
private void Flush(UInt32 nowPos)
{
ReleaseMfStream();
await WriteEndMarkerAsync(nowPos & _posStateMask);
await _rangeEncoder.FlushData();
await _rangeEncoder.FlushAsync();
WriteEndMarker(nowPos & _posStateMask);
_rangeEncoder.FlushData();
_rangeEncoder.FlushStream();
}
public async ValueTask<(Int64, Int64, bool)> CodeOneBlockAsync()
public void CodeOneBlock(out Int64 inSize, out Int64 outSize, out bool finished)
{
long inSize = 0;
long outSize = 0;
var finished = true;
inSize = 0;
outSize = 0;
finished = true;
if (_inStream != null)
{
@@ -1235,7 +1233,7 @@ namespace SharpCompress.Compressors.LZMA
if (_finished)
{
return (inSize, outSize, finished);
return;
}
_finished = true;
@@ -1256,20 +1254,20 @@ namespace SharpCompress.Compressors.LZMA
if (_processingMode && _matchFinder.IsDataStarved)
{
_finished = false;
return (inSize, outSize, finished);
return;
}
if (_matchFinder.GetNumAvailableBytes() == 0)
{
await FlushAsync((UInt32)_nowPos64);
return (inSize, outSize, finished);
Flush((UInt32)_nowPos64);
return;
}
UInt32 len, numDistancePairs; // it's not used
ReadMatchDistances(out len, out numDistancePairs);
UInt32 posState = (UInt32)(_nowPos64) & _posStateMask;
await _isMatch[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState].EncodeAsync(_rangeEncoder, 0);
_isMatch[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState].Encode(_rangeEncoder, 0);
_state.UpdateChar();
Byte curByte = _matchFinder.GetIndexByte((Int32)(0 - _additionalOffset));
await _literalEncoder.GetSubCoder((UInt32)(_nowPos64), _previousByte).EncodeAsync(_rangeEncoder, curByte);
_literalEncoder.GetSubCoder((UInt32)(_nowPos64), _previousByte).Encode(_rangeEncoder, curByte);
_previousByte = curByte;
_additionalOffset--;
_nowPos64++;
@@ -1277,19 +1275,19 @@ namespace SharpCompress.Compressors.LZMA
if (_processingMode && _matchFinder.IsDataStarved)
{
_finished = false;
return (inSize, outSize, finished);
return;
}
if (_matchFinder.GetNumAvailableBytes() == 0)
{
await FlushAsync((UInt32)_nowPos64);
return (inSize, outSize, finished);
Flush((UInt32)_nowPos64);
return;
}
while (true)
{
if (_processingMode && _matchFinder.IsDataStarved)
{
_finished = false;
return (inSize, outSize, finished);
return;
}
UInt32 pos;
@@ -1299,51 +1297,51 @@ namespace SharpCompress.Compressors.LZMA
UInt32 complexState = (_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState;
if (len == 1 && pos == 0xFFFFFFFF)
{
await _isMatch[complexState].EncodeAsync(_rangeEncoder, 0);
_isMatch[complexState].Encode(_rangeEncoder, 0);
Byte curByte = _matchFinder.GetIndexByte((Int32)(0 - _additionalOffset));
LiteralEncoder.Encoder2 subCoder = _literalEncoder.GetSubCoder((UInt32)_nowPos64, _previousByte);
if (!_state.IsCharState())
{
Byte matchByte =
_matchFinder.GetIndexByte((Int32)(0 - _repDistances[0] - 1 - _additionalOffset));
await subCoder.EncodeMatchedAsync(_rangeEncoder, matchByte, curByte);
subCoder.EncodeMatched(_rangeEncoder, matchByte, curByte);
}
else
{
await subCoder.EncodeAsync(_rangeEncoder, curByte);
subCoder.Encode(_rangeEncoder, curByte);
}
_previousByte = curByte;
_state.UpdateChar();
}
else
{
await _isMatch[complexState].EncodeAsync(_rangeEncoder, 1);
_isMatch[complexState].Encode(_rangeEncoder, 1);
if (pos < Base.K_NUM_REP_DISTANCES)
{
await _isRep[_state._index].EncodeAsync(_rangeEncoder, 1);
_isRep[_state._index].Encode(_rangeEncoder, 1);
if (pos == 0)
{
await _isRepG0[_state._index].EncodeAsync(_rangeEncoder, 0);
_isRepG0[_state._index].Encode(_rangeEncoder, 0);
if (len == 1)
{
await _isRep0Long[complexState].EncodeAsync(_rangeEncoder, 0);
_isRep0Long[complexState].Encode(_rangeEncoder, 0);
}
else
{
await _isRep0Long[complexState].EncodeAsync(_rangeEncoder, 1);
_isRep0Long[complexState].Encode(_rangeEncoder, 1);
}
}
else
{
await _isRepG0[_state._index].EncodeAsync(_rangeEncoder, 1);
_isRepG0[_state._index].Encode(_rangeEncoder, 1);
if (pos == 1)
{
await _isRepG1[_state._index].EncodeAsync(_rangeEncoder, 0);
_isRepG1[_state._index].Encode(_rangeEncoder, 0);
}
else
{
await _isRepG1[_state._index].EncodeAsync(_rangeEncoder, 1);
await _isRepG2[_state._index].EncodeAsync(_rangeEncoder, pos - 2);
_isRepG1[_state._index].Encode(_rangeEncoder, 1);
_isRepG2[_state._index].Encode(_rangeEncoder, pos - 2);
}
}
if (len == 1)
@@ -1352,7 +1350,7 @@ namespace SharpCompress.Compressors.LZMA
}
else
{
await _repMatchLenEncoder.EncodeAsync(_rangeEncoder, len - Base.K_MATCH_MIN_LEN, posState);
_repMatchLenEncoder.Encode(_rangeEncoder, len - Base.K_MATCH_MIN_LEN, posState);
_state.UpdateRep();
}
UInt32 distance = _repDistances[pos];
@@ -1367,13 +1365,13 @@ namespace SharpCompress.Compressors.LZMA
}
else
{
await _isRep[_state._index].EncodeAsync(_rangeEncoder, 0);
_isRep[_state._index].Encode(_rangeEncoder, 0);
_state.UpdateMatch();
await _lenEncoder.EncodeAsync(_rangeEncoder, len - Base.K_MATCH_MIN_LEN, posState);
_lenEncoder.Encode(_rangeEncoder, len - Base.K_MATCH_MIN_LEN, posState);
pos -= Base.K_NUM_REP_DISTANCES;
UInt32 posSlot = GetPosSlot(pos);
UInt32 lenToPosState = Base.GetLenToPosState(len);
await _posSlotEncoder[lenToPosState].EncodeAsync(_rangeEncoder, posSlot);
_posSlotEncoder[lenToPosState].Encode(_rangeEncoder, posSlot);
if (posSlot >= Base.K_START_POS_MODEL_INDEX)
{
@@ -1383,15 +1381,15 @@ namespace SharpCompress.Compressors.LZMA
if (posSlot < Base.K_END_POS_MODEL_INDEX)
{
await BitTreeEncoder.ReverseEncodeAsync(_posEncoders,
baseVal - posSlot - 1, _rangeEncoder, footerBits,
posReduced);
BitTreeEncoder.ReverseEncode(_posEncoders,
baseVal - posSlot - 1, _rangeEncoder, footerBits,
posReduced);
}
else
{
await _rangeEncoder.EncodeDirectBits(posReduced >> Base.K_NUM_ALIGN_BITS,
_rangeEncoder.EncodeDirectBits(posReduced >> Base.K_NUM_ALIGN_BITS,
footerBits - Base.K_NUM_ALIGN_BITS);
await _posAlignEncoder.ReverseEncodeAsync(_rangeEncoder, posReduced & Base.K_ALIGN_MASK);
_posAlignEncoder.ReverseEncode(_rangeEncoder, posReduced & Base.K_ALIGN_MASK);
_alignPriceCount++;
}
}
@@ -1423,19 +1421,19 @@ namespace SharpCompress.Compressors.LZMA
if (_processingMode && _matchFinder.IsDataStarved)
{
_finished = false;
return (inSize, outSize, finished);
return;
}
if (_matchFinder.GetNumAvailableBytes() == 0)
{
await FlushAsync((UInt32)_nowPos64);
return (inSize, outSize, finished);
Flush((UInt32)_nowPos64);
return;
}
if (_nowPos64 - progressPosValuePrev >= (1 << 12))
{
_finished = false;
finished = false;
return (inSize, outSize, finished);
return;
}
}
}
@@ -1490,8 +1488,8 @@ namespace SharpCompress.Compressors.LZMA
_nowPos64 = 0;
}
public async ValueTask CodeAsync(Stream inStream, Stream outStream,
Int64 inSize, Int64 outSize, ICodeProgress progress, CancellationToken cancellationToken)
public void Code(Stream inStream, Stream outStream,
Int64 inSize, Int64 outSize, ICodeProgress progress)
{
_needReleaseMfStream = false;
_processingMode = false;
@@ -1500,7 +1498,10 @@ namespace SharpCompress.Compressors.LZMA
SetStreams(inStream, outStream, inSize, outSize);
while (true)
{
var (processedInSize, processedOutSize, finished) = await CodeOneBlockAsync();
Int64 processedInSize;
Int64 processedOutSize;
bool finished;
CodeOneBlock(out processedInSize, out processedOutSize, out finished);
if (finished)
{
return;
@@ -1517,7 +1518,7 @@ namespace SharpCompress.Compressors.LZMA
}
}
public async ValueTask<long> CodeAsync(Stream inStream, bool final)
public long Code(Stream inStream, bool final)
{
_matchFinder.SetStream(inStream);
_processingMode = !final;
@@ -1525,7 +1526,10 @@ namespace SharpCompress.Compressors.LZMA
{
while (true)
{
var (processedInSize, processedOutSize, finished) = await CodeOneBlockAsync();
Int64 processedInSize;
Int64 processedOutSize;
bool finished;
CodeOneBlock(out processedInSize, out processedOutSize, out finished);
if (finished)
{
return processedInSize;

View File

@@ -1,25 +1,21 @@
#nullable disable
using System;
using System.Buffers;
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.LZMA.LZ;
using SharpCompress.IO;
namespace SharpCompress.Compressors.LZMA
{
public class LzmaStream : AsyncStream
public class LzmaStream : Stream
{
private Stream _inputStream;
private long _inputSize;
private long _outputSize;
private readonly Stream _inputStream;
private readonly long _inputSize;
private readonly long _outputSize;
private int _dictionarySize;
private OutWindow _outWindow = new OutWindow();
private RangeCoder.Decoder _rangeDecoder = new RangeCoder.Decoder();
private readonly int _dictionarySize;
private readonly OutWindow _outWindow = new OutWindow();
private readonly RangeCoder.Decoder _rangeDecoder = new RangeCoder.Decoder();
private Decoder _decoder;
private long _position;
@@ -29,60 +25,70 @@ namespace SharpCompress.Compressors.LZMA
private long _inputPosition;
// LZMA2
private bool _isLzma2;
private readonly bool _isLzma2;
private bool _uncompressedChunk;
private bool _needDictReset = true;
private bool _needProps = true;
private readonly Encoder _encoder;
private bool _isDisposed;
private LzmaStream() {}
public static async ValueTask<LzmaStream> CreateAsync(byte[] properties, Stream inputStream, long inputSize = -1, long outputSize = -1,
Stream presetDictionary = null, bool? isLzma2 = null, CancellationToken cancellationToken = default)
public LzmaStream(byte[] properties, Stream inputStream)
: this(properties, inputStream, -1, -1, null, properties.Length < 5)
{
var ls = new LzmaStream();
ls._inputStream = inputStream;
ls._inputSize = inputSize;
ls._outputSize = outputSize;
ls._isLzma2 = isLzma2 ?? properties.Length < 5;
}
if (!ls._isLzma2)
public LzmaStream(byte[] properties, Stream inputStream, long inputSize)
: this(properties, inputStream, inputSize, -1, null, properties.Length < 5)
{
}
public LzmaStream(byte[] properties, Stream inputStream, long inputSize, long outputSize)
: this(properties, inputStream, inputSize, outputSize, null, properties.Length < 5)
{
}
public LzmaStream(byte[] properties, Stream inputStream, long inputSize, long outputSize,
Stream presetDictionary, bool isLzma2)
{
_inputStream = inputStream;
_inputSize = inputSize;
_outputSize = outputSize;
_isLzma2 = isLzma2;
if (!isLzma2)
{
ls._dictionarySize = BinaryPrimitives.ReadInt32LittleEndian(properties.AsSpan(1));
ls._outWindow.Create(ls._dictionarySize);
_dictionarySize = BinaryPrimitives.ReadInt32LittleEndian(properties.AsSpan(1));
_outWindow.Create(_dictionarySize);
if (presetDictionary != null)
{
ls._outWindow.Train(presetDictionary);
_outWindow.Train(presetDictionary);
}
await ls._rangeDecoder.InitAsync(inputStream, cancellationToken);
_rangeDecoder.Init(inputStream);
ls._decoder = new Decoder();
ls._decoder.SetDecoderProperties(properties);
ls.Properties = properties;
_decoder = new Decoder();
_decoder.SetDecoderProperties(properties);
Properties = properties;
ls._availableBytes = outputSize < 0 ? long.MaxValue : outputSize;
ls._rangeDecoderLimit = inputSize;
_availableBytes = outputSize < 0 ? long.MaxValue : outputSize;
_rangeDecoderLimit = inputSize;
}
else
{
ls. _dictionarySize = 2 | (properties[0] & 1);
ls. _dictionarySize <<= (properties[0] >> 1) + 11;
_dictionarySize = 2 | (properties[0] & 1);
_dictionarySize <<= (properties[0] >> 1) + 11;
ls._outWindow.Create(ls._dictionarySize);
_outWindow.Create(_dictionarySize);
if (presetDictionary != null)
{
ls._outWindow.Train(presetDictionary);
ls._needDictReset = false;
_outWindow.Train(presetDictionary);
_needDictReset = false;
}
ls. Properties = new byte[1];
ls._availableBytes = 0;
Properties = new byte[1];
_availableBytes = 0;
}
return ls;
}
public LzmaStream(LzmaEncoderProperties properties, bool isLzma2, Stream outputStream)
@@ -120,25 +126,33 @@ namespace SharpCompress.Compressors.LZMA
public override bool CanWrite => _encoder != null;
public override async ValueTask DisposeAsync()
public override void Flush()
{
}
protected override void Dispose(bool disposing)
{
if (_isDisposed)
{
return;
}
_isDisposed = true;
if (_encoder != null)
if (disposing)
{
_position = await _encoder.CodeAsync(null, true);
if (_encoder != null)
{
_position = _encoder.Code(null, true);
}
_inputStream?.Dispose();
}
_inputStream?.DisposeAsync();
base.Dispose(disposing);
}
public override long Length => _position + _availableBytes;
public override long Position { get => _position; set => throw new NotSupportedException(); }
public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
public override int Read(byte[] buffer, int offset, int count)
{
if (_endReached)
{
@@ -152,7 +166,7 @@ namespace SharpCompress.Compressors.LZMA
{
if (_isLzma2)
{
await DecodeChunkHeader(cancellationToken);
DecodeChunkHeader();
}
else
{
@@ -175,7 +189,7 @@ namespace SharpCompress.Compressors.LZMA
{
_inputPosition += _outWindow.CopyStream(_inputStream, toProcess);
}
else if (await _decoder.CodeAsync(_dictionarySize, _outWindow, _rangeDecoder, cancellationToken)
else if (_decoder.Code(_dictionarySize, _outWindow, _rangeDecoder)
&& _outputSize < 0)
{
_availableBytes = _outWindow.AvailableBytes;
@@ -217,7 +231,7 @@ namespace SharpCompress.Compressors.LZMA
return total;
}
private async ValueTask DecodeChunkHeader(CancellationToken cancellationToken)
private void DecodeChunkHeader()
{
int control = _inputStream.ReadByte();
_inputPosition++;
@@ -269,7 +283,7 @@ namespace SharpCompress.Compressors.LZMA
_decoder.SetDecoderProperties(Properties);
}
await _rangeDecoder.InitAsync(_inputStream, cancellationToken);
_rangeDecoder.Init(_inputStream);
}
else if (control > 0x02)
{
@@ -293,25 +307,14 @@ namespace SharpCompress.Compressors.LZMA
throw new NotSupportedException();
}
public override async Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
public override void Write(byte[] buffer, int offset, int count)
{
if (_encoder != null)
{
_position = await _encoder.CodeAsync(new MemoryStream(buffer, offset, count), false);
_position = _encoder.Code(new MemoryStream(buffer, offset, count), false);
}
}
public override async ValueTask WriteAsync(ReadOnlyMemory<byte> buffer, CancellationToken cancellationToken = new CancellationToken())
{
if (_encoder != null)
{
var m = ArrayPool<byte>.Shared.Rent(buffer.Length);
buffer.CopyTo(m.AsMemory().Slice(0, buffer.Length));
_position = await _encoder.CodeAsync(new MemoryStream(m, 0, buffer.Length), false);
ArrayPool<byte>.Shared.Return(m);
}
}
public byte[] Properties { get; private set; }
public byte[] Properties { get; } = new byte[5];
}
}

View File

@@ -1,14 +1,11 @@
#nullable disable
using System;
using System.Buffers;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.LZMA.RangeCoder
{
internal class Encoder : IAsyncDisposable
internal class Encoder
{
public const uint K_TOP_VALUE = (1 << 24);
@@ -41,46 +38,43 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
_cache = 0;
}
public async ValueTask FlushData()
public void FlushData()
{
for (int i = 0; i < 5; i++)
{
await ShiftLowAsync();
ShiftLow();
}
}
public Task FlushAsync()
public void FlushStream()
{
return _stream.FlushAsync();
_stream.Flush();
}
public ValueTask DisposeAsync()
public void CloseStream()
{
return _stream.DisposeAsync();
_stream.Dispose();
}
public async ValueTask EncodeAsync(uint start, uint size, uint total)
public void Encode(uint start, uint size, uint total)
{
_low += start * (_range /= total);
_range *= size;
while (_range < K_TOP_VALUE)
{
_range <<= 8;
await ShiftLowAsync();
ShiftLow();
}
}
public async ValueTask ShiftLowAsync()
public void ShiftLow()
{
if ((uint)_low < 0xFF000000 || (uint)(_low >> 32) == 1)
{
using var buffer = MemoryPool<byte>.Shared.Rent(1);
var b = buffer.Memory.Slice(0,1);
byte temp = _cache;
do
{
b.Span[0] = (byte)(temp + (_low >> 32));
await _stream.WriteAsync(b);
_stream.WriteByte((byte)(temp + (_low >> 32)));
temp = 0xFF;
}
while (--_cacheSize != 0);
@@ -90,7 +84,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
_low = ((uint)_low) << 8;
}
public async ValueTask EncodeDirectBits(uint v, int numTotalBits)
public void EncodeDirectBits(uint v, int numTotalBits)
{
for (int i = numTotalBits - 1; i >= 0; i--)
{
@@ -102,12 +96,12 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
if (_range < K_TOP_VALUE)
{
_range <<= 8;
await ShiftLowAsync();
ShiftLow();
}
}
}
public async ValueTask EncodeBitAsync(uint size0, int numTotalBits, uint symbol)
public void EncodeBit(uint size0, int numTotalBits, uint symbol)
{
uint newBound = (_range >> numTotalBits) * size0;
if (symbol == 0)
@@ -122,7 +116,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
while (_range < K_TOP_VALUE)
{
_range <<= 8;
await ShiftLowAsync();
ShiftLow();
}
}
@@ -135,7 +129,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
}
}
internal class Decoder: IAsyncDisposable
internal class Decoder
{
public const uint K_TOP_VALUE = (1 << 24);
public uint _range;
@@ -145,7 +139,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
public Stream _stream;
public long _total;
public async ValueTask InitAsync(Stream stream, CancellationToken cancellationToken)
public void Init(Stream stream)
{
// Stream.Init(stream);
_stream = stream;
@@ -154,7 +148,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
_range = 0xFFFFFFFF;
for (int i = 0; i < 5; i++)
{
_code = (_code << 8) | await _stream.ReadByteAsync(cancellationToken);
_code = (_code << 8) | (byte)_stream.ReadByte();
}
_total = 5;
}
@@ -165,34 +159,44 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
_stream = null;
}
public ValueTask DisposeAsync()
public void CloseStream()
{
return _stream.DisposeAsync();
_stream.Dispose();
}
public async ValueTask NormalizeAsync(CancellationToken cancellationToken)
public void Normalize()
{
while (_range < K_TOP_VALUE)
{
_code = (_code << 8) | await _stream.ReadByteAsync(cancellationToken);
_code = (_code << 8) | (byte)_stream.ReadByte();
_range <<= 8;
_total++;
}
}
public void Normalize2()
{
if (_range < K_TOP_VALUE)
{
_code = (_code << 8) | (byte)_stream.ReadByte();
_range <<= 8;
_total++;
}
}
public uint GetThreshold(uint total)
{
return _code / (_range /= total);
}
public async ValueTask DecodeAsync(uint start, uint size, CancellationToken cancellationToken)
public void Decode(uint start, uint size)
{
_code -= start * _range;
_range *= size;
await NormalizeAsync(cancellationToken);
Normalize();
}
public async ValueTask<uint> DecodeDirectBitsAsync(int numTotalBits, CancellationToken cancellationToken)
public uint DecodeDirectBits(int numTotalBits)
{
uint range = _range;
uint code = _code;
@@ -214,7 +218,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
if (range < K_TOP_VALUE)
{
code = (code << 8) | await _stream.ReadByteAsync(cancellationToken);
code = (code << 8) | (byte)_stream.ReadByte();
range <<= 8;
_total++;
}
@@ -224,7 +228,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
return result;
}
public async ValueTask<uint> DecodeBitAsync(uint size0, int numTotalBits, CancellationToken cancellationToken)
public uint DecodeBit(uint size0, int numTotalBits)
{
uint newBound = (_range >> numTotalBits) * size0;
uint symbol;
@@ -239,7 +243,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
_code -= newBound;
_range -= newBound;
}
await NormalizeAsync(cancellationToken);
Normalize();
return symbol;
}

View File

@@ -1,7 +1,4 @@
using System;
using System.Buffers;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.LZMA.RangeCoder
{
@@ -32,7 +29,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
}
}
public async ValueTask EncodeAsync(Encoder encoder, uint symbol)
public void Encode(Encoder encoder, uint symbol)
{
// encoder.EncodeBit(Prob, kNumBitModelTotalBits, symbol);
// UpdateModel(symbol);
@@ -51,7 +48,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
if (encoder._range < Encoder.K_TOP_VALUE)
{
encoder._range <<= 8;
await encoder.ShiftLowAsync();
encoder.ShiftLow();
}
}
@@ -113,7 +110,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
_prob = K_BIT_MODEL_TOTAL >> 1;
}
public async ValueTask<uint> DecodeAsync(Decoder rangeDecoder, CancellationToken cancellationToken)
public uint Decode(Decoder rangeDecoder)
{
uint newBound = (rangeDecoder._range >> K_NUM_BIT_MODEL_TOTAL_BITS) * _prob;
if (rangeDecoder._code < newBound)
@@ -122,7 +119,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
_prob += (K_BIT_MODEL_TOTAL - _prob) >> K_NUM_MOVE_BITS;
if (rangeDecoder._range < Decoder.K_TOP_VALUE)
{
rangeDecoder._code = (rangeDecoder._code << 8) | await rangeDecoder._stream.ReadByteAsync(cancellationToken);
rangeDecoder._code = (rangeDecoder._code << 8) | (byte)rangeDecoder._stream.ReadByte();
rangeDecoder._range <<= 8;
rangeDecoder._total++;
}
@@ -133,7 +130,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
_prob -= (_prob) >> K_NUM_MOVE_BITS;
if (rangeDecoder._range < Decoder.K_TOP_VALUE)
{
rangeDecoder._code = (rangeDecoder._code << 8) | await rangeDecoder._stream.ReadByteAsync(cancellationToken);
rangeDecoder._code = (rangeDecoder._code << 8) | (byte)rangeDecoder._stream.ReadByte();
rangeDecoder._range <<= 8;
rangeDecoder._total++;
}

View File

@@ -1,6 +1,4 @@
using System;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.LZMA.RangeCoder
{
@@ -23,25 +21,25 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
}
}
public async ValueTask EncodeAsync(Encoder rangeEncoder, UInt32 symbol)
public void Encode(Encoder rangeEncoder, UInt32 symbol)
{
UInt32 m = 1;
for (int bitIndex = _numBitLevels; bitIndex > 0;)
{
bitIndex--;
UInt32 bit = (symbol >> bitIndex) & 1;
await _models[m].EncodeAsync(rangeEncoder, bit);
_models[m].Encode(rangeEncoder, bit);
m = (m << 1) | bit;
}
}
public async ValueTask ReverseEncodeAsync(Encoder rangeEncoder, UInt32 symbol)
public void ReverseEncode(Encoder rangeEncoder, UInt32 symbol)
{
UInt32 m = 1;
for (UInt32 i = 0; i < _numBitLevels; i++)
{
UInt32 bit = symbol & 1;
await _models[m].EncodeAsync(rangeEncoder, bit);
_models[m].Encode(rangeEncoder, bit);
m = (m << 1) | bit;
symbol >>= 1;
}
@@ -90,14 +88,14 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
return price;
}
public static async ValueTask ReverseEncodeAsync(BitEncoder[] models, UInt32 startIndex,
public static void ReverseEncode(BitEncoder[] models, UInt32 startIndex,
Encoder rangeEncoder, int numBitLevels, UInt32 symbol)
{
UInt32 m = 1;
for (int i = 0; i < numBitLevels; i++)
{
UInt32 bit = symbol & 1;
await models[startIndex + m].EncodeAsync(rangeEncoder, bit);
models[startIndex + m].Encode(rangeEncoder, bit);
m = (m << 1) | bit;
symbol >>= 1;
}
@@ -123,23 +121,23 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
}
}
public async ValueTask<uint> DecodeAsync(Decoder rangeDecoder, CancellationToken cancellationToken)
public uint Decode(Decoder rangeDecoder)
{
uint m = 1;
for (int bitIndex = _numBitLevels; bitIndex > 0; bitIndex--)
{
m = (m << 1) + await _models[m].DecodeAsync(rangeDecoder, cancellationToken);
m = (m << 1) + _models[m].Decode(rangeDecoder);
}
return m - ((uint)1 << _numBitLevels);
}
public async ValueTask<uint> ReverseDecode(Decoder rangeDecoder, CancellationToken cancellationToken)
public uint ReverseDecode(Decoder rangeDecoder)
{
uint m = 1;
uint symbol = 0;
for (int bitIndex = 0; bitIndex < _numBitLevels; bitIndex++)
{
uint bit = await _models[m].DecodeAsync(rangeDecoder, cancellationToken);
uint bit = _models[m].Decode(rangeDecoder);
m <<= 1;
m += bit;
symbol |= (bit << bitIndex);
@@ -147,14 +145,14 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
return symbol;
}
public static async ValueTask<uint> ReverseDecode(BitDecoder[] models, UInt32 startIndex,
Decoder rangeDecoder, int numBitLevels, CancellationToken cancellationToken)
public static uint ReverseDecode(BitDecoder[] models, UInt32 startIndex,
Decoder rangeDecoder, int numBitLevels)
{
uint m = 1;
uint symbol = 0;
for (int bitIndex = 0; bitIndex < numBitLevels; bitIndex++)
{
uint bit = await models[startIndex + m].DecodeAsync(rangeDecoder, cancellationToken);
uint bit = models[startIndex + m].Decode(rangeDecoder);
m <<= 1;
m += bit;
symbol |= (bit << bitIndex);

View File

@@ -1,14 +1,12 @@
using System;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.SevenZip;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Compressors.Filters;
using SharpCompress.Compressors.LZMA.Utilites;
//using SharpCompress.Compressors.PPMd;
using SharpCompress.Compressors.PPMd;
namespace SharpCompress.Compressors.LZMA
{
@@ -24,10 +22,9 @@ namespace SharpCompress.Compressors.LZMA
private const uint K_DEFLATE = 0x040108;
private const uint K_B_ZIP2 = 0x040202;
internal static async ValueTask<Stream> CreateDecoderStream(CMethodId id, Stream[] inStreams, byte[] info, IPasswordProvider pass,
long limit, CancellationToken cancellationToken)
internal static Stream CreateDecoderStream(CMethodId id, Stream[] inStreams, byte[] info, IPasswordProvider pass,
long limit)
{
await Task.CompletedTask;
switch (id._id)
{
case K_COPY:
@@ -38,17 +35,17 @@ namespace SharpCompress.Compressors.LZMA
return inStreams.Single();
case K_LZMA:
case K_LZMA2:
return await LzmaStream.CreateAsync(info, inStreams.Single(), -1, limit, cancellationToken: cancellationToken);
return new LzmaStream(info, inStreams.Single(), -1, limit);
case CMethodId.K_AES_ID:
return new AesDecoderStream(inStreams.Single(), info, pass, limit);
case K_BCJ:
return new BCJFilter(false, inStreams.Single());
case K_BCJ2:
return new Bcj2DecoderStream(inStreams, info, limit);
/* case K_B_ZIP2:
return await BZip2Stream.CreateAsync(inStreams.Single(), CompressionMode.Decompress, true, cancellationToken);
case K_PPMD:
return new PpmdStream(new PpmdProperties(info), inStreams.Single(), false);*/
case K_B_ZIP2:
return new BZip2Stream(inStreams.Single(), CompressionMode.Decompress, true);
case K_PPMD:
return new PpmdStream(new PpmdProperties(info), inStreams.Single(), false);
case K_DEFLATE:
return new DeflateStream(inStreams.Single(), CompressionMode.Decompress);
default:

View File

@@ -3,8 +3,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
@@ -15,7 +13,7 @@ namespace SharpCompress.Compressors.Rar
private long currentPosition;
private long maxPosition;
private IAsyncEnumerator<RarFilePart> filePartEnumerator;
private IEnumerator<RarFilePart> filePartEnumerator;
private Stream currentStream;
private readonly IExtractionListener streamListener;
@@ -23,33 +21,34 @@ namespace SharpCompress.Compressors.Rar
private long currentPartTotalReadBytes;
private long currentEntryTotalReadBytes;
internal MultiVolumeReadOnlyStream(IExtractionListener streamListener)
internal MultiVolumeReadOnlyStream(IEnumerable<RarFilePart> parts, IExtractionListener streamListener)
{
this.streamListener = streamListener;
filePartEnumerator = parts.GetEnumerator();
filePartEnumerator.MoveNext();
InitializeNextFilePart();
}
internal async ValueTask Initialize(IAsyncEnumerable<RarFilePart> parts, CancellationToken cancellationToken)
protected override void Dispose(bool disposing)
{
filePartEnumerator = parts.GetAsyncEnumerator(cancellationToken);
await filePartEnumerator.MoveNextAsync(cancellationToken);
await InitializeNextFilePartAsync(cancellationToken);
}
public override async ValueTask DisposeAsync()
{
if (filePartEnumerator != null)
base.Dispose(disposing);
if (disposing)
{
await filePartEnumerator.DisposeAsync();
filePartEnumerator = null;
if (filePartEnumerator != null)
{
filePartEnumerator.Dispose();
filePartEnumerator = null;
}
currentStream = null;
}
currentStream = null;
}
private async ValueTask InitializeNextFilePartAsync(CancellationToken cancellationToken)
private void InitializeNextFilePart()
{
maxPosition = filePartEnumerator.Current.FileHeader.CompressedSize;
currentPosition = 0;
currentStream = await filePartEnumerator.Current.GetCompressedStreamAsync(cancellationToken);
currentStream = filePartEnumerator.Current.GetCompressedStream();
currentPartTotalReadBytes = 0;
@@ -61,15 +60,10 @@ namespace SharpCompress.Compressors.Rar
}
public override int Read(byte[] buffer, int offset, int count)
{
throw new NotImplementedException();
}
public override async ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken)
{
int totalRead = 0;
int currentOffset = 0;
int currentCount = buffer.Length;
int currentOffset = offset;
int currentCount = count;
while (currentCount > 0)
{
int readSize = currentCount;
@@ -78,7 +72,7 @@ namespace SharpCompress.Compressors.Rar
readSize = (int)(maxPosition - currentPosition);
}
int read = await currentStream.ReadAsync(buffer.Slice(currentOffset, readSize), cancellationToken);
int read = currentStream.Read(buffer, currentOffset, readSize);
if (read < 0)
{
throw new EndOfStreamException();
@@ -96,12 +90,12 @@ namespace SharpCompress.Compressors.Rar
throw new InvalidFormatException("Sharpcompress currently does not support multi-volume decryption.");
}
string fileName = filePartEnumerator.Current.FileHeader.FileName;
if (!await filePartEnumerator.MoveNextAsync(cancellationToken))
if (!filePartEnumerator.MoveNext())
{
throw new InvalidFormatException(
"Multi-part rar file is incomplete. Entry expects a new volume: " + fileName);
}
await InitializeNextFilePartAsync(cancellationToken);
InitializeNextFilePart();
}
else
{

View File

@@ -1,9 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Buffers;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.Xz
{
@@ -19,21 +16,20 @@ namespace SharpCompress.Compressors.Xz
{
return unchecked((uint)ReadLittleEndianInt32(reader));
}
public static async ValueTask<int> ReadLittleEndianInt32(this Stream stream, CancellationToken cancellationToken)
public static int ReadLittleEndianInt32(this Stream stream)
{
using var buffer = MemoryPool<byte>.Shared.Rent(4);
var slice = buffer.Memory.Slice(0, 4);
var read = await stream.ReadAsync(slice, cancellationToken);
if (read != 4)
Span<byte> bytes = stackalloc byte[4];
var read = stream.ReadFully(bytes);
if (!read)
{
throw new EndOfStreamException();
}
return BinaryPrimitives.ReadInt32LittleEndian(slice.Span);
return BinaryPrimitives.ReadInt32LittleEndian(bytes);
}
internal static async ValueTask<uint> ReadLittleEndianUInt32(this Stream stream, CancellationToken cancellationToken)
internal static uint ReadLittleEndianUInt32(this Stream stream)
{
return unchecked((uint)await ReadLittleEndianInt32(stream, cancellationToken));
return unchecked((uint)ReadLittleEndianInt32(stream));
}
internal static byte[] ToBigEndianBytes(this uint uint32)

View File

@@ -11,19 +11,19 @@ namespace SharpCompress.Compressors.Xz
private static UInt32[] defaultTable;
public static UInt32 Compute(ReadOnlyMemory<byte> buffer)
public static UInt32 Compute(byte[] buffer)
{
return Compute(DefaultSeed, buffer);
}
public static UInt32 Compute(UInt32 seed, ReadOnlyMemory<byte> buffer)
public static UInt32 Compute(UInt32 seed, byte[] buffer)
{
return Compute(DefaultPolynomial, seed, buffer);
}
public static UInt32 Compute(UInt32 polynomial, UInt32 seed, ReadOnlyMemory<byte> buffer)
public static UInt32 Compute(UInt32 polynomial, UInt32 seed, byte[] buffer)
{
return ~CalculateHash(InitializeTable(polynomial), seed, buffer.Span);
return ~CalculateHash(InitializeTable(polynomial), seed, buffer);
}
private static UInt32[] InitializeTable(UInt32 polynomial)

View File

@@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.Xz.Filters
{
@@ -53,6 +52,6 @@ namespace SharpCompress.Compressors.Xz.Filters
return filter;
}
public abstract ValueTask SetBaseStreamAsync(Stream stream);
public abstract void SetBaseStream(Stream stream);
}
}

View File

@@ -1,7 +1,5 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.LZMA;
namespace SharpCompress.Compressors.Xz.Filters
@@ -51,14 +49,19 @@ namespace SharpCompress.Compressors.Xz.Filters
{
}
public override async ValueTask SetBaseStreamAsync(Stream stream)
public override void SetBaseStream(Stream stream)
{
BaseStream = await LzmaStream.CreateAsync(new[] { _dictionarySize }, stream);
BaseStream = new LzmaStream(new[] { _dictionarySize }, stream);
}
public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
public override int Read(byte[] buffer, int offset, int count)
{
return BaseStream.ReadAsync(buffer, offset, count, cancellationToken);
return BaseStream.Read(buffer, offset, count);
}
public override int ReadByte()
{
return BaseStream.ReadByte();
}
}
}

View File

@@ -2,13 +2,10 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Xz
{
public abstract class ReadOnlyStream : AsyncStream
public abstract class ReadOnlyStream : Stream
{
public Stream BaseStream { get; protected set; }
@@ -26,6 +23,11 @@ namespace SharpCompress.Compressors.Xz
set => throw new NotSupportedException();
}
public override void Flush()
{
throw new NotSupportedException();
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException();
@@ -36,12 +38,7 @@ namespace SharpCompress.Compressors.Xz
throw new NotSupportedException();
}
public override ValueTask DisposeAsync()
{
return new();
}
public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotSupportedException();
}

View File

@@ -1,10 +1,8 @@
using System;
using System.Buffers;
#nullable disable
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.Xz.Filters;
namespace SharpCompress.Compressors.Xz
@@ -21,7 +19,7 @@ namespace SharpCompress.Compressors.Xz
private bool _streamConnected;
private int _numFilters;
private byte _blockHeaderSizeByte;
private Stream? _decomStream;
private Stream _decomStream;
private bool _endOfStream;
private bool _paddingSkipped;
private bool _crcChecked;
@@ -34,25 +32,25 @@ namespace SharpCompress.Compressors.Xz
_checkSize = checkSize;
}
public override async ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken = default)
public override int Read(byte[] buffer, int offset, int count)
{
int bytesRead = 0;
if (!HeaderIsLoaded)
{
await LoadHeader(cancellationToken);
LoadHeader();
}
if (!_streamConnected)
{
await ConnectStreamAsync();
ConnectStream();
}
if (!_endOfStream && _decomStream is not null)
if (!_endOfStream)
{
bytesRead = await _decomStream.ReadAsync(buffer, cancellationToken);
bytesRead = _decomStream.Read(buffer, offset, count);
}
if (bytesRead != buffer.Length)
if (bytesRead != count)
{
_endOfStream = true;
}
@@ -95,27 +93,24 @@ namespace SharpCompress.Compressors.Xz
_crcChecked = true;
}
private async ValueTask ConnectStreamAsync()
private void ConnectStream()
{
_decomStream = BaseStream;
while (Filters.Any())
{
BlockFilter filter = Filters.Pop();
await filter.SetBaseStreamAsync(_decomStream);
filter.SetBaseStream(_decomStream);
_decomStream = filter;
}
_streamConnected = true;
}
private async ValueTask LoadHeader(CancellationToken cancellationToken)
private void LoadHeader()
{
await ReadHeaderSize(cancellationToken);
using var blockHeaderWithoutCrc = MemoryPool<byte>.Shared.Rent(BlockHeaderSize - 4);
var headerCache = blockHeaderWithoutCrc.Memory.Slice(0, BlockHeaderSize - 4);
await CacheHeader(headerCache, cancellationToken);
ReadHeaderSize();
byte[] headerCache = CacheHeader();
//TODO: memory-size this
await using (var cache = new MemoryStream(headerCache.ToArray()))
using (var cache = new MemoryStream(headerCache))
using (var cachedReader = new BinaryReader(cache))
{
cachedReader.BaseStream.Position = 1; // skip the header size byte
@@ -125,30 +120,33 @@ namespace SharpCompress.Compressors.Xz
HeaderIsLoaded = true;
}
private async ValueTask ReadHeaderSize(CancellationToken cancellationToken)
private void ReadHeaderSize()
{
_blockHeaderSizeByte = await BaseStream.ReadByteAsync(cancellationToken);
_blockHeaderSizeByte = (byte)BaseStream.ReadByte();
if (_blockHeaderSizeByte == 0)
{
throw new XZIndexMarkerReachedException();
}
}
private async ValueTask CacheHeader(Memory<byte> blockHeaderWithoutCrc, CancellationToken cancellationToken)
private byte[] CacheHeader()
{
blockHeaderWithoutCrc.Span[0] = _blockHeaderSizeByte;
var read = await BaseStream.ReadAsync(blockHeaderWithoutCrc.Slice( 1, BlockHeaderSize - 5), cancellationToken);
byte[] blockHeaderWithoutCrc = new byte[BlockHeaderSize - 4];
blockHeaderWithoutCrc[0] = _blockHeaderSizeByte;
var read = BaseStream.Read(blockHeaderWithoutCrc, 1, BlockHeaderSize - 5);
if (read != BlockHeaderSize - 5)
{
throw new EndOfStreamException("Reached end of stream unexpectedly");
throw new EndOfStreamException("Reached end of stream unexectedly");
}
uint crc = await BaseStream.ReadLittleEndianUInt32(cancellationToken);
uint crc = BaseStream.ReadLittleEndianUInt32();
uint calcCrc = Crc32.Compute(blockHeaderWithoutCrc);
if (crc != calcCrc)
{
throw new InvalidDataException("Block header corrupt");
}
return blockHeaderWithoutCrc;
}
private void ReadBlockFlags(BinaryReader reader)

View File

@@ -1,65 +1,58 @@
using System;
using System.Buffers;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Xz
{
public class XZHeader
{
private readonly Stream _stream;
private static readonly ReadOnlyMemory<byte> MagicHeader = new(new byte[]{ 0xFD, 0x37, 0x7A, 0x58, 0x5a, 0x00 });
private readonly BinaryReader _reader;
private readonly byte[] MagicHeader = { 0xFD, 0x37, 0x7A, 0x58, 0x5a, 0x00 };
public CheckType BlockCheckType { get; private set; }
public int BlockCheckSize => ((((int)BlockCheckType) + 2) / 3) * 4;
public XZHeader(Stream reader)
public XZHeader(BinaryReader reader)
{
_stream = reader;
_reader = reader;
}
public static async ValueTask<XZHeader> FromStream(Stream stream, CancellationToken cancellationToken = default)
public static XZHeader FromStream(Stream stream)
{
var header = new XZHeader(new NonDisposingStream(stream));
await header.Process(cancellationToken);
var header = new XZHeader(new BinaryReader(new NonDisposingStream(stream), Encoding.UTF8));
header.Process();
return header;
}
public async ValueTask Process(CancellationToken cancellationToken = default)
public void Process()
{
using var header = MemoryPool<byte>.Shared.Rent(6);
await _stream.ReadAsync(header.Memory.Slice(0, 6), cancellationToken);
CheckMagicBytes(header.Memory.Slice(0, 6));
await ProcessStreamFlags(cancellationToken);
CheckMagicBytes(_reader.ReadBytes(6));
ProcessStreamFlags();
}
private async ValueTask ProcessStreamFlags(CancellationToken cancellationToken)
private void ProcessStreamFlags()
{
using var header = MemoryPool<byte>.Shared.Rent(6);
await _stream.ReadAsync(header.Memory.Slice(0, 2), cancellationToken);
BlockCheckType = (CheckType)(header.Memory.Span[1] & 0x0F);
byte futureUse = (byte)(header.Memory.Span[1] & 0xF0);
if (futureUse != 0 || header.Memory.Span[0] != 0)
{
throw new InvalidDataException("Unknown XZ Stream Version");
}
UInt32 crc = await _stream.ReadLittleEndianUInt32(cancellationToken);
UInt32 calcCrc = Crc32.Compute(header.Memory.Slice(0, 2));
byte[] streamFlags = _reader.ReadBytes(2);
UInt32 crc = _reader.ReadLittleEndianUInt32();
UInt32 calcCrc = Crc32.Compute(streamFlags);
if (crc != calcCrc)
{
throw new InvalidDataException("Stream header corrupt");
}
BlockCheckType = (CheckType)(streamFlags[1] & 0x0F);
byte futureUse = (byte)(streamFlags[1] & 0xF0);
if (futureUse != 0 || streamFlags[0] != 0)
{
throw new InvalidDataException("Unknown XZ Stream Version");
}
}
private void CheckMagicBytes(ReadOnlyMemory<byte> header)
private void CheckMagicBytes(byte[] header)
{
if (!header.Equals(MagicHeader))
if (!header.SequenceEqual(MagicHeader))
{
throw new InvalidDataException("Invalid XZ Stream");
}

View File

@@ -1,18 +1,18 @@
using System;
#nullable disable
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.Xz
{
[CLSCompliant(false)]
public sealed class XZStream : XZReadOnlyStream
{
public static async ValueTask<bool> IsXZStreamAsync(Stream stream, CancellationToken cancellationToken = default)
public static bool IsXZStream(Stream stream)
{
try
{
return null != await XZHeader.FromStream(stream, cancellationToken);
return null != XZHeader.FromStream(stream);
}
catch (Exception)
{
@@ -22,7 +22,7 @@ namespace SharpCompress.Compressors.Xz
private void AssertBlockCheckTypeIsSupported()
{
switch (Header?.BlockCheckType)
switch (Header.BlockCheckType)
{
case CheckType.NONE:
break;
@@ -36,11 +36,11 @@ namespace SharpCompress.Compressors.Xz
throw new NotSupportedException("Check Type unknown to this version of decoder.");
}
}
public XZHeader? Header { get; private set; }
public XZIndex? Index { get; private set; }
public XZFooter? Footer { get; private set; }
public XZHeader Header { get; private set; }
public XZIndex Index { get; private set; }
public XZFooter Footer { get; private set; }
public bool HeaderIsRead { get; private set; }
private XZBlock? _currentBlock;
private XZBlock _currentBlock;
private bool _endOfStream;
@@ -48,12 +48,7 @@ namespace SharpCompress.Compressors.Xz
{
}
public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
return await ReadAsync(new Memory<byte>(buffer, offset, count), cancellationToken);
}
public override async ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken = default)
public override int Read(byte[] buffer, int offset, int count)
{
int bytesRead = 0;
if (_endOfStream)
@@ -63,11 +58,11 @@ namespace SharpCompress.Compressors.Xz
if (!HeaderIsRead)
{
await ReadHeader();
ReadHeader();
}
bytesRead = await ReadBlocks(buffer, cancellationToken);
if (bytesRead < buffer.Length)
bytesRead = ReadBlocks(buffer, offset, count);
if (bytesRead < count)
{
_endOfStream = true;
ReadIndex();
@@ -76,9 +71,9 @@ namespace SharpCompress.Compressors.Xz
return bytesRead;
}
private async ValueTask ReadHeader()
private void ReadHeader()
{
Header = await XZHeader.FromStream(BaseStream);
Header = XZHeader.FromStream(BaseStream);
AssertBlockCheckTypeIsSupported();
HeaderIsRead = true;
}
@@ -95,29 +90,29 @@ namespace SharpCompress.Compressors.Xz
// TODO verify footer
}
private async ValueTask<int> ReadBlocks(Memory<byte> buffer, CancellationToken cancellationToken)
private int ReadBlocks(byte[] buffer, int offset, int count)
{
int bytesRead = 0;
if (_currentBlock is null)
{
_currentBlock = NextBlock();
NextBlock();
}
for (; ; )
{
try
{
if (bytesRead >= buffer.Length)
if (bytesRead >= count)
{
break;
}
int remaining = buffer.Length - bytesRead;
int newOffset = bytesRead;
int justRead = await _currentBlock.ReadAsync(buffer.Slice(newOffset, remaining), cancellationToken);
int remaining = count - bytesRead;
int newOffset = offset + bytesRead;
int justRead = _currentBlock.Read(buffer, newOffset, remaining);
if (justRead < remaining)
{
_currentBlock = NextBlock();
NextBlock();
}
bytesRead += justRead;
@@ -130,9 +125,9 @@ namespace SharpCompress.Compressors.Xz
return bytesRead;
}
private XZBlock NextBlock()
private void NextBlock()
{
return new XZBlock(BaseStream, Header!.BlockCheckType, Header!.BlockCheckSize);
_currentBlock = new XZBlock(BaseStream, Header.BlockCheckType, Header.BlockCheckSize);
}
}
}

View File

@@ -2,8 +2,6 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Crypto
{
@@ -54,20 +52,16 @@ namespace SharpCompress.Crypto
}
#endif
public override async ValueTask WriteAsync(ReadOnlyMemory<byte> buffer, CancellationToken cancellationToken = new CancellationToken())
{
await stream.WriteAsync(buffer, cancellationToken);
hash = CalculateCrc(table, hash, buffer.Span);
}
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotSupportedException();
stream.Write(buffer, offset, count);
hash = CalculateCrc(table, hash, buffer.AsSpan(offset, count));
}
public override void WriteByte(byte value)
{
throw new NotSupportedException();
stream.WriteByte(value);
hash = CalculateCrc(table, hash, value);
}
public override bool CanRead => stream.CanRead;

View File

@@ -1,76 +0,0 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.IO
{
public abstract class AsyncStream : Stream
{
protected sealed override void Dispose(bool disposing)
{
if (disposing)
{
throw new NotSupportedException();
}
}
public sealed override void Flush()
{
throw new NotSupportedException();
}
public abstract override ValueTask DisposeAsync();
public sealed override int Read(byte[] buffer, int offset, int count)
{
throw new NotSupportedException();
}
public sealed override void Write(byte[] buffer, int offset, int count)
{
throw new NotSupportedException();
}
public sealed override IAsyncResult BeginRead(byte[] buffer, int offset, int count, AsyncCallback? callback, object? state)
{
throw new NotSupportedException();
}
public sealed override int EndRead(IAsyncResult asyncResult)
{
throw new NotSupportedException();
}
public sealed override int ReadByte()
{
throw new NotSupportedException();
}
public sealed override void WriteByte(byte b)
{
throw new NotSupportedException();
}
public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
return await ReadAsync(new Memory<byte>(buffer, offset, count), cancellationToken);
}
public abstract override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken);
#if !NET461 && !NETSTANDARD2_0
public sealed override int Read(Span<byte> buffer)
{
throw new NotSupportedException();
}
public sealed override void Write(ReadOnlySpan<byte> buffer)
{
throw new NotSupportedException();
}
#endif
}
}

View File

@@ -1,7 +1,5 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.IO
{
@@ -27,11 +25,16 @@ namespace SharpCompress.IO
public override bool CanWrite => false;
public override void Flush()
{
throw new NotSupportedException();
}
public override long Length => BytesLeftToRead;
public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
public override int Read(byte[] buffer, int offset, int count)
{
if (count > BytesLeftToRead)
{
@@ -44,7 +47,7 @@ namespace SharpCompress.IO
{
cacheOffset = 0;
Stream.Position = position;
cacheLength = await Stream.ReadAsync(cache, 0, cache.Length, cancellationToken);
cacheLength = Stream.Read(cache, 0, cache.Length);
position += cacheLength;
}
@@ -71,5 +74,10 @@ namespace SharpCompress.IO
{
throw new NotSupportedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotSupportedException();
}
}
}

View File

@@ -1,7 +1,5 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.IO
{
@@ -19,15 +17,20 @@ namespace SharpCompress.IO
public override bool CanWrite => true;
public override Task FlushAsync(CancellationToken cancellationToken)
public override void Flush()
{
return Stream.FlushAsync(cancellationToken);
Stream.Flush();
}
public override long Length => throw new NotSupportedException();
public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
public override int Read(byte[] buffer, int offset, int count)
{
throw new NotSupportedException();
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException();
@@ -38,16 +41,16 @@ namespace SharpCompress.IO
throw new NotSupportedException();
}
public override async Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
public override void Write(byte[] buffer, int offset, int count)
{
await Stream.WriteAsync(buffer, offset, count, cancellationToken);
Stream.Write(buffer, offset, count);
Count += (uint)count;
}
public override async ValueTask WriteAsync(ReadOnlyMemory<byte> buffer, CancellationToken cancellationToken = default)
public override void WriteByte(byte value)
{
await Stream.WriteAsync(buffer, cancellationToken);
Count += (uint)buffer.Length;
Stream.WriteByte(value);
++Count;
}
}
}

View File

@@ -1,11 +1,9 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.IO
{
public class NonDisposingStream : AsyncStream
public class NonDisposingStream : Stream
{
public NonDisposingStream(Stream stream, bool throwOnDispose = false)
{
@@ -15,14 +13,12 @@ namespace SharpCompress.IO
public bool ThrowOnDispose { get; set; }
public override ValueTask DisposeAsync()
protected override void Dispose(bool disposing)
{
if (ThrowOnDispose)
{
throw new InvalidOperationException($"Attempt to dispose of a {nameof(NonDisposingStream)} when {nameof(ThrowOnDispose)} is {ThrowOnDispose}");
}
return new ValueTask();
}
protected Stream Stream { get; }
@@ -33,23 +29,18 @@ namespace SharpCompress.IO
public override bool CanWrite => Stream.CanWrite;
public override Task FlushAsync(CancellationToken cancellationToken)
public override void Flush()
{
return Stream.FlushAsync(cancellationToken);
Stream.Flush();
}
public override long Length => Stream.Length;
public override long Position { get => Stream.Position; set => Stream.Position = value; }
public override ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken)
public override int Read(byte[] buffer, int offset, int count)
{
return Stream.ReadAsync(buffer, cancellationToken);
}
public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
return Stream.ReadAsync(buffer, offset, count, cancellationToken);
return Stream.Read(buffer, offset, count);
}
public override long Seek(long offset, SeekOrigin origin)
@@ -62,14 +53,23 @@ namespace SharpCompress.IO
Stream.SetLength(value);
}
public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
public override void Write(byte[] buffer, int offset, int count)
{
return Stream.WriteAsync(buffer, offset, count, cancellationToken);
Stream.Write(buffer, offset, count);
}
public override ValueTask WriteAsync(ReadOnlyMemory<byte> buffer, CancellationToken cancellationToken = new CancellationToken())
#if !NET461 && !NETSTANDARD2_0
public override int Read(Span<byte> buffer)
{
return Stream.WriteAsync(buffer, cancellationToken);
return Stream.Read(buffer);
}
public override void Write(ReadOnlySpan<byte> buffer)
{
Stream.Write(buffer);
}
#endif
}
}

View File

@@ -1,7 +1,5 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.IO
{
@@ -30,47 +28,41 @@ namespace SharpCompress.IO
public override bool CanWrite => false;
public override void Flush()
{
throw new NotSupportedException();
}
public override long Length => throw new NotSupportedException();
public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
public override async ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken = default)
public override int Read(byte[] buffer, int offset, int count)
{
var count = buffer.Length;
if (BytesLeftToRead < count)
{
count = (int)BytesLeftToRead;
}
if (count == 0)
{
return 0;
}
int read = await Stream.ReadAsync(buffer.Slice(0, count), cancellationToken);
int read = Stream.Read(buffer, offset, count);
if (read > 0)
{
BytesLeftToRead -= read;
}
return read;
}
public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
if (BytesLeftToRead < count)
{
count = (int)BytesLeftToRead;
}
if (count == 0)
public override int ReadByte()
{
if (BytesLeftToRead <= 0)
{
return 0;
return -1;
}
int read = await Stream.ReadAsync(buffer, offset, count, cancellationToken);
if (read > 0)
int value = Stream.ReadByte();
if (value != -1)
{
BytesLeftToRead -= read;
--BytesLeftToRead;
}
return read;
return value;
}
public override long Seek(long offset, SeekOrigin origin)
@@ -82,5 +74,10 @@ namespace SharpCompress.IO
{
throw new NotSupportedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotSupportedException();
}
}
}

View File

@@ -1,14 +1,12 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.IO
{
internal class RewindableStream : Stream
{
private readonly Stream stream;
private MemoryStream bufferStream = new();
private MemoryStream bufferStream = new MemoryStream();
private bool isRewound;
private bool isDisposed;
@@ -111,11 +109,6 @@ namespace SharpCompress.IO
}
public override int Read(byte[] buffer, int offset, int count)
{
throw new NotImplementedException();
}
public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
//don't actually read if we don't really want to read anything
//currently a network stream bug on Windows for .NET Core
@@ -126,13 +119,13 @@ namespace SharpCompress.IO
int read;
if (isRewound && bufferStream.Position != bufferStream.Length)
{
read = await bufferStream.ReadAsync(buffer, offset, count, cancellationToken);
read = bufferStream.Read(buffer, offset, count);
if (read < count)
{
int tempRead = await stream.ReadAsync(buffer, read, count - read, cancellationToken);
int tempRead = stream.Read(buffer, offset + read, count - read);
if (IsRecording)
{
await bufferStream.WriteAsync(buffer, read, tempRead, cancellationToken);
bufferStream.Write(buffer, offset + read, tempRead);
}
read += tempRead;
}
@@ -144,48 +137,10 @@ namespace SharpCompress.IO
return read;
}
read = await stream.ReadAsync(buffer, cancellationToken);
read = stream.Read(buffer, offset, count);
if (IsRecording)
{
await bufferStream.WriteAsync(buffer, cancellationToken);
}
return read;
}
public override async ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken = default)
{
var count = buffer.Length;
//don't actually read if we don't really want to read anything
//currently a network stream bug on Windows for .NET Core
if (count == 0)
{
return 0;
}
int read;
if (isRewound && bufferStream.Position != bufferStream.Length)
{
read = await bufferStream.ReadAsync(buffer, cancellationToken);
if (read < count)
{
int tempRead = await stream.ReadAsync(buffer.Slice(read, count - read), cancellationToken);
if (IsRecording)
{
await bufferStream.WriteAsync(buffer.Slice(read, tempRead), cancellationToken);
}
read += tempRead;
}
if (bufferStream.Position == bufferStream.Length && !IsRecording)
{
isRewound = false;
bufferStream.SetLength(0);
}
return read;
}
read = await stream.ReadAsync(buffer, cancellationToken);
if (IsRecording)
{
await bufferStream.WriteAsync(buffer, cancellationToken);
bufferStream.Write(buffer, offset, read);
}
return read;
}

View File

@@ -1,31 +1,23 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
#nullable disable
using System;
using System.Collections;
using System.Collections.Generic;
namespace SharpCompress
{
internal sealed class LazyReadOnlyCollection<T> : IAsyncEnumerable<T>
internal sealed class LazyReadOnlyCollection<T> : ICollection<T>
{
private readonly List<T> backing = new();
private IAsyncEnumerator<T>? enumerator;
private readonly IAsyncEnumerable<T> enumerable;
private readonly List<T> backing = new List<T>();
private readonly IEnumerator<T> source;
private bool fullyLoaded;
public LazyReadOnlyCollection(IAsyncEnumerable<T> source)
public LazyReadOnlyCollection(IEnumerable<T> source)
{
enumerable = source;
this.source = source.GetEnumerator();
}
private IAsyncEnumerator<T> GetEnumerator()
{
if (enumerator is null)
{
enumerator = enumerable.GetAsyncEnumerator();
}
return enumerator;
}
private class LazyLoader : IAsyncEnumerator<T>
private class LazyLoader : IEnumerator<T>
{
private readonly LazyReadOnlyCollection<T> lazyReadOnlyCollection;
private bool disposed;
@@ -36,43 +28,58 @@ namespace SharpCompress
this.lazyReadOnlyCollection = lazyReadOnlyCollection;
}
#region IEnumerator<T> Members
public T Current => lazyReadOnlyCollection.backing[index];
public ValueTask DisposeAsync()
#endregion
#region IDisposable Members
public void Dispose()
{
if (!disposed)
{
disposed = true;
}
return new ValueTask(Task.CompletedTask);
}
#endregion
public async ValueTask<bool> MoveNextAsync()
#region IEnumerator Members
object IEnumerator.Current => Current;
public bool MoveNext()
{
if (index + 1 < lazyReadOnlyCollection.backing.Count)
{
index++;
return true;
}
if (!lazyReadOnlyCollection.fullyLoaded && await lazyReadOnlyCollection.GetEnumerator().MoveNextAsync())
if (!lazyReadOnlyCollection.fullyLoaded && lazyReadOnlyCollection.source.MoveNext())
{
lazyReadOnlyCollection.backing.Add(lazyReadOnlyCollection.GetEnumerator().Current);
lazyReadOnlyCollection.backing.Add(lazyReadOnlyCollection.source.Current);
index++;
return true;
}
lazyReadOnlyCollection.fullyLoaded = true;
return false;
}
public void Reset()
{
throw new NotSupportedException();
}
#endregion
}
internal async ValueTask EnsureFullyLoaded()
internal void EnsureFullyLoaded()
{
if (!fullyLoaded)
{
await foreach (var x in this)
{
}
this.ForEach(x => { });
fullyLoaded = true;
}
}
@@ -82,14 +89,65 @@ namespace SharpCompress
return backing;
}
#region ICollection<T> Members
public void Add(T item)
{
throw new NotSupportedException();
}
public void Clear()
{
throw new NotSupportedException();
}
public bool Contains(T item)
{
EnsureFullyLoaded();
return backing.Contains(item);
}
public void CopyTo(T[] array, int arrayIndex)
{
EnsureFullyLoaded();
backing.CopyTo(array, arrayIndex);
}
public int Count
{
get
{
EnsureFullyLoaded();
return backing.Count;
}
}
public bool IsReadOnly => true;
public bool Remove(T item)
{
throw new NotSupportedException();
}
#endregion
#region IEnumerable<T> Members
//TODO check for concurrent access
public IAsyncEnumerator<T> GetAsyncEnumerator(CancellationToken cancellationToken)
public IEnumerator<T> GetEnumerator()
{
return new LazyLoader(this);
}
#endregion
#region IEnumerable Members
IEnumerator IEnumerable.GetEnumerator()
{
return GetEnumerator();
}
#endregion
}
}

View File

@@ -1,18 +1,20 @@
#if NET461 || NETSTANDARD2_0
using System;
using System.Buffers;
using System.IO;
namespace System.IO
namespace SharpCompress
{
public static class StreamExtensions
internal static class StreamExtensions
{
public static int Read(this Stream stream, Span<byte> buffer)
internal static int Read(this Stream stream, Span<byte> buffer)
{
byte[] temp = ArrayPool<byte>.Shared.Rent(buffer.Length);
try
{
int read = stream.Read(buffer);
int read = stream.Read(temp, 0, buffer.Length);
temp.AsSpan(0, read).CopyTo(buffer);
@@ -24,7 +26,7 @@ namespace System.IO
}
}
public static void Write(this Stream stream, ReadOnlySpan<byte> buffer)
internal static void Write(this Stream stream, ReadOnlySpan<byte> buffer)
{
byte[] temp = ArrayPool<byte>.Shared.Rent(buffer.Length);

View File

@@ -1,15 +1,15 @@
#if NET461 || NETSTANDARD2_0
namespace System
namespace SharpCompress
{
public static class StringExtensions
internal static class StringExtensions
{
public static bool EndsWith(this string text, char value)
internal static bool EndsWith(this string text, char value)
{
return text.Length > 0 && text[text.Length - 1] == value;
}
public static bool Contains(this string text, char value)
internal static bool Contains(this string text, char value)
{
return text.IndexOf(value) > -1;
}

View File

@@ -2,8 +2,6 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Readers
@@ -16,7 +14,7 @@ namespace SharpCompress.Readers
where TVolume : Volume
{
private bool completed;
private IAsyncEnumerator<TEntry>? entriesForCurrentReadStream;
private IEnumerator<TEntry>? entriesForCurrentReadStream;
private bool wroteCurrentEntry;
public event EventHandler<ReaderExtractionEventArgs<IEntry>>? EntryExtractionProgress;
@@ -42,14 +40,14 @@ namespace SharpCompress.Readers
/// <summary>
/// Current file entry
/// </summary>
public TEntry? Entry => entriesForCurrentReadStream?.Current ?? default;
public TEntry Entry => entriesForCurrentReadStream!.Current;
#region IDisposable Members
public async ValueTask DisposeAsync()
public void Dispose()
{
await (entriesForCurrentReadStream?.DisposeAsync() ?? new ValueTask());
await Volume.DisposeAsync();
entriesForCurrentReadStream?.Dispose();
Volume?.Dispose();
}
#endregion
@@ -69,7 +67,7 @@ namespace SharpCompress.Readers
}
}
public async ValueTask<bool> MoveToNextEntryAsync(CancellationToken cancellationToken = default)
public bool MoveToNextEntry()
{
if (completed)
{
@@ -81,21 +79,14 @@ namespace SharpCompress.Readers
}
if (entriesForCurrentReadStream is null)
{
var stream = await RequestInitialStream(cancellationToken);
if (stream is null || !stream.CanRead)
{
throw new MultipartStreamRequiredException("File is split into multiple archives: '"
+ (Entry?.Key ?? "unknown") +
"'. A new readable stream is required. Use Cancel if it was intended.");
}
entriesForCurrentReadStream = GetEntries(stream, cancellationToken).GetAsyncEnumerator(cancellationToken);
}
else if (!wroteCurrentEntry)
return LoadStreamForReading(RequestInitialStream());
}
if (!wroteCurrentEntry)
{
await SkipEntry(cancellationToken);
SkipEntry();
}
wroteCurrentEntry = false;
if (await entriesForCurrentReadStream.MoveNextAsync())
if (NextEntryForCurrentStream())
{
return true;
}
@@ -103,29 +94,43 @@ namespace SharpCompress.Readers
return false;
}
protected virtual ValueTask<Stream> RequestInitialStream(CancellationToken cancellationToken)
protected bool LoadStreamForReading(Stream stream)
{
return new(Volume.Stream);
entriesForCurrentReadStream?.Dispose();
if ((stream is null) || (!stream.CanRead))
{
throw new MultipartStreamRequiredException("File is split into multiple archives: '"
+ Entry.Key +
"'. A new readable stream is required. Use Cancel if it was intended.");
}
entriesForCurrentReadStream = GetEntries(stream).GetEnumerator();
return entriesForCurrentReadStream.MoveNext();
}
protected abstract IAsyncEnumerable<TEntry> GetEntries(Stream stream, CancellationToken cancellationToken);
protected virtual Stream RequestInitialStream()
{
return Volume.Stream;
}
internal virtual bool NextEntryForCurrentStream()
{
return entriesForCurrentReadStream!.MoveNext();
}
protected abstract IEnumerable<TEntry> GetEntries(Stream stream);
#region Entry Skip/Write
private async ValueTask SkipEntry(CancellationToken cancellationToken)
private void SkipEntry()
{
if (Entry?.IsDirectory != true)
if (!Entry.IsDirectory)
{
await SkipAsync(cancellationToken);
Skip();
}
}
private async ValueTask SkipAsync(CancellationToken cancellationToken)
private void Skip()
{
if (Entry is null)
{
return;
}
if (ArchiveType != ArchiveType.Rar
&& !Entry.IsSolid
&& Entry.CompressedSize > 0)
@@ -137,17 +142,19 @@ namespace SharpCompress.Readers
if (rawStream != null)
{
var bytesToAdvance = Entry.CompressedSize;
await rawStream.SkipAsync(bytesToAdvance, cancellationToken: cancellationToken);
rawStream.Skip(bytesToAdvance);
part.Skipped = true;
return;
}
}
//don't know the size so we have to try to decompress to skip
await using var s = await OpenEntryStreamAsync(cancellationToken);
await s.SkipAsync(cancellationToken);
using (var s = OpenEntryStream())
{
s.Skip();
}
}
public async ValueTask WriteEntryToAsync(Stream writableStream, CancellationToken cancellationToken = default)
public void WriteEntryTo(Stream writableStream)
{
if (wroteCurrentEntry)
{
@@ -158,28 +165,26 @@ namespace SharpCompress.Readers
throw new ArgumentNullException("A writable Stream was required. Use Cancel if that was intended.");
}
await WriteAsync(writableStream, cancellationToken);
Write(writableStream);
wroteCurrentEntry = true;
}
private async ValueTask WriteAsync(Stream writeStream, CancellationToken cancellationToken)
internal void Write(Stream writeStream)
{
if (Entry is null)
{
throw new ArgumentException("Entry is null");
}
var streamListener = this as IReaderExtractionListener;
await using Stream s = await OpenEntryStreamAsync(cancellationToken);
await s.TransferToAsync(writeStream, Entry, streamListener, cancellationToken);
using (Stream s = OpenEntryStream())
{
s.TransferTo(writeStream, Entry, streamListener);
}
}
public async ValueTask<EntryStream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
public EntryStream OpenEntryStream()
{
if (wroteCurrentEntry)
{
throw new ArgumentException("WriteEntryTo or OpenEntryStream can only be called once.");
}
var stream = await GetEntryStreamAsync(cancellationToken);
var stream = GetEntryStream();
wroteCurrentEntry = true;
return stream;
}
@@ -189,21 +194,17 @@ namespace SharpCompress.Readers
/// </summary>
protected EntryStream CreateEntryStream(Stream decompressed)
{
return new(this, decompressed);
return new EntryStream(this, decompressed);
}
protected async ValueTask<EntryStream> GetEntryStreamAsync(CancellationToken cancellationToken)
protected virtual EntryStream GetEntryStream()
{
if (Entry is null)
{
throw new ArgumentException("Entry is null");
}
return CreateEntryStream(await Entry.Parts.First().GetCompressedStreamAsync(cancellationToken));
return CreateEntryStream(Entry.Parts.First().GetCompressedStream());
}
#endregion
IEntry? IReader.Entry => Entry;
IEntry IReader.Entry => Entry;
void IExtractionListener.FireCompressedBytesRead(long currentPartCompressedBytes, long compressedReadBytes)
{

View File

@@ -1,6 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
@@ -32,9 +31,9 @@ namespace SharpCompress.Readers.GZip
#endregion Open
protected override IAsyncEnumerable<GZipEntry> GetEntries(Stream stream, CancellationToken cancellationToken)
protected override IEnumerable<GZipEntry> GetEntries(Stream stream)
{
return GZipEntry.GetEntries(stream, Options, cancellationToken);
return GZipEntry.GetEntries(stream, Options);
}
}
}

View File

@@ -1,12 +1,10 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Readers
{
public interface IReader : IAsyncDisposable
public interface IReader : IDisposable
{
event EventHandler<ReaderExtractionEventArgs<IEntry>> EntryExtractionProgress;
@@ -15,13 +13,13 @@ namespace SharpCompress.Readers
ArchiveType ArchiveType { get; }
IEntry? Entry { get; }
IEntry Entry { get; }
/// <summary>
/// Decompresses the current entry to the stream. This cannot be called twice for the current entry.
/// </summary>
/// <param name="writableStream"></param>
ValueTask WriteEntryToAsync(Stream writableStream, CancellationToken cancellationToken = default);
void WriteEntryTo(Stream writableStream);
bool Cancelled { get; }
void Cancel();
@@ -30,12 +28,12 @@ namespace SharpCompress.Readers
/// Moves to the next entry by reading more data from the underlying stream. This skips if data has not been read.
/// </summary>
/// <returns></returns>
ValueTask<bool> MoveToNextEntryAsync(CancellationToken cancellationToken = default);
bool MoveToNextEntry();
/// <summary>
/// Opens the current entry as a stream that will decompress as it is read.
/// Read the entire stream or use SkipEntry on EntryStream.
/// </summary>
ValueTask<EntryStream> OpenEntryStreamAsync(CancellationToken cancellationToken = default);
EntryStream OpenEntryStream();
}
}

View File

@@ -1,73 +1,63 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using System.IO;
using SharpCompress.Common;
namespace SharpCompress.Readers
{
public static class IReaderExtensions
{
public static async ValueTask WriteEntryToAsync(this IReader reader, string filePath)
public static void WriteEntryTo(this IReader reader, string filePath)
{
await using Stream stream = File.Open(filePath, FileMode.Create, FileAccess.Write);
await reader.WriteEntryToAsync(stream);
using (Stream stream = File.Open(filePath, FileMode.Create, FileAccess.Write))
{
reader.WriteEntryTo(stream);
}
}
public static async ValueTask WriteEntryToAsync(this IReader reader, FileInfo filePath)
public static void WriteEntryTo(this IReader reader, FileInfo filePath)
{
await using Stream stream = filePath.Open(FileMode.Create);
await reader.WriteEntryToAsync(stream);
using (Stream stream = filePath.Open(FileMode.Create))
{
reader.WriteEntryTo(stream);
}
}
/// <summary>
/// Extract all remaining unread entries to specific directory, retaining filename
/// </summary>
public static async ValueTask WriteAllToDirectoryAsync(this IReader reader, string destinationDirectory,
public static void WriteAllToDirectory(this IReader reader, string destinationDirectory,
ExtractionOptions? options = null)
{
while (await reader.MoveToNextEntryAsync())
while (reader.MoveToNextEntry())
{
await reader.WriteEntryToDirectoryAsync(destinationDirectory, options);
reader.WriteEntryToDirectory(destinationDirectory, options);
}
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static ValueTask WriteEntryToDirectoryAsync(this IReader reader, string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default)
public static void WriteEntryToDirectory(this IReader reader, string destinationDirectory,
ExtractionOptions? options = null)
{
if (reader.Entry is null)
{
throw new ArgumentException("Entry is null");
}
return ExtractionMethods.WriteEntryToDirectoryAsync(reader.Entry, destinationDirectory, options,
async (x, o, ct) =>
{
await reader.WriteEntryToFileAsync(x, o, ct);
}, cancellationToken);
ExtractionMethods.WriteEntryToDirectory(reader.Entry, destinationDirectory, options,
reader.WriteEntryToFile);
}
/// <summary>
/// Extract to specific file
/// </summary>
public static async ValueTask WriteEntryToFileAsync(this IReader reader,
public static void WriteEntryToFile(this IReader reader,
string destinationFileName,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default)
ExtractionOptions? options = null)
{
if (reader.Entry is null)
{
throw new ArgumentException("Entry is null");
}
await ExtractionMethods.WriteEntryToFileAsync(reader.Entry, destinationFileName, options,
async (x, fm, ct) =>
ExtractionMethods.WriteEntryToFile(reader.Entry, destinationFileName, options,
(x, fm) =>
{
await using FileStream fs = File.Open(x, fm);
await reader.WriteEntryToAsync(fs, ct);
}, cancellationToken);
using (FileStream fs = File.Open(destinationFileName, fm))
{
reader.WriteEntryTo(fs);
}
});
}
}
}

View File

@@ -1,9 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives.GZip;
//using SharpCompress.Archives.Rar;
using SharpCompress.Archives.Rar;
using SharpCompress.Archives.Tar;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
@@ -12,7 +10,7 @@ using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.Deflate;
using SharpCompress.IO;
using SharpCompress.Readers.GZip;
//using SharpCompress.Readers.Rar;
using SharpCompress.Readers.Rar;
using SharpCompress.Readers.Tar;
using SharpCompress.Readers.Zip;
using SharpCompress.Compressors.LZMA;
@@ -28,80 +26,80 @@ namespace SharpCompress.Readers
/// <param name="stream"></param>
/// <param name="options"></param>
/// <returns></returns>
public static async ValueTask<IReader> OpenAsync(Stream stream, ReaderOptions? options = null, CancellationToken cancellationToken = default)
public static IReader Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
options ??= new ReaderOptions()
{
LeaveStreamOpen = false
};
RewindableStream rewindableStream = new(stream);
options = options ?? new ReaderOptions()
{
LeaveStreamOpen = false
};
RewindableStream rewindableStream = new RewindableStream(stream);
rewindableStream.StartRecording();
if (await ZipArchive.IsZipFileAsync(rewindableStream, options.Password, cancellationToken))
if (ZipArchive.IsZipFile(rewindableStream, options.Password))
{
rewindableStream.Rewind(true);
return ZipReader.Open(rewindableStream, options);
}
rewindableStream.Rewind(false);
if (await GZipArchive.IsGZipFileAsync(rewindableStream, cancellationToken))
if (GZipArchive.IsGZipFile(rewindableStream))
{
rewindableStream.Rewind(false);
GZipStream testStream = new(rewindableStream, CompressionMode.Decompress);
if (await TarArchive.IsTarFileAsync(testStream, cancellationToken))
GZipStream testStream = new GZipStream(rewindableStream, CompressionMode.Decompress);
if (TarArchive.IsTarFile(testStream))
{
rewindableStream.Rewind(true);
return new TarReader(rewindableStream, options, CompressionType.GZip);
}
}
rewindableStream.Rewind(true);
return GZipReader.Open(rewindableStream, options);
}
rewindableStream.Rewind(false);
if (await BZip2Stream.IsBZip2Async(rewindableStream, cancellationToken))
if (BZip2Stream.IsBZip2(rewindableStream))
{
rewindableStream.Rewind(false);
var testStream = await BZip2Stream.CreateAsync(new NonDisposingStream(rewindableStream), CompressionMode.Decompress, false, cancellationToken);
if (await TarArchive.IsTarFileAsync(testStream, cancellationToken))
BZip2Stream testStream = new BZip2Stream(new NonDisposingStream(rewindableStream), CompressionMode.Decompress, false);
if (TarArchive.IsTarFile(testStream))
{
rewindableStream.Rewind(true);
return new TarReader(rewindableStream, options, CompressionType.BZip2);
}
}
rewindableStream.Rewind(false);
if (await LZipStream.IsLZipFileAsync(rewindableStream))
{
rewindableStream.Rewind(false);
var testStream = await LZipStream.CreateAsync(new NonDisposingStream(rewindableStream), CompressionMode.Decompress);
if (await TarArchive.IsTarFileAsync(testStream, cancellationToken))
{
rewindableStream.Rewind(true);
return new TarReader(rewindableStream, options, CompressionType.LZip);
}
}
}
/* rewindableStream.Rewind(false);
if (RarArchive.IsRarFile(rewindableStream, options))
{
rewindableStream.Rewind(true);
return RarReader.Open(rewindableStream, options);
} */
rewindableStream.Rewind(false);
if (await TarArchive.IsTarFileAsync(rewindableStream, cancellationToken))
if (LZipStream.IsLZipFile(rewindableStream))
{
rewindableStream.Rewind(true);
return await TarReader.OpenAsync(rewindableStream, options, cancellationToken);
}
rewindableStream.Rewind(false);
LZipStream testStream = new LZipStream(new NonDisposingStream(rewindableStream), CompressionMode.Decompress);
if (TarArchive.IsTarFile(testStream))
{
rewindableStream.Rewind(true);
return new TarReader(rewindableStream, options, CompressionType.LZip);
}
}
rewindableStream.Rewind(false);
if (await XZStream.IsXZStreamAsync(rewindableStream, cancellationToken))
if (RarArchive.IsRarFile(rewindableStream, options))
{
rewindableStream.Rewind(true);
XZStream testStream = new(rewindableStream);
if (await TarArchive.IsTarFileAsync(testStream, cancellationToken))
return RarReader.Open(rewindableStream, options);
}
rewindableStream.Rewind(false);
if (TarArchive.IsTarFile(rewindableStream))
{
rewindableStream.Rewind(true);
return TarReader.Open(rewindableStream, options);
}
rewindableStream.Rewind(false);
if (XZStream.IsXZStream(rewindableStream))
{
rewindableStream.Rewind(true);
XZStream testStream = new XZStream(rewindableStream);
if (TarArchive.IsTarFile(testStream))
{
rewindableStream.Rewind(true);
return new TarReader(rewindableStream, options, CompressionType.Xz);
}
}
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Reader Formats: Zip, GZip, BZip2, Tar, Rar, LZip, XZ");
}

View File

@@ -1,8 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
@@ -29,22 +27,22 @@ namespace SharpCompress.Readers.Tar
public override TarVolume Volume { get; }
protected override async ValueTask<Stream> RequestInitialStream(CancellationToken cancellationToken)
protected override Stream RequestInitialStream()
{
var stream = await base.RequestInitialStream(cancellationToken);
var stream = base.RequestInitialStream();
switch (compressionType)
{
/* case CompressionType.BZip2:
case CompressionType.BZip2:
{
return await BZip2Stream.CreateAsync(stream, CompressionMode.Decompress, false, cancellationToken);
} */
return new BZip2Stream(stream, CompressionMode.Decompress, false);
}
case CompressionType.GZip:
{
return new GZipStream(stream, CompressionMode.Decompress);
}
case CompressionType.LZip:
{
return await LZipStream.CreateAsync(stream, CompressionMode.Decompress);
return new LZipStream(stream, CompressionMode.Decompress);
}
case CompressionType.Xz:
{
@@ -69,17 +67,17 @@ namespace SharpCompress.Readers.Tar
/// <param name="stream"></param>
/// <param name="options"></param>
/// <returns></returns>
public static async ValueTask<TarReader> OpenAsync(Stream stream, ReaderOptions? options = null, CancellationToken cancellationToken = default)
public static TarReader Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
options ??= new ReaderOptions();
RewindableStream rewindableStream = new(stream);
options = options ?? new ReaderOptions();
RewindableStream rewindableStream = new RewindableStream(stream);
rewindableStream.StartRecording();
if (await GZipArchive.IsGZipFileAsync(rewindableStream, cancellationToken))
if (GZipArchive.IsGZipFile(rewindableStream))
{
rewindableStream.Rewind(false);
GZipStream testStream = new(rewindableStream, CompressionMode.Decompress);
if (await TarArchive.IsTarFileAsync(testStream, cancellationToken))
GZipStream testStream = new GZipStream(rewindableStream, CompressionMode.Decompress);
if (TarArchive.IsTarFile(testStream))
{
rewindableStream.Rewind(true);
return new TarReader(rewindableStream, options, CompressionType.GZip);
@@ -87,25 +85,25 @@ namespace SharpCompress.Readers.Tar
throw new InvalidFormatException("Not a tar file.");
}
/*rewindableStream.Rewind(false);
if (await BZip2Stream.IsBZip2Async(rewindableStream, cancellationToken))
rewindableStream.Rewind(false);
if (BZip2Stream.IsBZip2(rewindableStream))
{
rewindableStream.Rewind(false);
var testStream = await BZip2Stream.CreateAsync(rewindableStream, CompressionMode.Decompress, false, cancellationToken);
if (await TarArchive.IsTarFileAsync(testStream, cancellationToken))
BZip2Stream testStream = new BZip2Stream(rewindableStream, CompressionMode.Decompress, false);
if (TarArchive.IsTarFile(testStream))
{
rewindableStream.Rewind(true);
return new TarReader(rewindableStream, options, CompressionType.BZip2);
}
throw new InvalidFormatException("Not a tar file.");
} */
}
rewindableStream.Rewind(false);
if (await LZipStream.IsLZipFileAsync(rewindableStream))
if (LZipStream.IsLZipFile(rewindableStream))
{
rewindableStream.Rewind(false);
var testStream = await LZipStream.CreateAsync(rewindableStream, CompressionMode.Decompress);
if (await TarArchive.IsTarFileAsync(testStream, cancellationToken))
LZipStream testStream = new LZipStream(rewindableStream, CompressionMode.Decompress);
if (TarArchive.IsTarFile(testStream))
{
rewindableStream.Rewind(true);
return new TarReader(rewindableStream, options, CompressionType.LZip);
@@ -118,9 +116,9 @@ namespace SharpCompress.Readers.Tar
#endregion Open
protected override IAsyncEnumerable<TarEntry> GetEntries(Stream stream, CancellationToken cancellationToken)
protected override IEnumerable<TarEntry> GetEntries(Stream stream)
{
return TarEntry.GetEntries(StreamingMode.Streaming, stream, compressionType, Options.ArchiveEncoding, cancellationToken);
return TarEntry.GetEntries(StreamingMode.Streaming, stream, compressionType, Options.ArchiveEncoding);
}
}
}

View File

@@ -1,12 +1,8 @@
using System.Collections.Generic;
using System.IO;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
namespace SharpCompress.Readers.Zip
{
@@ -39,18 +35,9 @@ namespace SharpCompress.Readers.Zip
#endregion Open
protected override async IAsyncEnumerable<ZipEntry> GetEntries(Stream stream, [EnumeratorCancellation] CancellationToken cancellationToken)
protected override IEnumerable<ZipEntry> GetEntries(Stream stream)
{
RewindableStream rewindableStream;
if (stream is RewindableStream rs)
{
rewindableStream = rs;
}
else
{
rewindableStream = new RewindableStream(stream);
}
await foreach (ZipHeader h in _headerFactory.ReadStreamHeader(rewindableStream, cancellationToken).WithCancellation(cancellationToken))
foreach (ZipHeader h in _headerFactory.ReadStreamHeader(stream))
{
if (h != null)
{

View File

@@ -2,13 +2,12 @@
<PropertyGroup>
<AssemblyTitle>SharpCompress - Pure C# Decompression/Compression</AssemblyTitle>
<NeutralLanguage>en-US</NeutralLanguage>
<VersionPrefix>0.28.0</VersionPrefix>
<AssemblyVersion>0.28.0</AssemblyVersion>
<FileVersion>0.28.0</FileVersion>
<VersionPrefix>0.28.2</VersionPrefix>
<AssemblyVersion>0.28.2</AssemblyVersion>
<FileVersion>0.28.2</FileVersion>
<Authors>Adam Hathcock</Authors>
<TargetFrameworks>netstandard2.1;netcoreapp3.1;net5.0</TargetFrameworks>
<TargetFrameworks>netstandard2.0;netstandard2.1;netcoreapp3.1;net5.0</TargetFrameworks>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<WarningsAsErrors>true</WarningsAsErrors>
<AllowUnsafeBlocks>false</AllowUnsafeBlocks>
<AssemblyName>SharpCompress</AssemblyName>
<AssemblyOriginatorKeyFile>../../SharpCompress.snk</AssemblyOriginatorKeyFile>
@@ -30,39 +29,13 @@
<ItemGroup>
<PackageReference Include="Microsoft.SourceLink.GitHub" Version="1.0.0" PrivateAssets="All" />
<PackageReference Include="System.Linq.Async" Version="5.0.0" />
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard2.1' ">
<PackageReference Include="System.Text.Encoding.CodePages" Version="5.0.0" />
</ItemGroup>
<ItemGroup>
<Compile Remove="Archives\SevenZip\**" />
<Compile Remove="Archives\Rar\**" />
<Compile Remove="Readers\Rar\**" />
<Compile Remove="Common\Rar\**" />
<Compile Remove="Common\SevenZip\ArchiveDatabase.cs" />
<Compile Remove="Common\SevenZip\ArchiveReader.cs" />
<Compile Remove="Common\SevenZip\CStreamSwitch.cs" />
<Compile Remove="Common\SevenZip\SevenZipEntry.cs" />
<Compile Remove="Common\SevenZip\SevenZipFilePart.cs" />
<Compile Remove="Common\SevenZip\SevenZipVolume.cs" />
<Compile Remove="Compressors\Rar\**" />
<Compile Remove="Compressors\PPMd\**" />
</ItemGroup>
<ItemGroup>
<EmbeddedResource Remove="Archives\SevenZip\**" />
<EmbeddedResource Remove="Readers\Rar\**" />
<EmbeddedResource Remove="Common\Rar\**" />
<EmbeddedResource Remove="Compressors\Rar\**" />
<EmbeddedResource Remove="Compressors\PPMd\**" />
</ItemGroup>
<ItemGroup>
<None Remove="Archives\SevenZip\**" />
<None Remove="Archives\Rar\**" />
<None Remove="Readers\Rar\**" />
<None Remove="Common\Rar\**" />
<None Remove="Compressors\Rar\**" />
<None Remove="Compressors\PPMd\**" />
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard2.0' ">
<PackageReference Include="System.Text.Encoding.CodePages" Version="5.0.0" />
<PackageReference Include="System.Memory" Version="4.5.4" />
</ItemGroup>
</Project>

View File

@@ -1,108 +1,18 @@
using System;
using System.Buffers;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Runtime.InteropServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Readers;
namespace SharpCompress
{
internal static class Utility
{
public static async ValueTask ForEachAsync<T>(this IEnumerable<T> collection, Func<T, Task> action)
public static ReadOnlyCollection<T> ToReadOnly<T>(this ICollection<T> items)
{
foreach (T item in collection)
{
await action(item);
}
}
private static async ValueTask WritePrimitive<T>(this Stream stream, Action<Memory<byte>> func, CancellationToken cancellationToken)
where T : struct
{
var bytes = Marshal.SizeOf<T>();
using var buffer = MemoryPool<byte>.Shared.Rent(bytes);
var memory = buffer.Memory.Slice(0, bytes);
func(memory);
await stream.WriteAsync(memory, cancellationToken);
}
public static ValueTask WriteByteAsync(this Stream stream, byte val, CancellationToken cancellationToken = default)
{
return stream.WritePrimitive<byte>( x => x.Span[0] = val, cancellationToken);
}
public static async ValueTask WriteBytes(this Stream stream, params byte[] val)
{
using var buffer = MemoryPool<byte>.Shared.Rent(val.Length);
var memory = buffer.Memory.Slice(0, val.Length);
val.CopyTo(memory);
await stream.WriteAsync(memory);
}
public static ValueTask WriteUInt16(this Stream stream, ushort val, CancellationToken cancellationToken =default)
{
return stream.WritePrimitive<ushort>( x => BinaryPrimitives.WriteUInt16LittleEndian(x.Span, val), cancellationToken);
}
public static ValueTask WriteUInt32(this Stream stream, uint val, CancellationToken cancellationToken= default)
{
return stream.WritePrimitive<uint>( x => BinaryPrimitives.WriteUInt32LittleEndian(x.Span, val), cancellationToken);
}
public static ValueTask WriteUInt64(this Stream stream, ulong val, CancellationToken cancellationToken= default)
{
return stream.WritePrimitive<ulong>( x => BinaryPrimitives.WriteUInt64LittleEndian(x.Span, val), cancellationToken);
}
private static async ValueTask<T?> ReadPrimitive<T>(this Stream stream, Func<ReadOnlyMemory<byte>, T> func, CancellationToken cancellationToken)
where T : struct
{
var bytes = Marshal.SizeOf<T>();
using var buffer = MemoryPool<byte>.Shared.Rent(bytes);
var memory = buffer.Memory.Slice(0, bytes);
var n = await stream.ReadAsync(memory, cancellationToken);
if (n != memory.Length)
{
return null;
}
return func(memory);
}
public static async ValueTask<byte> ReadByteAsync(this Stream stream, CancellationToken cancellationToken = default)
{
return await stream.ReadPrimitive(x => x.Span[0], cancellationToken) ?? default;
}
public static async ValueTask<ushort> ReadUInt16(this Stream stream, CancellationToken cancellationToken)
{
return await stream.ReadPrimitive( x => BinaryPrimitives.ReadUInt16LittleEndian(x.Span), cancellationToken)?? default;
}
public static async ValueTask<uint> ReadUInt32(this Stream stream, CancellationToken cancellationToken)
{
return await stream.ReadPrimitive( x => BinaryPrimitives.ReadUInt32LittleEndian(x.Span), cancellationToken)?? default;
}
public static ValueTask<uint?> ReadUInt32OrNull(this Stream stream, CancellationToken cancellationToken)
{
return stream.ReadPrimitive(x => BinaryPrimitives.ReadUInt32LittleEndian(x.Span), cancellationToken);
}
public static async ValueTask<int> ReadInt32(this Stream stream, CancellationToken cancellationToken)
{
return await stream.ReadPrimitive( x => BinaryPrimitives.ReadInt32LittleEndian(x.Span), cancellationToken)?? default;
}
public static async ValueTask<ulong> ReadUInt64(this Stream stream, CancellationToken cancellationToken)
{
return await stream.ReadPrimitive( x => BinaryPrimitives.ReadUInt64LittleEndian(x.Span), cancellationToken)?? default;
return new ReadOnlyCollection<T>(items);
}
public static async ValueTask<byte[]> ReadBytes(this Stream stream, int bytes, CancellationToken cancellationToken)
{
using var buffer = MemoryPool<byte>.Shared.Rent(bytes);
await stream.ReadAsync(buffer.Memory.Slice(0, bytes), cancellationToken);
return buffer.Memory.Slice(0, bytes).ToArray();
}
/// <summary>
/// Performs an unsigned bitwise right shift with the specified number
/// </summary>
@@ -182,21 +92,13 @@ namespace SharpCompress
{
yield return item;
}
public static async IAsyncEnumerable<T> AsAsyncEnumerable<T>(this T item)
{
await Task.CompletedTask;
yield return item;
}
public static T CheckNotNull<T>(this T? obj, string name)
where T : class
public static void CheckNotNull(this object obj, string name)
{
if (obj is null)
{
throw new ArgumentNullException(name);
}
return obj;
}
public static void CheckNotNullOrEmpty(this string obj, string name)
@@ -207,36 +109,6 @@ namespace SharpCompress
throw new ArgumentException("String is empty.", name);
}
}
public static async ValueTask SkipAsync(this Stream source, long advanceAmount, CancellationToken cancellationToken = default)
{
if (source.CanSeek)
{
source.Position += advanceAmount;
return;
}
using var buffer = MemoryPool<byte>.Shared.Rent(81920);
do
{
var readCount = 81920;
if (readCount > advanceAmount)
{
readCount = (int)advanceAmount;
}
int read = await source.ReadAsync(buffer.Memory.Slice(0, readCount), cancellationToken);
if (read <= 0)
{
break;
}
advanceAmount -= read;
if (advanceAmount == 0)
{
break;
}
}
while (true);
}
public static void Skip(this Stream source, long advanceAmount)
{
@@ -292,15 +164,6 @@ namespace SharpCompress
ArrayPool<byte>.Shared.Return(buffer);
}
}
public static async ValueTask SkipAsync(this Stream source, CancellationToken cancellationToken)
{
using var buffer = MemoryPool<byte>.Shared.Rent(81920);
do
{
}
while (await source.ReadAsync(buffer.Memory.Slice(0, 81920), cancellationToken) > 0);
}
public static DateTime DosDateToDateTime(UInt16 iDate, UInt16 iTime)
{
@@ -366,24 +229,6 @@ namespace SharpCompress
DateTime sTime = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
return sTime.AddSeconds(unixtime);
}
public static async Task<long> TransferToAsync(this Stream source, Stream destination, CancellationToken cancellationToken)
{
using var buffer = MemoryPool<byte>.Shared.Rent(81920);
long total = 0;
while (true)
{
int bytesRead = await source.ReadAsync(buffer.Memory.Slice(0, 81920), cancellationToken);
if (bytesRead == 0)
{
break;
}
total += bytesRead;
await destination.WriteAsync(buffer.Memory.Slice(0, bytesRead), cancellationToken);
}
return total;
}
public static long TransferTo(this Stream source, Stream destination)
{
@@ -391,8 +236,7 @@ namespace SharpCompress
try
{
long total = 0;
var count = 0;
while ((count = source.Read(array, 0, array.Length)) != 0)
while (ReadTransferBlock(source, array, out int count))
{
total += count;
destination.Write(array, 0, count);
@@ -405,28 +249,53 @@ namespace SharpCompress
}
}
public static async ValueTask<long> TransferToAsync(this Stream source, Stream destination, Common.Entry entry, IReaderExtractionListener readerExtractionListener, CancellationToken cancellationToken)
public static long TransferTo(this Stream source, Stream destination, Common.Entry entry, IReaderExtractionListener readerExtractionListener)
{
using var buffer = MemoryPool<byte>.Shared.Rent(81920);
var iterations = 0;
long total = 0;
var count = 0;
var slice = buffer.Memory.Slice(0, 81920);
while ((count = await source.ReadAsync(slice, cancellationToken)) != 0)
byte[] array = GetTransferByteArray();
try
{
total += count;
await destination.WriteAsync(slice.Slice(0, count), cancellationToken);
iterations++;
readerExtractionListener.FireEntryExtractionProgress(entry, total, iterations);
var iterations = 0;
long total = 0;
while (ReadTransferBlock(source, array, out int count))
{
total += count;
destination.Write(array, 0, count);
iterations++;
readerExtractionListener.FireEntryExtractionProgress(entry, total, iterations);
}
return total;
}
return total;
finally
{
ArrayPool<byte>.Shared.Return(array);
}
}
private static bool ReadTransferBlock(Stream source, byte[] array, out int count)
{
return (count = source.Read(array, 0, array.Length)) != 0;
}
private static byte[] GetTransferByteArray()
{
return ArrayPool<byte>.Shared.Rent(81920);
}
public static bool ReadFully(this Stream stream, byte[] buffer)
{
int total = 0;
int read;
while ((read = stream.Read(buffer, total, buffer.Length - total)) > 0)
{
total += read;
if (total >= buffer.Length)
{
return true;
}
}
return (total >= buffer.Length);
}
public static bool ReadFully(this Stream stream, Span<byte> buffer)
{
int total = 0;

View File

@@ -2,21 +2,21 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Writers
{
public abstract class AbstractWriter : IWriter
{
private bool _isDisposed;
protected AbstractWriter(ArchiveType type, WriterOptions writerOptions)
{
WriterType = type;
WriterOptions = writerOptions;
}
protected void InitializeStream(Stream stream)
protected void InitalizeStream(Stream stream)
{
OutputStream = stream;
}
@@ -27,18 +27,33 @@ namespace SharpCompress.Writers
protected WriterOptions WriterOptions { get; }
public abstract ValueTask WriteAsync(string filename, Stream source, DateTime? modificationTime, CancellationToken cancellationToken);
public abstract void Write(string filename, Stream source, DateTime? modificationTime);
public async ValueTask DisposeAsync()
protected virtual void Dispose(bool isDisposing)
{
await DisposeAsyncCore();
GC.SuppressFinalize(this);
if (isDisposing)
{
OutputStream.Dispose();
}
}
protected virtual ValueTask DisposeAsyncCore()
public void Dispose()
{
return OutputStream.DisposeAsync();
if (!_isDisposed)
{
GC.SuppressFinalize(this);
Dispose(true);
_isDisposed = true;
}
}
~AbstractWriter()
{
if (!_isDisposed)
{
Dispose(false);
_isDisposed = true;
}
}
}
}

View File

@@ -1,7 +1,5 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
@@ -20,18 +18,22 @@ namespace SharpCompress.Writers.GZip
{
destination = new NonDisposingStream(destination);
}
InitializeStream(new GZipStream(destination, CompressionMode.Compress,
InitalizeStream(new GZipStream(destination, CompressionMode.Compress,
options?.CompressionLevel ?? CompressionLevel.Default,
WriterOptions.ArchiveEncoding.GetEncoding()));
}
protected override ValueTask DisposeAsyncCore()
protected override void Dispose(bool isDisposing)
{
//dispose here to finish the GZip, GZip won't close the underlying stream
return OutputStream.DisposeAsync();
if (isDisposing)
{
//dispose here to finish the GZip, GZip won't close the underlying stream
OutputStream.Dispose();
}
base.Dispose(isDisposing);
}
public override async ValueTask WriteAsync(string filename, Stream source, DateTime? modificationTime, CancellationToken cancellationToken)
public override void Write(string filename, Stream source, DateTime? modificationTime)
{
if (_wroteToStream)
{
@@ -40,7 +42,7 @@ namespace SharpCompress.Writers.GZip
GZipStream stream = (GZipStream)OutputStream;
stream.FileName = filename;
stream.LastModified = modificationTime;
await source.TransferToAsync(stream, cancellationToken);
source.TransferTo(stream);
_wroteToStream = true;
}
}

View File

@@ -1,14 +1,12 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Writers
{
public interface IWriter : IAsyncDisposable
public interface IWriter : IDisposable
{
ArchiveType WriterType { get; }
ValueTask WriteAsync(string filename, Stream source, DateTime? modificationTime, CancellationToken cancellationToken = default);
void Write(string filename, Stream source, DateTime? modificationTime);
}
}

View File

@@ -2,46 +2,43 @@
using System.IO;
using System.Linq;
using System.Linq.Expressions;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Writers
{
public static class IWriterExtensions
{
public static ValueTask WriteAsync(this IWriter writer, string entryPath, Stream source, CancellationToken cancellationToken = default)
public static void Write(this IWriter writer, string entryPath, Stream source)
{
return writer.WriteAsync(entryPath, source, null, cancellationToken);
writer.Write(entryPath, source, null);
}
public static async ValueTask WriteAsync(this IWriter writer, string entryPath, FileInfo source, CancellationToken cancellationToken = default)
public static void Write(this IWriter writer, string entryPath, FileInfo source)
{
if (!source.Exists)
{
throw new ArgumentException("Source does not exist: " + source.FullName);
}
await using (var stream = source.OpenRead())
using (var stream = source.OpenRead())
{
await writer.WriteAsync(entryPath, stream, source.LastWriteTime, cancellationToken);
writer.Write(entryPath, stream, source.LastWriteTime);
}
}
public static ValueTask WriteAsync(this IWriter writer, string entryPath, string source, CancellationToken cancellationToken = default)
public static void Write(this IWriter writer, string entryPath, string source)
{
return writer.WriteAsync(entryPath, new FileInfo(source), cancellationToken);
writer.Write(entryPath, new FileInfo(source));
}
public static ValueTask WriteAllAsync(this IWriter writer, string directory, string searchPattern = "*", SearchOption option = SearchOption.TopDirectoryOnly, CancellationToken cancellationToken = default)
public static void WriteAll(this IWriter writer, string directory, string searchPattern = "*", SearchOption option = SearchOption.TopDirectoryOnly)
{
return writer.WriteAllAsync(directory, searchPattern, null, option, cancellationToken);
writer.WriteAll(directory, searchPattern, null, option);
}
public static async ValueTask WriteAllAsync(this IWriter writer,
string directory,
string searchPattern = "*",
Expression<Func<string, bool>>? fileSearchFunc = null,
SearchOption option = SearchOption.TopDirectoryOnly, CancellationToken cancellationToken = default)
public static void WriteAll(this IWriter writer,
string directory,
string searchPattern = "*",
Expression<Func<string, bool>>? fileSearchFunc = null,
SearchOption option = SearchOption.TopDirectoryOnly)
{
if (!Directory.Exists(directory))
{
@@ -54,7 +51,7 @@ namespace SharpCompress.Writers
}
foreach (var file in Directory.EnumerateFiles(directory, searchPattern, option).Where(fileSearchFunc.Compile()))
{
await writer.WriteAsync(file.Substring(directory.Length), file, cancellationToken);
writer.Write(file.Substring(directory.Length), file);
}
}
}

View File

@@ -1,8 +1,5 @@
using System;
using System.Buffers;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors;
@@ -15,24 +12,18 @@ namespace SharpCompress.Writers.Tar
{
public class TarWriter : AbstractWriter
{
private bool finalizeArchiveOnClose;
private readonly bool finalizeArchiveOnClose;
private TarWriter(TarWriterOptions options)
public TarWriter(Stream destination, TarWriterOptions options)
: base(ArchiveType.Tar, options)
{
}
public static async ValueTask<TarWriter> CreateAsync(Stream destination, TarWriterOptions options, CancellationToken cancellationToken = default)
{
await Task.CompletedTask;
var tw = new TarWriter(options);
tw.finalizeArchiveOnClose = options.FinalizeArchiveOnClose;
finalizeArchiveOnClose = options.FinalizeArchiveOnClose;
if (!destination.CanWrite)
{
throw new ArgumentException("Tars require writable streams.");
}
if (tw.WriterOptions.LeaveStreamOpen)
if (WriterOptions.LeaveStreamOpen)
{
destination = new NonDisposingStream(destination);
}
@@ -40,11 +31,11 @@ namespace SharpCompress.Writers.Tar
{
case CompressionType.None:
break;
case CompressionType.BZip2:
case CompressionType.BZip2:
{
destination = await BZip2Stream.CreateAsync(destination, CompressionMode.Compress, false, cancellationToken);
destination = new BZip2Stream(destination, CompressionMode.Compress, false);
}
break;
break;
case CompressionType.GZip:
{
destination = new GZipStream(destination, CompressionMode.Compress);
@@ -52,7 +43,7 @@ namespace SharpCompress.Writers.Tar
break;
case CompressionType.LZip:
{
destination = await LZipStream.CreateAsync(destination, CompressionMode.Compress);
destination = new LZipStream(destination, CompressionMode.Compress);
}
break;
default:
@@ -60,32 +51,12 @@ namespace SharpCompress.Writers.Tar
throw new InvalidFormatException("Tar does not support compression: " + options.CompressionType);
}
}
tw.InitializeStream(destination);
return tw;
InitalizeStream(destination);
}
public override ValueTask WriteAsync(string filename, Stream source, DateTime? modificationTime, CancellationToken cancellationToken = default)
public override void Write(string filename, Stream source, DateTime? modificationTime)
{
return WriteAsync(filename, source, modificationTime, null, cancellationToken);
}
public async ValueTask WriteAsync(string filename, Stream source, DateTime? modificationTime, long? size, CancellationToken cancellationToken = default)
{
if (!source.CanSeek && size == null)
{
throw new ArgumentException("Seekable stream is required if no size is given.");
}
long realSize = size ?? source.Length;
TarHeader header = new(WriterOptions.ArchiveEncoding);
header.LastModifiedTime = modificationTime ?? TarHeader.EPOCH;
header.Name = NormalizeFilename(filename);
header.Size = realSize;
await header.WriteAsync(OutputStream);
size = await source.TransferToAsync(OutputStream, cancellationToken);
await PadTo512Async(size.Value, false);
Write(filename, source, modificationTime, null);
}
private string NormalizeFilename(string filename)
@@ -101,7 +72,26 @@ namespace SharpCompress.Writers.Tar
return filename.Trim('/');
}
private async Task PadTo512Async(long size, bool forceZeros)
public void Write(string filename, Stream source, DateTime? modificationTime, long? size)
{
if (!source.CanSeek && size is null)
{
throw new ArgumentException("Seekable stream is required if no size is given.");
}
long realSize = size ?? source.Length;
TarHeader header = new TarHeader(WriterOptions.ArchiveEncoding);
header.LastModifiedTime = modificationTime ?? TarHeader.EPOCH;
header.Name = NormalizeFilename(filename);
header.Size = realSize;
header.Write(OutputStream);
size = source.TransferTo(OutputStream);
PadTo512(size.Value, false);
}
private void PadTo512(long size, bool forceZeros)
{
int zeros = (int)size % 512;
if (zeros == 0 && !forceZeros)
@@ -109,31 +99,33 @@ namespace SharpCompress.Writers.Tar
return;
}
zeros = 512 - zeros;
using var zeroBuffer = MemoryPool<byte>.Shared.Rent(zeros);
zeroBuffer.Memory.Span.Clear();
await OutputStream.WriteAsync(zeroBuffer.Memory.Slice(0, zeros));
OutputStream.Write(stackalloc byte[zeros]);
}
protected override async ValueTask DisposeAsyncCore()
protected override void Dispose(bool isDisposing)
{
if (finalizeArchiveOnClose)
if (isDisposing)
{
await PadTo512Async(0, true);
await PadTo512Async(0, true);
}
switch (OutputStream)
{
/* case BZip2Stream b:
{
await b.FinishAsync(CancellationToken.None);
break;
} */
case LZipStream l:
{
await l.FinishAsync();
break;
}
if (finalizeArchiveOnClose)
{
PadTo512(0, true);
PadTo512(0, true);
}
switch (OutputStream)
{
case BZip2Stream b:
{
b.Finish();
break;
}
case LZipStream l:
{
l.Finish();
break;
}
}
}
base.Dispose(isDisposing);
}
}
}

Some files were not shown because too many files have changed in this diff Show More