Compare commits


33 commits: dmg ... async

Author SHA1 Message Date
Adam Hathcock e08e4e5d9f Enabling Bzip2 but something else is broken 2021-02-21 18:01:56 +00:00
Adam Hathcock dd710ec308 fixed read only sub stream 2021-02-21 13:37:58 +00:00
Adam Hathcock 5cfc608010 More fixes? 2021-02-21 13:21:33 +00:00
Adam Hathcock 997c11ef25 Bug fix on counting 2021-02-21 12:14:07 +00:00
Adam Hathcock 249f11f543 Rework some zip writing 2021-02-21 12:10:17 +00:00
Adam Hathcock eeb6761a9f Reuse gzip header reading 2021-02-20 13:11:40 +00:00
Adam Hathcock 0c35abdebe Explicit exception for read shortcut 2021-02-20 12:52:29 +00:00
Adam Hathcock 30da0b91ed Fixed Gzip by reverting EmitHeaderAsync 2021-02-20 11:47:38 +00:00
Adam Hathcock d9c53e1c82 ZLIbStreamfile fixes? 2021-02-20 11:41:46 +00:00
Adam Hathcock 14e6d95559 More clean up doesn’t help 2021-02-14 18:09:22 +00:00
Adam Hathcock 8cdc49cb85 ReadByteAsync 2021-02-14 14:09:25 +00:00
Adam Hathcock 5c11075d36 Updates for merge 2021-02-14 13:52:55 +00:00
Adam Hathcock be34fe2056 Merge branch 'master' into async 2021-02-14 13:38:58 +00:00
# Conflicts:
#	src/SharpCompress/Archives/GZip/GZipArchive.cs
#	src/SharpCompress/Common/GZip/GZipFilePart.cs
#	src/SharpCompress/Common/Tar/Headers/TarHeader.cs
#	src/SharpCompress/Common/Zip/SeekableZipHeaderFactory.cs
#	src/SharpCompress/Common/Zip/ZipFilePart.cs
#	src/SharpCompress/Compressors/Deflate/ZlibBaseStream.cs
#	src/SharpCompress/Compressors/LZMA/LZipStream.cs
#	src/SharpCompress/Compressors/Xz/BinaryUtils.cs
#	src/SharpCompress/Compressors/Xz/Crc32.cs
#	src/SharpCompress/Writers/Tar/TarWriter.cs
#	src/SharpCompress/Writers/Zip/ZipCentralDirectoryEntry.cs
#	src/SharpCompress/Writers/Zip/ZipWriter.cs
Adam Hathcock 7e9fb645cb Minor changes 2021-02-14 12:55:05 +00:00
Adam Hathcock 15209178ce AsyncStream for BZip2 2021-02-13 18:09:58 +00:00
Adam Hathcock ea688e1f4c Writer problems still :( 2021-02-13 17:52:31 +00:00
Adam Hathcock fe4cc8e6cb Zip LZMA write will roundtrip 2021-02-13 16:44:53 +00:00
Adam Hathcock 1f37ced35a AsyncStream everything 2021-02-13 16:16:03 +00:00
Adam Hathcock 949e90351f More LZMA conversion going, BZip2 not for now 2021-02-13 10:45:57 +00:00
Adam Hathcock db02e8b634 Minor fixes 2021-02-08 18:25:14 +00:00
Adam Hathcock d6fe729068 create async 2021-02-08 12:07:45 +00:00
Adam Hathcock ef3d4da286 Fix test and some zip writing 2021-02-08 11:18:57 +00:00
Adam Hathcock 813bd5ae80 Async open entry 2021-02-08 10:17:34 +00:00
Adam Hathcock f40d3342c8 Tar and Xz mostly work 2021-02-07 18:58:24 +00:00
Adam Hathcock 9738b812c4 Fix rewindable stream and encoding tests 2021-02-07 18:08:29 +00:00
Adam Hathcock c6a011df17 Fixed reader issue 2021-02-07 18:03:17 +00:00
Adam Hathcock 7d2dc58766 More API fixes 2021-02-07 13:38:41 +00:00
Adam Hathcock d234f2d509 First pass of trying tar 2021-02-07 11:30:44 +00:00
Adam Hathcock cdba5ec419 AsyncEnumerable usage in entries 2021-02-07 09:08:15 +00:00
Adam Hathcock 9cf8a3dbbe more awaits 2021-02-06 09:08:09 +00:00
Adam Hathcock 2b4f02997e more async await 2021-02-01 08:34:12 +00:00
Adam Hathcock bcdfd992a3 async dispose and fix tests? 2021-01-24 09:18:38 +00:00
Adam Hathcock 3a820c52bd async Deflate. Start of writer 2021-01-24 09:06:02 +00:00
167 changed files with 3096 additions and 6670 deletions

FORMATS.md

@@ -19,6 +19,7 @@
| Tar.XZ | LZMA2 | Decompress | TarArchive | TarReader | TarWriter (3) |
| GZip (single file) | DEFLATE | Both | GZipArchive | GZipReader | GZipWriter |
| 7Zip (4) | LZMA, LZMA2, BZip2, PPMd, BCJ, BCJ2, Deflate | Decompress | SevenZipArchive | N/A | N/A |
| LZip (single file) (5) | LZip (LZMA) | Both | LZipArchive | LZipReader | LZipWriter |
1. SOLID Rars are only supported in the RarReader API.
2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading/writing is supported but only with seekable streams as the Zip spec doesn't support Zip64 data in post data descriptors. Deflate64 is only supported for reading.

src/SharpCompress/Archives/AbstractArchive.cs

@@ -2,29 +2,25 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives
{
public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtractionListener
public abstract class AbstractArchive<TEntry, TVolume> : IArchive
where TEntry : IArchiveEntry
where TVolume : IVolume
{
private readonly LazyReadOnlyCollection<TVolume> lazyVolumes;
private readonly LazyReadOnlyCollection<TEntry> lazyEntries;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionBegin;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionEnd;
public event EventHandler<CompressedBytesReadEventArgs>? CompressedBytesRead;
public event EventHandler<FilePartExtractionBeginEventArgs>? FilePartExtractionBegin;
protected ReaderOptions ReaderOptions { get; }
protected ReaderOptions ReaderOptions { get; } = new ();
private bool disposed;
internal AbstractArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerOptions)
internal AbstractArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerOptions, CancellationToken cancellationToken)
{
Type = type;
if (!fileInfo.Exists)
@@ -33,42 +29,30 @@ namespace SharpCompress.Archives
}
ReaderOptions = readerOptions;
readerOptions.LeaveStreamOpen = false;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(fileInfo));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(fileInfo, cancellationToken));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes, cancellationToken));
}
protected abstract IEnumerable<TVolume> LoadVolumes(FileInfo file);
protected abstract IAsyncEnumerable<TVolume> LoadVolumes(FileInfo file, CancellationToken cancellationToken);
internal AbstractArchive(ArchiveType type, IEnumerable<Stream> streams, ReaderOptions readerOptions)
internal AbstractArchive(ArchiveType type, IAsyncEnumerable<Stream> streams, ReaderOptions readerOptions, CancellationToken cancellationToken)
{
Type = type;
ReaderOptions = readerOptions;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(streams.Select(CheckStreams)));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(streams.Select(CheckStreams), cancellationToken));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes, cancellationToken));
}
#nullable disable
internal AbstractArchive(ArchiveType type)
{
Type = type;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
lazyVolumes = new LazyReadOnlyCollection<TVolume>( AsyncEnumerable.Empty<TVolume>());
lazyEntries = new LazyReadOnlyCollection<TEntry>(AsyncEnumerable.Empty<TEntry>());
}
#nullable enable
public ArchiveType Type { get; }
void IArchiveExtractionListener.FireEntryExtractionBegin(IArchiveEntry entry)
{
EntryExtractionBegin?.Invoke(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));
}
void IArchiveExtractionListener.FireEntryExtractionEnd(IArchiveEntry entry)
{
EntryExtractionEnd?.Invoke(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));
}
private static Stream CheckStreams(Stream stream)
{
if (!stream.CanSeek || !stream.CanRead)
@@ -81,63 +65,48 @@ namespace SharpCompress.Archives
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
/// </summary>
public virtual ICollection<TEntry> Entries => lazyEntries;
public virtual IAsyncEnumerable<TEntry> Entries => lazyEntries;
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveVolumes across the one or many parts of the RarArchive.
/// </summary>
public ICollection<TVolume> Volumes => lazyVolumes;
public IAsyncEnumerable<TVolume> Volumes => lazyVolumes;
/// <summary>
/// The total size of the files compressed in the archive.
/// </summary>
public virtual long TotalSize => Entries.Aggregate(0L, (total, cf) => total + cf.CompressedSize);
public virtual async ValueTask<long> TotalSizeAsync()
{
await EnsureEntriesLoaded();
return await Entries.AggregateAsync(0L, (total, cf) => total + cf.CompressedSize);
}
/// <summary>
/// The total size of the files as uncompressed in the archive.
/// </summary>
public virtual long TotalUncompressSize => Entries.Aggregate(0L, (total, cf) => total + cf.Size);
public virtual async ValueTask<long> TotalUncompressedSizeAsync()
{
await EnsureEntriesLoaded();
return await Entries.AggregateAsync(0L, (total, cf) => total + cf.Size);
}
protected abstract IEnumerable<TVolume> LoadVolumes(IEnumerable<Stream> streams);
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);
protected abstract IAsyncEnumerable<TVolume> LoadVolumes(IAsyncEnumerable<Stream> streams, CancellationToken cancellationToken);
protected abstract IAsyncEnumerable<TEntry> LoadEntries(IAsyncEnumerable<TVolume> volumes, CancellationToken cancellationToken);
IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();
IAsyncEnumerable<IArchiveEntry> IArchive.Entries => Entries.Select(x => (IArchiveEntry)x);
IEnumerable<IVolume> IArchive.Volumes => lazyVolumes.Cast<IVolume>();
IAsyncEnumerable<IVolume> IArchive.Volumes => lazyVolumes.Select(x => (IVolume)x);
public virtual void Dispose()
public virtual async ValueTask DisposeAsync()
{
if (!disposed)
{
lazyVolumes.ForEach(v => v.Dispose());
lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
await lazyVolumes.ForEachAsync(async v => await v.DisposeAsync());
await lazyEntries.GetLoaded().Cast<Entry>().ForEachAsync(async x => await x.CloseAsync());
disposed = true;
}
}
void IArchiveExtractionListener.EnsureEntriesLoaded()
{
lazyEntries.EnsureFullyLoaded();
lazyVolumes.EnsureFullyLoaded();
}
void IExtractionListener.FireCompressedBytesRead(long currentPartCompressedBytes, long compressedReadBytes)
{
CompressedBytesRead?.Invoke(this, new CompressedBytesReadEventArgs(
currentFilePartCompressedBytesRead: currentPartCompressedBytes,
compressedBytesRead: compressedReadBytes
));
}
void IExtractionListener.FireFilePartExtractionBegin(string name, long size, long compressedSize)
{
FilePartExtractionBegin?.Invoke(this, new FilePartExtractionBeginEventArgs(
compressedSize: compressedSize,
size: size,
name: name
));
}
/// <summary>
/// Use this method to extract all entries in an archive in order.
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
@@ -149,29 +118,32 @@ namespace SharpCompress.Archives
/// occur if this is used at the same time as other extraction methods on this instance.
/// </summary>
/// <returns></returns>
public IReader ExtractAllEntries()
public async ValueTask<IReader> ExtractAllEntries()
{
((IArchiveExtractionListener)this).EnsureEntriesLoaded();
return CreateReaderForSolidExtraction();
await EnsureEntriesLoaded();
return await CreateReaderForSolidExtraction();
}
public async ValueTask EnsureEntriesLoaded()
{
await lazyEntries.EnsureFullyLoaded();
await lazyVolumes.EnsureFullyLoaded();
}
protected abstract IReader CreateReaderForSolidExtraction();
protected abstract ValueTask<IReader> CreateReaderForSolidExtraction();
/// <summary>
/// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
/// </summary>
public virtual bool IsSolid => false;
public virtual ValueTask<bool> IsSolidAsync() => new(false);
/// <summary>
/// The archive can find all the parts of the archive needed to fully extract the archive. This forces the parsing of the entire archive.
/// </summary>
public bool IsComplete
public async ValueTask<bool> IsCompleteAsync()
{
get
{
((IArchiveExtractionListener)this).EnsureEntriesLoaded();
return Entries.All(x => x.IsComplete);
}
await EnsureEntriesLoaded();
return await Entries.AllAsync(x => x.IsComplete);
}
}
}
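
Taken together, this file swaps the eager ICollection surface and the IArchiveExtractionListener events for IAsyncEnumerable members and ValueTask-returning methods. A minimal sketch of consuming the reworked surface as it appears in this diff (the ArchiveStats helper is illustrative, not part of the change):

using System;
using System.Threading.Tasks;
using SharpCompress.Archives;

public static class ArchiveStats
{
    // Illustrative helper: exercises the async members added above.
    public static async Task PrintAsync(IArchive archive)
    {
        // Forces the lazy volume/entry load (formerly IArchiveExtractionListener.EnsureEntriesLoaded).
        await archive.EnsureEntriesLoaded();

        long compressed = await archive.TotalSizeAsync();
        long uncompressed = await archive.TotalUncompressedSizeAsync();
        Console.WriteLine($"{compressed} bytes compressed, {uncompressed} uncompressed");

        // Entries is now IAsyncEnumerable<IArchiveEntry>, so enumeration is await foreach.
        await foreach (var entry in archive.Entries)
        {
            Console.WriteLine(entry.Key);
        }
    }
}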

src/SharpCompress/Archives/AbstractWritableArchive.cs

@@ -2,6 +2,8 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Writers;
@@ -12,7 +14,7 @@ namespace SharpCompress.Archives
where TEntry : IArchiveEntry
where TVolume : IVolume
{
private class RebuildPauseDisposable : IDisposable
private class RebuildPauseDisposable : IAsyncDisposable
{
private readonly AbstractWritableArchive<TEntry, TVolume> archive;
@@ -22,16 +24,16 @@ namespace SharpCompress.Archives
archive.pauseRebuilding = true;
}
public void Dispose()
public async ValueTask DisposeAsync()
{
archive.pauseRebuilding = false;
archive.RebuildModifiedCollection();
await archive.RebuildModifiedCollection();
}
}
private readonly List<TEntry> newEntries = new List<TEntry>();
private readonly List<TEntry> removedEntries = new List<TEntry>();
private readonly List<TEntry> newEntries = new();
private readonly List<TEntry> removedEntries = new();
private readonly List<TEntry> modifiedEntries = new List<TEntry>();
private readonly List<TEntry> modifiedEntries = new();
private bool hasModifications;
private bool pauseRebuilding;
@@ -40,34 +42,36 @@ namespace SharpCompress.Archives
{
}
internal AbstractWritableArchive(ArchiveType type, Stream stream, ReaderOptions readerFactoryOptions)
: base(type, stream.AsEnumerable(), readerFactoryOptions)
internal AbstractWritableArchive(ArchiveType type, Stream stream, ReaderOptions readerFactoryOptions,
CancellationToken cancellationToken)
: base(type, stream.AsAsyncEnumerable(), readerFactoryOptions, cancellationToken)
{
}
internal AbstractWritableArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerFactoryOptions)
: base(type, fileInfo, readerFactoryOptions)
internal AbstractWritableArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerFactoryOptions,
CancellationToken cancellationToken)
: base(type, fileInfo, readerFactoryOptions, cancellationToken)
{
}
public override ICollection<TEntry> Entries
public override IAsyncEnumerable<TEntry> Entries
{
get
{
if (hasModifications)
{
return modifiedEntries;
return modifiedEntries.ToAsyncEnumerable();
}
return base.Entries;
}
}
public IDisposable PauseEntryRebuilding()
public IAsyncDisposable PauseEntryRebuilding()
{
return new RebuildPauseDisposable(this);
}
private void RebuildModifiedCollection()
private async ValueTask RebuildModifiedCollection()
{
if (pauseRebuilding)
{
@@ -76,56 +80,57 @@ namespace SharpCompress.Archives
hasModifications = true;
newEntries.RemoveAll(v => removedEntries.Contains(v));
modifiedEntries.Clear();
modifiedEntries.AddRange(OldEntries.Concat(newEntries));
modifiedEntries.AddRange(await OldEntries.Concat(newEntries.ToAsyncEnumerable()).ToListAsync());
}
private IEnumerable<TEntry> OldEntries { get { return base.Entries.Where(x => !removedEntries.Contains(x)); } }
private IAsyncEnumerable<TEntry> OldEntries { get { return base.Entries.Where(x => !removedEntries.Contains(x)); } }
public void RemoveEntry(TEntry entry)
public async ValueTask RemoveEntryAsync(TEntry entry)
{
if (!removedEntries.Contains(entry))
{
removedEntries.Add(entry);
RebuildModifiedCollection();
await RebuildModifiedCollection();
}
}
void IWritableArchive.RemoveEntry(IArchiveEntry entry)
ValueTask IWritableArchive.RemoveEntryAsync(IArchiveEntry entry, CancellationToken cancellationToken)
{
RemoveEntry((TEntry)entry);
return RemoveEntryAsync((TEntry)entry);
}
public TEntry AddEntry(string key, Stream source,
long size = 0, DateTime? modified = null)
public ValueTask<TEntry> AddEntryAsync(string key, Stream source,
long size = 0, DateTime? modified = null,
CancellationToken cancellationToken = default)
{
return AddEntry(key, source, false, size, modified);
return AddEntryAsync(key, source, false, size, modified, cancellationToken);
}
IArchiveEntry IWritableArchive.AddEntry(string key, Stream source, bool closeStream, long size, DateTime? modified)
async ValueTask<IArchiveEntry> IWritableArchive.AddEntryAsync(string key, Stream source, bool closeStream, long size, DateTime? modified, CancellationToken cancellationToken)
{
return AddEntry(key, source, closeStream, size, modified);
return await AddEntryAsync(key, source, closeStream, size, modified, cancellationToken);
}
public TEntry AddEntry(string key, Stream source, bool closeStream,
long size = 0, DateTime? modified = null)
public async ValueTask<TEntry> AddEntryAsync(string key, Stream source, bool closeStream,
long size = 0, DateTime? modified = null, CancellationToken cancellationToken = default)
{
if (key.Length > 0 && key[0] is '/' or '\\')
{
key = key.Substring(1);
}
if (DoesKeyMatchExisting(key))
if (await DoesKeyMatchExisting(key))
{
throw new ArchiveException("Cannot add entry with duplicate key: " + key);
}
var entry = CreateEntry(key, source, size, modified, closeStream);
var entry = await CreateEntry(key, source, size, modified, closeStream, cancellationToken);
newEntries.Add(entry);
RebuildModifiedCollection();
await RebuildModifiedCollection();
return entry;
}
private bool DoesKeyMatchExisting(string key)
private async ValueTask<bool> DoesKeyMatchExisting(string key)
{
foreach (var path in Entries.Select(x => x.Key))
await foreach (var path in Entries.Select(x => x.Key))
{
var p = path.Replace('/', '\\');
if (p.Length > 0 && p[0] == '\\')
@@ -137,34 +142,35 @@ namespace SharpCompress.Archives
return false;
}
public void SaveTo(Stream stream, WriterOptions options)
public async ValueTask SaveToAsync(Stream stream, WriterOptions options, CancellationToken cancellationToken = default)
{
//reset streams of new entries
newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
SaveTo(stream, options, OldEntries, newEntries);
await SaveToAsync(stream, options, OldEntries, newEntries.ToAsyncEnumerable(), cancellationToken);
}
protected TEntry CreateEntry(string key, Stream source, long size, DateTime? modified,
bool closeStream)
protected ValueTask<TEntry> CreateEntry(string key, Stream source, long size, DateTime? modified,
bool closeStream, CancellationToken cancellationToken)
{
if (!source.CanRead || !source.CanSeek)
{
throw new ArgumentException("Streams must be readable and seekable to use the Writing Archive API");
}
return CreateEntryInternal(key, source, size, modified, closeStream);
return CreateEntryInternal(key, source, size, modified, closeStream, cancellationToken);
}
protected abstract TEntry CreateEntryInternal(string key, Stream source, long size, DateTime? modified,
bool closeStream);
protected abstract ValueTask<TEntry> CreateEntryInternal(string key, Stream source, long size, DateTime? modified,
bool closeStream, CancellationToken cancellationToken);
protected abstract void SaveTo(Stream stream, WriterOptions options, IEnumerable<TEntry> oldEntries, IEnumerable<TEntry> newEntries);
protected abstract ValueTask SaveToAsync(Stream stream, WriterOptions options, IAsyncEnumerable<TEntry> oldEntries, IAsyncEnumerable<TEntry> newEntries,
CancellationToken cancellationToken = default);
public override void Dispose()
public override async ValueTask DisposeAsync()
{
base.Dispose();
newEntries.Cast<Entry>().ForEach(x => x.Close());
removedEntries.Cast<Entry>().ForEach(x => x.Close());
modifiedEntries.Cast<Entry>().ForEach(x => x.Close());
await base.DisposeAsync();
await newEntries.Cast<Entry>().ForEachAsync(async x => await x.CloseAsync());
await removedEntries.Cast<Entry>().ForEachAsync(async x => await x.CloseAsync());
await modifiedEntries.Cast<Entry>().ForEachAsync(async x => await x.CloseAsync());
}
}
}
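
The writable side follows suit: entry addition/removal and saving become awaitable, and the rebuild-pause guard becomes IAsyncDisposable so the single deferred rebuild can run inside DisposeAsync. A sketch of bulk addition against the API in this diff (the BulkAdd helper is illustrative):

using System.IO;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.Writers;

public static class BulkAdd
{
    // Illustrative helper: pauses rebuilding while adding many entries, then saves.
    // PauseEntryRebuilding now returns IAsyncDisposable, so the one deferred
    // rebuild runs in DisposeAsync at the end of the await using block.
    public static async Task AddManyAsync(IWritableArchive archive, string[] files, Stream output)
    {
        await using (archive.PauseEntryRebuilding())
        {
            foreach (var file in files)
            {
                var info = new FileInfo(file);
                await archive.AddEntryAsync(info.Name, info.OpenRead(), closeStream: true,
                                            info.Length, info.LastWriteTime);
            }
        }
        await archive.SaveToAsync(output, new WriterOptions(CompressionType.Deflate));
    }
}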

src/SharpCompress/Archives/ArchiveFactory.cs

@@ -1,9 +1,10 @@
using System;
using System.IO;
using SharpCompress.Archives.Dmg;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Rar;
using SharpCompress.Archives.SevenZip;
//using SharpCompress.Archives.Rar;
//using SharpCompress.Archives.SevenZip;
using SharpCompress.Archives.Tar;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
@@ -19,7 +20,7 @@ namespace SharpCompress.Archives
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <returns></returns>
public static IArchive Open(Stream stream, ReaderOptions? readerOptions = null)
public static async ValueTask<IArchive> OpenAsync(Stream stream, ReaderOptions? readerOptions = null, CancellationToken cancellationToken = default)
{
stream.CheckNotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
@@ -27,42 +28,36 @@ namespace SharpCompress.Archives
throw new ArgumentException("Stream should be readable and seekable");
}
readerOptions ??= new ReaderOptions();
if (ZipArchive.IsZipFile(stream, null))
if (await ZipArchive.IsZipFileAsync(stream, null, cancellationToken))
{
stream.Seek(0, SeekOrigin.Begin);
return ZipArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (SevenZipArchive.IsSevenZipFile(stream))
/*if (SevenZipArchive.IsSevenZipFile(stream))
{
stream.Seek(0, SeekOrigin.Begin);
return SevenZipArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (GZipArchive.IsGZipFile(stream))
stream.Seek(0, SeekOrigin.Begin); */
if (await GZipArchive.IsGZipFileAsync(stream, cancellationToken))
{
stream.Seek(0, SeekOrigin.Begin);
return GZipArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (DmgArchive.IsDmgFile(stream))
{
stream.Seek(0, SeekOrigin.Begin);
return DmgArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (RarArchive.IsRarFile(stream, readerOptions))
/* if (RarArchive.IsRarFile(stream, readerOptions))
{
stream.Seek(0, SeekOrigin.Begin);
return RarArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (TarArchive.IsTarFile(stream))
stream.Seek(0, SeekOrigin.Begin); */
if (await TarArchive.IsTarFileAsync(stream, cancellationToken))
{
stream.Seek(0, SeekOrigin.Begin);
return TarArchive.Open(stream, readerOptions);
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip, LZip, Dmg");
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip, LZip");
}
public static IWritableArchive Create(ArchiveType type)
@@ -70,7 +65,7 @@ namespace SharpCompress.Archives
return type switch
{
ArchiveType.Zip => ZipArchive.Create(),
ArchiveType.Tar => TarArchive.Create(),
//ArchiveType.Tar => TarArchive.Create(),
ArchiveType.GZip => GZipArchive.Create(),
_ => throw new NotSupportedException("Cannot create Archives of type: " + type)
};
@@ -81,10 +76,10 @@ namespace SharpCompress.Archives
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
public static IArchive Open(string filePath, ReaderOptions? options = null)
public static ValueTask<IArchive> OpenAsync(string filePath, ReaderOptions? options = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), options);
return OpenAsync(new FileInfo(filePath), options);
}
/// <summary>
@@ -92,34 +87,28 @@ namespace SharpCompress.Archives
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
public static IArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
public static async ValueTask<IArchive> OpenAsync(FileInfo fileInfo, ReaderOptions? options = null, CancellationToken cancellationToken = default)
{
fileInfo.CheckNotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
using var stream = fileInfo.OpenRead();
if (ZipArchive.IsZipFile(stream, null))
await using var stream = fileInfo.OpenRead();
if (await ZipArchive.IsZipFileAsync(stream, null, cancellationToken))
{
return ZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (SevenZipArchive.IsSevenZipFile(stream))
/*if (SevenZipArchive.IsSevenZipFile(stream))
{
return SevenZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (GZipArchive.IsGZipFile(stream))
stream.Seek(0, SeekOrigin.Begin); */
if (await GZipArchive.IsGZipFileAsync(stream, cancellationToken))
{
return GZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (DmgArchive.IsDmgFile(stream))
{
stream.Seek(0, SeekOrigin.Begin);
return DmgArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (RarArchive.IsRarFile(stream, options))
/*if (RarArchive.IsRarFile(stream, options))
{
return RarArchive.Open(fileInfo, options);
}
@@ -127,20 +116,22 @@ namespace SharpCompress.Archives
if (TarArchive.IsTarFile(stream))
{
return TarArchive.Open(fileInfo, options);
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip, Dmg");
} */
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(string sourceArchive, string destinationDirectory,
ExtractionOptions? options = null)
public static async ValueTask WriteToDirectory(string sourceArchive,
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default)
{
using IArchive archive = Open(sourceArchive);
foreach (IArchiveEntry entry in archive.Entries)
await using IArchive archive = await OpenAsync(sourceArchive);
await foreach (IArchiveEntry entry in archive.Entries.WithCancellation(cancellationToken))
{
entry.WriteToDirectory(destinationDirectory, options);
await entry.WriteEntryToDirectoryAsync(destinationDirectory, options, cancellationToken);
}
}
}
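
With Rar, 7Zip and Dmg detection commented out, OpenAsync probes Zip, then GZip, then Tar, rewinding the seekable stream between probes. A small usage sketch (the Detect helper is illustrative):

using System;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Archives;

public static class Detect
{
    // Illustrative helper: the stream must be readable and seekable,
    // since each failed probe rewinds before the next format is tried.
    public static async Task PrintTypeAsync(string path)
    {
        await using var stream = File.OpenRead(path);
        await using var archive = await ArchiveFactory.OpenAsync(stream);
        Console.WriteLine(archive.Type);
    }
}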

src/SharpCompress/Archives/Dmg/DmgArchive.cs (file deleted)

@@ -1,117 +0,0 @@
using SharpCompress.Common;
using SharpCompress.Common.Dmg;
using SharpCompress.Common.Dmg.Headers;
using SharpCompress.Common.Dmg.HFS;
using SharpCompress.Readers;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace SharpCompress.Archives.Dmg
{
public class DmgArchive : AbstractArchive<DmgArchiveEntry, DmgVolume>
{
private readonly string _fileName;
internal DmgArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.Dmg, fileInfo, readerOptions)
{
_fileName = fileInfo.FullName;
}
internal DmgArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.Dmg, stream.AsEnumerable(), readerOptions)
{
_fileName = string.Empty;
}
protected override IReader CreateReaderForSolidExtraction()
=> new DmgReader(ReaderOptions, this, _fileName);
protected override IEnumerable<DmgArchiveEntry> LoadEntries(IEnumerable<DmgVolume> volumes)
=> volumes.Single().LoadEntries();
protected override IEnumerable<DmgVolume> LoadVolumes(FileInfo file)
=> new DmgVolume(this, file.OpenRead(), file.FullName, ReaderOptions).AsEnumerable();
protected override IEnumerable<DmgVolume> LoadVolumes(IEnumerable<Stream> streams)
=> new DmgVolume(this, streams.Single(), string.Empty, ReaderOptions).AsEnumerable();
public static bool IsDmgFile(FileInfo fileInfo)
{
if (!fileInfo.Exists) return false;
using var stream = fileInfo.OpenRead();
return IsDmgFile(stream);
}
public static bool IsDmgFile(Stream stream)
{
long headerPos = stream.Length - DmgHeader.HeaderSize;
if (headerPos < 0) return false;
stream.Position = headerPos;
return DmgHeader.TryRead(stream, out _);
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static DmgArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static DmgArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new DmgArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static DmgArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
return new DmgArchive(stream, readerOptions ?? new ReaderOptions());
}
private sealed class DmgReader : AbstractReader<DmgEntry, DmgVolume>
{
private readonly DmgArchive _archive;
private readonly string _fileName;
private readonly Stream? _partitionStream;
public override DmgVolume Volume { get; }
internal DmgReader(ReaderOptions readerOptions, DmgArchive archive, string fileName)
: base(readerOptions, ArchiveType.Dmg)
{
_archive = archive;
_fileName = fileName;
Volume = archive.Volumes.Single();
using var compressedStream = DmgUtil.LoadHFSPartitionStream(Volume.Stream, Volume.Header);
_partitionStream = compressedStream?.Decompress();
}
protected override IEnumerable<DmgEntry> GetEntries(Stream stream)
{
if (_partitionStream is null) return Array.Empty<DmgArchiveEntry>();
else return HFSUtil.LoadEntriesFromPartition(_partitionStream, _fileName, _archive);
}
}
}
}

src/SharpCompress/Archives/Dmg/DmgArchiveEntry.cs (file deleted)

@@ -1,32 +0,0 @@
using SharpCompress.Common.Dmg;
using SharpCompress.Common.Dmg.HFS;
using System;
using System.IO;
namespace SharpCompress.Archives.Dmg
{
public sealed class DmgArchiveEntry : DmgEntry, IArchiveEntry
{
private readonly Stream? _stream;
public bool IsComplete { get; } = true;
public IArchive Archive { get; }
internal DmgArchiveEntry(Stream? stream, DmgArchive archive, HFSCatalogRecord record, string path, DmgFilePart part)
: base(record, path, stream?.Length ?? 0, part)
{
_stream = stream;
Archive = archive;
}
public Stream OpenEntryStream()
{
if (IsDirectory)
throw new NotSupportedException("Directories cannot be opened as stream");
_stream!.Position = 0;
return _stream;
}
}
}

src/SharpCompress/Archives/GZip/GZipArchive.cs

@@ -1,7 +1,11 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
using SharpCompress.Readers;
@@ -29,10 +33,11 @@ namespace SharpCompress.Archives.GZip
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new GZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
return new GZipArchive(fileInfo, readerOptions ?? new ReaderOptions(), cancellationToken);
}
/// <summary>
@@ -40,10 +45,11 @@ namespace SharpCompress.Archives.GZip
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
public static GZipArchive Open(Stream stream, ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default)
{
stream.CheckNotNull(nameof(stream));
return new GZipArchive(stream, readerOptions ?? new ReaderOptions());
return new GZipArchive(stream, readerOptions ?? new ReaderOptions(), cancellationToken);
}
public static GZipArchive Create()
@@ -56,57 +62,58 @@ namespace SharpCompress.Archives.GZip
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
internal GZipArchive(FileInfo fileInfo, ReaderOptions options)
: base(ArchiveType.GZip, fileInfo, options)
internal GZipArchive(FileInfo fileInfo, ReaderOptions options,
CancellationToken cancellationToken)
: base(ArchiveType.GZip, fileInfo, options, cancellationToken)
{
}
protected override IEnumerable<GZipVolume> LoadVolumes(FileInfo file)
protected override IAsyncEnumerable<GZipVolume> LoadVolumes(FileInfo file,
CancellationToken cancellationToken)
{
return new GZipVolume(file, ReaderOptions).AsEnumerable();
return new GZipVolume(file, ReaderOptions).AsAsyncEnumerable();
}
public static bool IsGZipFile(string filePath)
public static ValueTask<bool> IsGZipFileAsync(string filePath, CancellationToken cancellationToken = default)
{
return IsGZipFile(new FileInfo(filePath));
return IsGZipFileAsync(new FileInfo(filePath), cancellationToken);
}
public static bool IsGZipFile(FileInfo fileInfo)
public static async ValueTask<bool> IsGZipFileAsync(FileInfo fileInfo, CancellationToken cancellationToken = default)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsGZipFile(stream);
await using Stream stream = fileInfo.OpenRead();
return await IsGZipFileAsync(stream, cancellationToken);
}
public void SaveTo(string filePath)
public Task SaveToAsync(string filePath, CancellationToken cancellationToken = default)
{
SaveTo(new FileInfo(filePath));
return SaveToAsync(new FileInfo(filePath), cancellationToken);
}
public void SaveTo(FileInfo fileInfo)
public async Task SaveToAsync(FileInfo fileInfo, CancellationToken cancellationToken = default)
{
using (var stream = fileInfo.Open(FileMode.Create, FileAccess.Write))
{
SaveTo(stream, new WriterOptions(CompressionType.GZip));
}
await using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
await SaveToAsync(stream, new WriterOptions(CompressionType.GZip), cancellationToken);
}
public static bool IsGZipFile(Stream stream)
public static async ValueTask<bool> IsGZipFileAsync(Stream stream, CancellationToken cancellationToken = default)
{
// read the header on the first read
Span<byte> header = stackalloc byte[10];
using var header = MemoryPool<byte>.Shared.Rent(10);
var slice = header.Memory.Slice(0, 10);
// workitem 8501: handle edge case (decompress empty stream)
if (!stream.ReadFully(header))
if (await stream.ReadAsync(slice, cancellationToken) != 10)
{
return false;
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
if (slice.Span[0] != 0x1F || slice.Span[1] != 0x8B || slice.Span[2] != 8)
{
return false;
}
@@ -119,8 +126,9 @@ namespace SharpCompress.Archives.GZip
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
internal GZipArchive(Stream stream, ReaderOptions options)
: base(ArchiveType.GZip, stream, options)
internal GZipArchive(Stream stream, ReaderOptions options,
CancellationToken cancellationToken)
: base(ArchiveType.GZip, stream, options, cancellationToken)
{
}
@@ -129,51 +137,54 @@ namespace SharpCompress.Archives.GZip
{
}
protected override GZipArchiveEntry CreateEntryInternal(string filePath, Stream source, long size, DateTime? modified,
bool closeStream)
protected override async ValueTask<GZipArchiveEntry> CreateEntryInternal(string filePath, Stream source, long size, DateTime? modified,
bool closeStream, CancellationToken cancellationToken = default)
{
if (Entries.Any())
if (await Entries.AnyAsync(cancellationToken: cancellationToken))
{
throw new InvalidOperationException("Only one entry is allowed in a GZip Archive");
}
return new GZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
}
protected override void SaveTo(Stream stream, WriterOptions options,
IEnumerable<GZipArchiveEntry> oldEntries,
IEnumerable<GZipArchiveEntry> newEntries)
protected override async ValueTask SaveToAsync(Stream stream, WriterOptions options,
IAsyncEnumerable<GZipArchiveEntry> oldEntries,
IAsyncEnumerable<GZipArchiveEntry> newEntries,
CancellationToken cancellationToken = default)
{
if (Entries.Count > 1)
if (await Entries.CountAsync(cancellationToken: cancellationToken) > 1)
{
throw new InvalidOperationException("Only one entry is allowed in a GZip Archive");
}
using (var writer = new GZipWriter(stream, new GZipWriterOptions(options)))
await using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
await foreach (var entry in oldEntries.Concat(newEntries)
.Where(x => !x.IsDirectory)
.WithCancellation(cancellationToken))
{
foreach (var entry in oldEntries.Concat(newEntries)
.Where(x => !x.IsDirectory))
{
using (var entryStream = entry.OpenEntryStream())
{
writer.Write(entry.Key, entryStream, entry.LastModifiedTime);
}
}
await using var entryStream = await entry.OpenEntryStreamAsync(cancellationToken);
await writer.WriteAsync(entry.Key, entryStream, entry.LastModifiedTime, cancellationToken);
}
}
protected override IEnumerable<GZipVolume> LoadVolumes(IEnumerable<Stream> streams)
protected override async IAsyncEnumerable<GZipVolume> LoadVolumes(IAsyncEnumerable<Stream> streams,
[EnumeratorCancellation]CancellationToken cancellationToken)
{
return new GZipVolume(streams.First(), ReaderOptions).AsEnumerable();
yield return new GZipVolume(await streams.FirstAsync(cancellationToken: cancellationToken), ReaderOptions);
}
protected override IEnumerable<GZipArchiveEntry> LoadEntries(IEnumerable<GZipVolume> volumes)
protected override async IAsyncEnumerable<GZipArchiveEntry> LoadEntries(IAsyncEnumerable<GZipVolume> volumes,
[EnumeratorCancellation]CancellationToken cancellationToken)
{
Stream stream = volumes.Single().Stream;
yield return new GZipArchiveEntry(this, new GZipFilePart(stream, ReaderOptions.ArchiveEncoding));
Stream stream = (await volumes.SingleAsync(cancellationToken: cancellationToken)).Stream;
var part = new GZipFilePart(ReaderOptions.ArchiveEncoding);
await part.Initialize(stream, cancellationToken);
yield return new GZipArchiveEntry(this, part);
}
protected override IReader CreateReaderForSolidExtraction()
protected override async ValueTask<IReader> CreateReaderForSolidExtraction()
{
var stream = Volumes.Single().Stream;
var stream = (await Volumes.SingleAsync()).Stream;
stream.Position = 0;
return GZipReader.Open(stream);
}
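
The IsGZipFileAsync probe now rents a 10-byte buffer and checks the 0x1F 0x8B 0x08 magic sequence, rather than a stackalloc span, since a Span<byte> local cannot live across an await. A round-trip sketch against the async writable API (the GZipRoundTrip helper is illustrative; a GZip archive still holds exactly one entry):

using System.IO;
using System.Threading.Tasks;
using SharpCompress.Archives.GZip;

public static class GZipRoundTrip
{
    // Illustrative helper: a second AddEntryAsync would throw, and
    // SaveToAsync re-checks the entry count before writing.
    public static async Task CompressAsync(string inputFile, string gzPath)
    {
        await using var archive = GZipArchive.Create();
        var info = new FileInfo(inputFile);
        await archive.AddEntryAsync(info.Name, info.OpenRead(), closeStream: true,
                                    info.Length, info.LastWriteTime);
        await archive.SaveToAsync(gzPath);
    }
}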

src/SharpCompress/Archives/GZip/GZipArchiveEntry.cs

@@ -1,5 +1,7 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.GZip;
namespace SharpCompress.Archives.GZip
@@ -12,7 +14,7 @@ namespace SharpCompress.Archives.GZip
Archive = archive;
}
public virtual Stream OpenEntryStream()
public virtual async ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
{
//this is to reset the stream to be read multiple times
var part = (GZipFilePart)Parts.Single();
@@ -20,7 +22,7 @@ namespace SharpCompress.Archives.GZip
{
part.GetRawStream().Position = part.EntryStartPosition;
}
return Parts.Single().GetCompressedStream();
return await Parts.Single().GetCompressedStreamAsync(cancellationToken);
}
#region IArchiveEntry Members

src/SharpCompress/Archives/GZip/GZipWritableArchiveEntry.cs

@@ -3,6 +3,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
@@ -50,18 +52,18 @@ namespace SharpCompress.Archives.GZip
Stream IWritableArchiveEntry.Stream => stream;
public override Stream OpenEntryStream()
public override ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return new NonDisposingStream(stream);
return new(new NonDisposingStream(stream));
}
internal override void Close()
internal override async ValueTask CloseAsync()
{
if (closeStream)
{
stream.Dispose();
await stream.DisposeAsync();
}
}
}

src/SharpCompress/Archives/IArchive.cs

@@ -1,49 +1,44 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives
{
public interface IArchive : IDisposable
public interface IArchive : IAsyncDisposable
{
event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionBegin;
event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionEnd;
event EventHandler<CompressedBytesReadEventArgs> CompressedBytesRead;
event EventHandler<FilePartExtractionBeginEventArgs> FilePartExtractionBegin;
IEnumerable<IArchiveEntry> Entries { get; }
IEnumerable<IVolume> Volumes { get; }
IAsyncEnumerable<IArchiveEntry> Entries { get; }
IAsyncEnumerable<IVolume> Volumes { get; }
ArchiveType Type { get; }
ValueTask EnsureEntriesLoaded();
/// <summary>
/// Use this method to extract all entries in an archive in order.
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
/// extracted sequentially for the best performance.
/// </summary>
IReader ExtractAllEntries();
ValueTask<IReader> ExtractAllEntries();
/// <summary>
/// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
/// Rar Archives can be SOLID while all 7Zip archives are considered SOLID.
/// </summary>
bool IsSolid { get; }
ValueTask<bool> IsSolidAsync();
/// <summary>
/// This checks to see if all the known entries have IsComplete = true
/// </summary>
bool IsComplete { get; }
ValueTask<bool> IsCompleteAsync();
/// <summary>
/// The total size of the files compressed in the archive.
/// </summary>
long TotalSize { get; }
ValueTask<long> TotalSizeAsync();
/// <summary>
/// The total size of the files as uncompressed in the archive.
/// </summary>
long TotalUncompressSize { get; }
ValueTask<long> TotalUncompressedSizeAsync();
}
}
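
On the interface, every member that may force the lazy entry load is now awaitable. A consumption sketch (the SolidExtraction helper is illustrative):

using System;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Readers;

public static class SolidExtraction
{
    // Illustrative helper: the former bool/long properties are methods now,
    // because each may have to parse the whole archive first.
    public static async Task<IReader> OpenSequentialReaderAsync(IArchive archive)
    {
        if (!await archive.IsCompleteAsync())
        {
            throw new InvalidOperationException("Not all parts of the archive are present.");
        }
        // For SOLID archives this is the efficient, in-order extraction path.
        return await archive.ExtractAllEntries();
    }
}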

src/SharpCompress/Archives/IArchiveEntry.cs

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Archives
@@ -9,7 +11,7 @@ namespace SharpCompress.Archives
/// Opens the current entry as a stream that will decompress as it is read.
/// Read the entire stream or use SkipEntry on EntryStream.
/// </summary>
Stream OpenEntryStream();
ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default);
/// <summary>
/// The archive can find all the parts of the archive needed to extract this entry.

src/SharpCompress/Archives/IArchiveEntryExtensions.cs

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
@@ -6,58 +8,53 @@ namespace SharpCompress.Archives
{
public static class IArchiveEntryExtensions
{
public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWriteTo)
public static async ValueTask WriteToAsync(this IArchiveEntry archiveEntry, Stream streamToWriteTo, CancellationToken cancellationToken = default)
{
if (archiveEntry.IsDirectory)
{
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
}
var streamListener = (IArchiveExtractionListener)archiveEntry.Archive;
streamListener.EnsureEntriesLoaded();
streamListener.FireEntryExtractionBegin(archiveEntry);
streamListener.FireFilePartExtractionBegin(archiveEntry.Key, archiveEntry.Size, archiveEntry.CompressedSize);
var entryStream = archiveEntry.OpenEntryStream();
var archive = archiveEntry.Archive;
await archive.EnsureEntriesLoaded();
var entryStream = await archiveEntry.OpenEntryStreamAsync(cancellationToken);
if (entryStream is null)
{
return;
}
using (entryStream)
await using (entryStream)
{
using (Stream s = new ListeningStream(streamListener, entryStream))
{
s.TransferTo(streamToWriteTo);
}
await entryStream.TransferToAsync(streamToWriteTo, cancellationToken);
}
streamListener.FireEntryExtractionEnd(archiveEntry);
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(this IArchiveEntry entry, string destinationDirectory,
ExtractionOptions? options = null)
public static ValueTask WriteEntryToDirectoryAsync(this IArchiveEntry entry,
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default)
{
ExtractionMethods.WriteEntryToDirectory(entry, destinationDirectory, options,
entry.WriteToFile);
return ExtractionMethods.WriteEntryToDirectoryAsync(entry, destinationDirectory, options,
entry.WriteToFileAsync, cancellationToken);
}
/// <summary>
/// Extract to specific file
/// </summary>
public static void WriteToFile(this IArchiveEntry entry,
public static ValueTask WriteToFileAsync(this IArchiveEntry entry,
string destinationFileName,
ExtractionOptions? options = null)
ExtractionOptions? options = null,
CancellationToken cancellationToken = default)
{
ExtractionMethods.WriteEntryToFile(entry, destinationFileName, options,
(x, fm) =>
return ExtractionMethods.WriteEntryToFileAsync(entry, destinationFileName, options,
async (x, fm, ct) =>
{
using (FileStream fs = File.Open(destinationFileName, fm))
{
entry.WriteTo(fs);
}
});
await using FileStream fs = File.Open(x, fm);
await entry.WriteToAsync(fs, ct);
}, cancellationToken);
}
}
}
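
A usage sketch for the per-entry extension (the SingleEntry helper is illustrative; the Overwrite flag is assumed from the mainline ExtractionOptions type, which this diff only references):

using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Common;

public static class SingleEntry
{
    // Illustrative helper: extracts one entry to a specific file.
    // Overwrite is an assumed ExtractionOptions member.
    public static ValueTask SaveAsync(IArchiveEntry entry, string destinationFileName)
    {
        return entry.WriteToFileAsync(destinationFileName,
            new ExtractionOptions { Overwrite = true });
    }
}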

src/SharpCompress/Archives/IArchiveExtensions.cs

@@ -1,4 +1,6 @@
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Archives
@@ -8,12 +10,14 @@ namespace SharpCompress.Archives
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(this IArchive archive, string destinationDirectory,
ExtractionOptions? options = null)
public static async ValueTask WriteToDirectoryAsync(this IArchive archive,
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default)
{
foreach (IArchiveEntry entry in archive.Entries.Where(x => !x.IsDirectory))
await foreach (IArchiveEntry entry in archive.Entries.Where(x => !x.IsDirectory).WithCancellation(cancellationToken))
{
entry.WriteToDirectory(destinationDirectory, options);
await entry.WriteEntryToDirectoryAsync(destinationDirectory, options, cancellationToken);
}
}
}
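
The archive-level counterpart, sketched the same way (the ExtractAll helper is illustrative; ExtractFullPath and Overwrite are likewise assumed from the mainline ExtractionOptions):

using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Common;

public static class ExtractAll
{
    // Illustrative helper: the extension itself filters out directory
    // entries before writing each file.
    public static ValueTask ExtractAsync(IArchive archive, string destinationDirectory)
    {
        return archive.WriteToDirectoryAsync(destinationDirectory,
            new ExtractionOptions { ExtractFullPath = true, Overwrite = true });
    }
}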

src/SharpCompress/Archives/IArchiveExtractionListener.cs (file deleted)

@@ -1,11 +0,0 @@
using SharpCompress.Common;
namespace SharpCompress.Archives
{
internal interface IArchiveExtractionListener : IExtractionListener
{
void EnsureEntriesLoaded();
void FireEntryExtractionBegin(IArchiveEntry entry);
void FireEntryExtractionEnd(IArchiveEntry entry);
}
}

src/SharpCompress/Archives/IWritableArchive.cs

@@ -1,21 +1,23 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Writers;
namespace SharpCompress.Archives
{
public interface IWritableArchive : IArchive
{
void RemoveEntry(IArchiveEntry entry);
ValueTask RemoveEntryAsync(IArchiveEntry entry, CancellationToken cancellationToken = default);
IArchiveEntry AddEntry(string key, Stream source, bool closeStream, long size = 0, DateTime? modified = null);
ValueTask<IArchiveEntry> AddEntryAsync(string key, Stream source, bool closeStream, long size = 0, DateTime? modified = null, CancellationToken cancellationToken = default);
void SaveTo(Stream stream, WriterOptions options);
ValueTask SaveToAsync(Stream stream, WriterOptions options, CancellationToken cancellationToken = default);
/// <summary>
/// Use this to pause entry rebuilding when adding large collections of entries. Dispose when complete. A using statement is recommended.
/// </summary>
/// <returns>IDisposeable to resume entry rebuilding</returns>
IDisposable PauseEntryRebuilding();
IAsyncDisposable PauseEntryRebuilding();
}
}

src/SharpCompress/Archives/IWritableArchiveExtensions.cs

@@ -1,57 +1,62 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Writers;
namespace SharpCompress.Archives
{
public static class IWritableArchiveExtensions
{
public static void AddEntry(this IWritableArchive writableArchive,
string entryPath, string filePath)
public static async ValueTask AddEntryAsync(this IWritableArchive writableArchive,
string entryPath, string filePath,
CancellationToken cancellationToken = default)
{
var fileInfo = new FileInfo(filePath);
if (!fileInfo.Exists)
{
throw new FileNotFoundException("Could not AddEntry: " + filePath);
}
writableArchive.AddEntry(entryPath, new FileInfo(filePath).OpenRead(), true, fileInfo.Length,
fileInfo.LastWriteTime);
await writableArchive.AddEntryAsync(entryPath, new FileInfo(filePath).OpenRead(), true, fileInfo.Length,
fileInfo.LastWriteTime, cancellationToken);
}
public static void SaveTo(this IWritableArchive writableArchive, string filePath, WriterOptions options)
public static Task SaveToAsync(this IWritableArchive writableArchive, string filePath, WriterOptions options, CancellationToken cancellationToken = default)
{
writableArchive.SaveTo(new FileInfo(filePath), options);
return writableArchive.SaveToAsync(new FileInfo(filePath), options, cancellationToken);
}
public static void SaveTo(this IWritableArchive writableArchive, FileInfo fileInfo, WriterOptions options)
public static async Task SaveToAsync(this IWritableArchive writableArchive, FileInfo fileInfo, WriterOptions options, CancellationToken cancellationToken = default)
{
using (var stream = fileInfo.Open(FileMode.Create, FileAccess.Write))
{
writableArchive.SaveTo(stream, options);
}
await using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
await writableArchive.SaveToAsync(stream, options, cancellationToken);
}
public static void AddAllFromDirectory(
public static async ValueTask AddAllFromDirectoryAsync(
this IWritableArchive writableArchive,
string filePath, string searchPattern = "*.*", SearchOption searchOption = SearchOption.AllDirectories)
string filePath, string searchPattern = "*.*",
SearchOption searchOption = SearchOption.AllDirectories,
CancellationToken cancellationToken = default)
{
using (writableArchive.PauseEntryRebuilding())
await using (writableArchive.PauseEntryRebuilding())
{
foreach (var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption))
{
var fileInfo = new FileInfo(path);
writableArchive.AddEntry(path.Substring(filePath.Length), fileInfo.OpenRead(), true, fileInfo.Length,
fileInfo.LastWriteTime);
await writableArchive.AddEntryAsync(path.Substring(filePath.Length), fileInfo.OpenRead(), true, fileInfo.Length,
fileInfo.LastWriteTime,
cancellationToken);
}
}
}
public static IArchiveEntry AddEntry(this IWritableArchive writableArchive, string key, FileInfo fileInfo)
public static ValueTask<IArchiveEntry> AddEntryAsync(this IWritableArchive writableArchive, string key, FileInfo fileInfo,
CancellationToken cancellationToken = default)
{
if (!fileInfo.Exists)
{
throw new ArgumentException("FileInfo does not exist.");
}
return writableArchive.AddEntry(key, fileInfo.OpenRead(), true, fileInfo.Length, fileInfo.LastWriteTime);
return writableArchive.AddEntryAsync(key, fileInfo.OpenRead(), true, fileInfo.Length, fileInfo.LastWriteTime, cancellationToken);
}
}
}
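
A sketch combining these extensions to pack a folder into a zip (the FolderToZip helper is illustrative; note AddAllFromDirectoryAsync already wraps its loop in PauseEntryRebuilding):

using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Writers;

public static class FolderToZip
{
    // Illustrative helper: recursively adds a directory, then writes
    // the archive to disk via the SaveToAsync(string, ...) extension.
    public static async Task ZipFolderAsync(string folder, string zipPath)
    {
        await using var archive = ZipArchive.Create();
        await archive.AddAllFromDirectoryAsync(folder);
        await archive.SaveToAsync(zipPath, new WriterOptions(CompressionType.Deflate));
    }
}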

src/SharpCompress/Archives/Tar/TarArchive.cs

@@ -2,6 +2,9 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
@@ -31,10 +34,11 @@ namespace SharpCompress.Archives.Tar
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
public static TarArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new TarArchive(fileInfo, readerOptions ?? new ReaderOptions());
return new TarArchive(fileInfo, readerOptions ?? new ReaderOptions(), cancellationToken);
}
/// <summary>
@@ -42,35 +46,35 @@ namespace SharpCompress.Archives.Tar
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(Stream stream, ReaderOptions? readerOptions = null)
public static TarArchive Open(Stream stream, ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default)
{
stream.CheckNotNull(nameof(stream));
return new TarArchive(stream, readerOptions ?? new ReaderOptions());
return new TarArchive(stream, readerOptions ?? new ReaderOptions(), cancellationToken);
}
public static bool IsTarFile(string filePath)
public static ValueTask<bool> IsTarFileAsync(string filePath, CancellationToken cancellationToken = default)
{
return IsTarFile(new FileInfo(filePath));
return IsTarFileAsync(new FileInfo(filePath), cancellationToken);
}
public static bool IsTarFile(FileInfo fileInfo)
public static async ValueTask<bool> IsTarFileAsync(FileInfo fileInfo, CancellationToken cancellationToken = default)
{
if (!fileInfo.Exists)
{
return false;
}
using (Stream stream = fileInfo.OpenRead())
{
return IsTarFile(stream);
}
await using Stream stream = fileInfo.OpenRead();
return await IsTarFileAsync(stream, cancellationToken);
}
public static bool IsTarFile(Stream stream)
public static async ValueTask<bool> IsTarFileAsync(Stream stream, CancellationToken cancellationToken = default)
{
try
{
TarHeader tarHeader = new TarHeader(new ArchiveEncoding());
bool readSucceeded = tarHeader.Read(new BinaryReader(stream));
TarHeader tarHeader = new(new ArchiveEncoding());
bool readSucceeded = await tarHeader.Read(stream, cancellationToken);
bool isEmptyArchive = tarHeader.Name.Length == 0 && tarHeader.Size == 0 && Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;
}
@@ -85,14 +89,15 @@ namespace SharpCompress.Archives.Tar
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
internal TarArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.Tar, fileInfo, readerOptions)
internal TarArchive(FileInfo fileInfo, ReaderOptions readerOptions,
CancellationToken cancellationToken)
: base(ArchiveType.Tar, fileInfo, readerOptions, cancellationToken)
{
}
protected override IEnumerable<TarVolume> LoadVolumes(FileInfo file)
protected override IAsyncEnumerable<TarVolume> LoadVolumes(FileInfo file, CancellationToken cancellationToken)
{
return new TarVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
return new TarVolume(file.OpenRead(), ReaderOptions).AsAsyncEnumerable();
}
/// <summary>
@@ -100,8 +105,9 @@ namespace SharpCompress.Archives.Tar
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
internal TarArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.Tar, stream, readerOptions)
internal TarArchive(Stream stream, ReaderOptions readerOptions,
CancellationToken cancellationToken)
: base(ArchiveType.Tar, stream, readerOptions, cancellationToken)
{
}
@@ -110,16 +116,18 @@ namespace SharpCompress.Archives.Tar
{
}
protected override IEnumerable<TarVolume> LoadVolumes(IEnumerable<Stream> streams)
protected override async IAsyncEnumerable<TarVolume> LoadVolumes(IAsyncEnumerable<Stream> streams,
[EnumeratorCancellation]CancellationToken cancellationToken)
{
return new TarVolume(streams.First(), ReaderOptions).AsEnumerable();
yield return new TarVolume(await streams.FirstAsync(cancellationToken: cancellationToken), ReaderOptions);
}
protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
protected override async IAsyncEnumerable<TarArchiveEntry> LoadEntries(IAsyncEnumerable<TarVolume> volumes,
[EnumeratorCancellation]CancellationToken cancellationToken)
{
Stream stream = volumes.Single().Stream;
Stream stream = (await volumes.SingleAsync(cancellationToken: cancellationToken)).Stream;
TarHeader? previousHeader = null;
foreach (TarHeader? header in TarHeaderFactory.ReadHeader(StreamingMode.Seekable, stream, ReaderOptions.ArchiveEncoding))
await foreach (TarHeader? header in TarHeaderFactory.ReadHeader(StreamingMode.Seekable, stream, ReaderOptions.ArchiveEncoding, cancellationToken))
{
if (header != null)
{
@@ -136,11 +144,11 @@ namespace SharpCompress.Archives.Tar
var oldStreamPos = stream.Position;
using (var entryStream = entry.OpenEntryStream())
await using (var entryStream = await entry.OpenEntryStreamAsync(cancellationToken))
{
using (var memoryStream = new MemoryStream())
await using (var memoryStream = new MemoryStream())
{
entryStream.TransferTo(memoryStream);
await entryStream.TransferToAsync(memoryStream, cancellationToken);
memoryStream.Position = 0;
var bytes = memoryStream.ToArray();
@@ -160,38 +168,37 @@ namespace SharpCompress.Archives.Tar
public static TarArchive Create()
{
return new TarArchive();
return new();
}
protected override TarArchiveEntry CreateEntryInternal(string filePath, Stream source,
long size, DateTime? modified, bool closeStream)
protected override ValueTask<TarArchiveEntry> CreateEntryInternal(string filePath, Stream source,
long size, DateTime? modified, bool closeStream,
CancellationToken cancellationToken)
{
return new TarWritableArchiveEntry(this, source, CompressionType.Unknown, filePath, size, modified,
closeStream);
return new (new TarWritableArchiveEntry(this, source, CompressionType.Unknown, filePath, size, modified,
closeStream));
}
protected override void SaveTo(Stream stream, WriterOptions options,
IEnumerable<TarArchiveEntry> oldEntries,
IEnumerable<TarArchiveEntry> newEntries)
protected override async ValueTask SaveToAsync(Stream stream, WriterOptions options,
IAsyncEnumerable<TarArchiveEntry> oldEntries,
IAsyncEnumerable<TarArchiveEntry> newEntries,
CancellationToken cancellationToken = default)
{
using (var writer = new TarWriter(stream, new TarWriterOptions(options)))
await using var writer = await TarWriter.CreateAsync(stream, new TarWriterOptions(options), cancellationToken);
await foreach (var entry in oldEntries.Concat(newEntries)
.Where(x => !x.IsDirectory)
.WithCancellation(cancellationToken))
{
foreach (var entry in oldEntries.Concat(newEntries)
.Where(x => !x.IsDirectory))
{
using (var entryStream = entry.OpenEntryStream())
{
writer.Write(entry.Key, entryStream, entry.LastModifiedTime, entry.Size);
}
}
await using var entryStream = await entry.OpenEntryStreamAsync(cancellationToken);
await writer.WriteAsync(entry.Key, entryStream, entry.LastModifiedTime, entry.Size, cancellationToken);
}
}
protected override IReader CreateReaderForSolidExtraction()
protected override async ValueTask<IReader> CreateReaderForSolidExtraction()
{
var stream = Volumes.Single().Stream;
var stream = (await Volumes.SingleAsync()).Stream;
stream.Position = 0;
return TarReader.Open(stream);
return await TarReader.OpenAsync(stream);
}
}
}
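For orientation, a minimal caller-side sketch of the async tar surface this diff converges on. TarWriter.CreateAsync and WriteAsync are taken from the hunk above; the file names, the CompressionType, and the TarWriterOptions construction are illustrative assumptions, not part of the diff.

await using (var output = File.Create("archive.tar"))
{
    // Assumed ctor shapes, mirroring "new TarWriterOptions(options)" and
    // "new WriterOptions(CompressionType.Deflate)" as used elsewhere in this diff.
    var options = new TarWriterOptions(new WriterOptions(CompressionType.None));
    await using var writer = await TarWriter.CreateAsync(output, options, CancellationToken.None);
    await using var source = File.OpenRead("data.bin");
    await writer.WriteAsync("data.bin", source, DateTime.UtcNow, source.Length, CancellationToken.None);
}

Reading back goes through TarReader.OpenAsync, as CreateReaderForSolidExtraction above shows.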

View File

@@ -1,5 +1,7 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar;
@@ -13,9 +15,9 @@ namespace SharpCompress.Archives.Tar
Archive = archive;
}
public virtual Stream OpenEntryStream()
public virtual async ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
{
return Parts.Single().GetCompressedStream();
return await Parts.Single().GetCompressedStreamAsync(cancellationToken);
}
#region IArchiveEntry Members

View File

@@ -3,6 +3,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
@@ -49,18 +51,18 @@ namespace SharpCompress.Archives.Tar
internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();
Stream IWritableArchiveEntry.Stream => stream;
public override Stream OpenEntryStream()
public override ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
{
// ensure the stream is at the start; it may have been read and repositioned before
stream.Seek(0, SeekOrigin.Begin);
return new NonDisposingStream(stream);
return new(new NonDisposingStream(stream));
}
internal override void Close()
internal override async ValueTask CloseAsync()
{
if (closeStream)
{
stream.Dispose();
await stream.DisposeAsync();
}
}
}

View File

@@ -2,10 +2,14 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Deflate;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Zip;
using SharpCompress.Writers;
@@ -41,10 +45,11 @@ namespace SharpCompress.Archives.Zip
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new ZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
return new ZipArchive(fileInfo, readerOptions ?? new ReaderOptions(), cancellationToken);
}
/// <summary>
@@ -52,35 +57,45 @@ namespace SharpCompress.Archives.Zip
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
public static ZipArchive Open(Stream stream, ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default)
{
stream.CheckNotNull(nameof(stream));
return new ZipArchive(stream, readerOptions ?? new ReaderOptions());
return new ZipArchive(stream, readerOptions ?? new ReaderOptions(), cancellationToken);
}
public static bool IsZipFile(string filePath, string? password = null)
public static ValueTask<bool> IsZipFile(string filePath, string? password = null)
{
return IsZipFile(new FileInfo(filePath), password);
return IsZipFileAsync(new FileInfo(filePath), password);
}
public static bool IsZipFile(FileInfo fileInfo, string? password = null)
public static async ValueTask<bool> IsZipFileAsync(FileInfo fileInfo, string? password = null)
{
if (!fileInfo.Exists)
{
return false;
}
using (Stream stream = fileInfo.OpenRead())
{
return IsZipFile(stream, password);
}
await using Stream stream = fileInfo.OpenRead();
return await IsZipFileAsync(stream, password);
}
public static bool IsZipFile(Stream stream, string? password = null)
public static async ValueTask<bool> IsZipFileAsync(Stream stream, string? password = null, CancellationToken cancellationToken = default)
{
StreamingZipHeaderFactory headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding());
StreamingZipHeaderFactory headerFactory = new(password, new ArchiveEncoding());
try
{
ZipHeader? header = headerFactory.ReadStreamHeader(stream).FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
RewindableStream rewindableStream;
if (stream is RewindableStream rs)
{
rewindableStream = rs;
}
else
{
rewindableStream = new RewindableStream(stream);
}
ZipHeader? header = await headerFactory.ReadStreamHeader(rewindableStream, cancellationToken)
.FirstOrDefaultAsync(x => x.ZipHeaderType != ZipHeaderType.Split, cancellationToken: cancellationToken);
if (header is null)
{
return false;
@@ -102,15 +117,17 @@ namespace SharpCompress.Archives.Zip
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
internal ZipArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.Zip, fileInfo, readerOptions)
internal ZipArchive(FileInfo fileInfo, ReaderOptions readerOptions,
CancellationToken cancellationToken)
: base(ArchiveType.Zip, fileInfo, readerOptions, cancellationToken)
{
headerFactory = new SeekableZipHeaderFactory(readerOptions.Password, readerOptions.ArchiveEncoding);
}
protected override IEnumerable<ZipVolume> LoadVolumes(FileInfo file)
protected override IAsyncEnumerable<ZipVolume> LoadVolumes(FileInfo file,
CancellationToken cancellationToken)
{
return new ZipVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
return new ZipVolume(file.OpenRead(), ReaderOptions).AsAsyncEnumerable();
}
internal ZipArchive()
@@ -123,82 +140,86 @@ namespace SharpCompress.Archives.Zip
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
internal ZipArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.Zip, stream, readerOptions)
internal ZipArchive(Stream stream, ReaderOptions readerOptions,
CancellationToken cancellationToken)
: base(ArchiveType.Zip, stream, readerOptions, cancellationToken)
{
headerFactory = new SeekableZipHeaderFactory(readerOptions.Password, readerOptions.ArchiveEncoding);
}
protected override IEnumerable<ZipVolume> LoadVolumes(IEnumerable<Stream> streams)
protected override async IAsyncEnumerable<ZipVolume> LoadVolumes(IAsyncEnumerable<Stream> streams,
[EnumeratorCancellation]CancellationToken cancellationToken)
{
return new ZipVolume(streams.First(), ReaderOptions).AsEnumerable();
yield return new ZipVolume(await streams.FirstAsync(cancellationToken: cancellationToken), ReaderOptions);
}
protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
protected override async IAsyncEnumerable<ZipArchiveEntry> LoadEntries(IAsyncEnumerable<ZipVolume> volumes,
[EnumeratorCancellation]CancellationToken cancellationToken)
{
var volume = volumes.Single();
await Task.CompletedTask;
var volume = await volumes.SingleAsync(cancellationToken: cancellationToken);
Stream stream = volume.Stream;
foreach (ZipHeader h in headerFactory.ReadSeekableHeader(stream))
await foreach (ZipHeader h in headerFactory.ReadSeekableHeader(stream, cancellationToken))
{
if (h != null)
{
switch (h.ZipHeaderType)
{
case ZipHeaderType.DirectoryEntry:
{
yield return new ZipArchiveEntry(this,
new SeekableZipFilePart(headerFactory,
(DirectoryEntryHeader)h,
stream));
}
{
yield return new ZipArchiveEntry(this,
new SeekableZipFilePart(headerFactory,
(DirectoryEntryHeader)h,
stream));
}
break;
case ZipHeaderType.DirectoryEnd:
{
byte[] bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
volume.Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
yield break;
}
{
byte[] bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
volume.Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
yield break;
}
}
}
}
}
public void SaveTo(Stream stream)
public ValueTask SaveToAsync(Stream stream, CancellationToken cancellationToken = default)
{
SaveTo(stream, new WriterOptions(CompressionType.Deflate));
return SaveToAsync(stream, new WriterOptions(CompressionType.Deflate), cancellationToken);
}
protected override void SaveTo(Stream stream, WriterOptions options,
IEnumerable<ZipArchiveEntry> oldEntries,
IEnumerable<ZipArchiveEntry> newEntries)
protected override async ValueTask SaveToAsync(Stream stream, WriterOptions options,
IAsyncEnumerable<ZipArchiveEntry> oldEntries,
IAsyncEnumerable<ZipArchiveEntry> newEntries,
CancellationToken cancellationToken = default)
{
using (var writer = new ZipWriter(stream, new ZipWriterOptions(options)))
await using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
await foreach (var entry in oldEntries.Concat(newEntries)
.Where(x => !x.IsDirectory)
.WithCancellation(cancellationToken))
{
foreach (var entry in oldEntries.Concat(newEntries)
.Where(x => !x.IsDirectory))
await using (var entryStream = await entry.OpenEntryStreamAsync(cancellationToken))
{
using (var entryStream = entry.OpenEntryStream())
{
writer.Write(entry.Key, entryStream, entry.LastModifiedTime);
}
await writer.WriteAsync(entry.Key, entryStream, entry.LastModifiedTime, cancellationToken);
}
}
}
protected override ZipArchiveEntry CreateEntryInternal(string filePath, Stream source, long size, DateTime? modified,
bool closeStream)
protected override ValueTask<ZipArchiveEntry> CreateEntryInternal(string filePath, Stream source, long size, DateTime? modified,
bool closeStream, CancellationToken cancellationToken = default)
{
return new ZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
return new(new ZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream));
}
public static ZipArchive Create()
{
return new ZipArchive();
return new();
}
protected override IReader CreateReaderForSolidExtraction()
protected override async ValueTask<IReader> CreateReaderForSolidExtraction()
{
var stream = Volumes.Single().Stream;
var stream = (await Volumes.SingleAsync()).Stream;
stream.Position = 0;
return ZipReader.Open(stream, ReaderOptions);
}
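A matching sketch for the zip side, using only members visible in this diff (IsZipFileAsync, Open, and the public SaveToAsync overload). The paths are placeholders, and synchronous disposal of the archive is an assumption about this branch.

var fileInfo = new FileInfo("input.zip");
if (await ZipArchive.IsZipFileAsync(fileInfo))
{
    using var archive = ZipArchive.Open(fileInfo);     // opening stays synchronous
    await using var output = File.Create("repacked.zip");
    await archive.SaveToAsync(output);                 // defaults to Deflate per the overload above
}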

View File

@@ -1,5 +1,7 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip;
namespace SharpCompress.Archives.Zip
@@ -12,9 +14,9 @@ namespace SharpCompress.Archives.Zip
Archive = archive;
}
public virtual Stream OpenEntryStream()
public virtual ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
{
return Parts.Single().GetCompressedStream();
return Parts.Single().GetCompressedStreamAsync(cancellationToken);
}
#region IArchiveEntry Members

View File

@@ -1,6 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
@@ -49,18 +51,18 @@ namespace SharpCompress.Archives.Zip
Stream IWritableArchiveEntry.Stream => stream;
public override Stream OpenEntryStream()
public override ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
{
// ensure the stream is at the start; it may have been read and repositioned before
stream.Seek(0, SeekOrigin.Begin);
return new NonDisposingStream(stream);
return new(new NonDisposingStream(stream));
}
internal override void Close()
internal override async ValueTask CloseAsync()
{
if (closeStream && !isDisposed)
{
stream.Dispose();
await stream.DisposeAsync();
isDisposed = true;
}
}

View File

@@ -0,0 +1,25 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress
{
public static class AsyncEnumerable
{
public static IAsyncEnumerable<T> Empty<T>() => EmptyAsyncEnumerable<T>.Instance;
private class EmptyAsyncEnumerable<T> : IAsyncEnumerator<T>, IAsyncEnumerable<T>
{
public static readonly EmptyAsyncEnumerable<T> Instance =
new();
public T Current => default!;
public ValueTask DisposeAsync() => default;
public ValueTask<bool> MoveNextAsync() => new(false);
public IAsyncEnumerator<T> GetAsyncEnumerator(CancellationToken cancellationToken = default)
{
return this;
}
}
}
}
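A note on the helper above: the single shared instance serves as both the enumerable and its own enumerator, which is safe only because it holds no state and MoveNextAsync unconditionally reports completion. From the caller's side the loop body simply never runs:

// Completes immediately; the body is never entered.
await foreach (var item in AsyncEnumerable.Empty<int>())
{
    Console.WriteLine(item); // unreachable
}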

View File

@@ -24,7 +24,7 @@ namespace SharpCompress.Common
/// Set this when you want to use a custom method for all decoding operations.
/// </summary>
/// <returns>string Func(bytes, index, length)</returns>
public Func<byte[], int, int, string>? CustomDecoder { get; set; }
//public Func<byte[], int, int, string>? CustomDecoder { get; set; }
public ArchiveEncoding()
: this(Encoding.Default, Encoding.Default)
@@ -50,7 +50,12 @@ namespace SharpCompress.Common
public string Decode(byte[] bytes, int start, int length)
{
return GetDecoder().Invoke(bytes, start, length);
return GetEncoding().GetString(bytes, start, length);
}
public string Decode(ReadOnlySpan<byte> span)
{
return GetEncoding().GetString(span);
}
public string DecodeUTF8(byte[] bytes)
@@ -67,10 +72,5 @@ namespace SharpCompress.Common
{
return Forced ?? Default ?? Encoding.UTF8;
}
public Func<byte[], int, int, string> GetDecoder()
{
return CustomDecoder ?? ((bytes, index, count) => GetEncoding().GetString(bytes, index, count));
}
}
}
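With the pluggable CustomDecoder gone, Decode is a thin wrapper over GetEncoding(), i.e. Forced ?? Default ?? Encoding.UTF8. A small sketch, assuming Forced is a settable property:

byte[] headerBytes = { 0xE2, 0x82, 0xAC };                // UTF-8 bytes of the euro sign
var archiveEncoding = new ArchiveEncoding { Forced = Encoding.UTF8 };
string text = archiveEncoding.Decode(headerBytes, 0, headerBytes.Length);  // "€"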

View File

@@ -8,10 +8,5 @@ namespace SharpCompress.Common
: base(message)
{
}
public ArchiveException(string message, Exception inner)
: base(message, inner)
{
}
}
}

View File

@@ -6,7 +6,6 @@
Zip,
Tar,
SevenZip,
GZip,
Dmg
GZip
}
}

View File

@@ -1,323 +0,0 @@
using SharpCompress.Common.Dmg.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.ADC;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.Deflate;
using SharpCompress.IO;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
namespace SharpCompress.Common.Dmg
{
internal sealed class DmgBlockDataStream : Stream
{
private readonly Stream _baseStream;
private readonly DmgHeader _header;
private readonly BlkxTable _table;
private long _position;
private bool _isEnded;
private int _chunkIndex;
private Stream? _chunkStream;
private long _chunkPos;
public override bool CanRead => true;
public override bool CanWrite => false;
public override bool CanSeek => true;
public override long Length { get; }
public override long Position
{
get => _position;
set
{
if ((value < 0) || (value > Length)) throw new ArgumentOutOfRangeException(nameof(value));
if (value == Length)
{
// End of the stream
_position = Length;
_isEnded = true;
_chunkIndex = -1;
_chunkStream = null;
}
else if (value != _position)
{
_position = value;
// We can only seek over entire chunks at a time because some chunks may be compressed.
// So we first find the chunk that contains the new position, then we read forward to the exact offset inside that chunk.
for (int i = 0; i < _table.Chunks.Count; i++)
{
var chunk = _table.Chunks[i];
if (IsChunkValid(chunk) && (chunk.UncompressedOffset <= (ulong)_position)
&& ((chunk.UncompressedOffset + chunk.UncompressedLength) > (ulong)_position))
{
if (i == _chunkIndex)
{
// We are still in the same chunk, so if the new position lies
// ahead of the current one we can simply read forward to it.
long offset = (long)chunk.UncompressedOffset + _chunkPos;
if (offset <= _position)
{
long skip = _position - offset;
_chunkStream!.Skip(skip);
_chunkPos += skip;
break;
}
}
_chunkIndex = i;
_chunkStream = GetChunkStream();
_chunkPos = 0;
// If the chunk happens not to be compressed, this read still results in a fast seek
if ((ulong)_position != chunk.UncompressedOffset)
{
long skip = _position - (long)chunk.UncompressedOffset;
_chunkStream.Skip(skip);
_chunkPos = skip;
}
break;
}
}
}
}
}
public DmgBlockDataStream(Stream baseStream, DmgHeader header, BlkxTable table)
{
if (!baseStream.CanRead) throw new ArgumentException("Requires a readable stream", nameof(baseStream));
if (!baseStream.CanSeek) throw new ArgumentException("Requires a seekable stream", nameof(baseStream));
_baseStream = baseStream;
_header = header;
_table = table;
Length = 0;
foreach (var chunk in table.Chunks)
{
if (IsChunkValid(chunk))
Length += (long)chunk.UncompressedLength;
}
_position = 0;
_chunkIndex = -1;
_chunkIndex = GetNextChunk();
_isEnded = _chunkIndex < 0;
if (!_isEnded) _chunkStream = GetChunkStream();
_chunkPos = 0;
}
private static bool IsChunkValid(BlkxChunk chunk)
{
return chunk.Type switch
{
BlkxChunkType.Zero => true,
BlkxChunkType.Uncompressed => true,
BlkxChunkType.Ignore => true,
BlkxChunkType.AdcCompressed => true,
BlkxChunkType.ZlibCompressed => true,
BlkxChunkType.Bz2Compressed => true,
_ => false
};
}
private int GetNextChunk()
{
int index = _chunkIndex;
bool isValid = false;
while (!isValid)
{
index++;
if (index >= _table.Chunks.Count) return -1;
var chunk = _table.Chunks[index];
if (chunk.Type == BlkxChunkType.Last) return -1;
isValid = IsChunkValid(chunk);
}
return index;
}
private Stream GetChunkStream()
{
if (_chunkIndex < 0)
throw new InvalidOperationException("Invalid chunk index");
var chunk = _table.Chunks[_chunkIndex];
// For our purposes, ignore behaves the same as zero
if ((chunk.Type == BlkxChunkType.Zero) || (chunk.Type == BlkxChunkType.Ignore))
return new ConstantStream(0, (long)chunk.UncompressedLength);
// We first create a sub-stream on the region of the base stream where the
// (possibly compressed) data is physically located.
var subStream = new SeekableSubStream(_baseStream,
(long)(_header.DataForkOffset + _table.DataOffset + chunk.CompressedOffset),
(long)chunk.CompressedLength);
// Then we nest that sub-stream into the appropriate compressed stream.
return chunk.Type switch
{
BlkxChunkType.Uncompressed => subStream,
BlkxChunkType.AdcCompressed => new ADCStream(subStream, CompressionMode.Decompress),
BlkxChunkType.ZlibCompressed => new ZlibStream(subStream, CompressionMode.Decompress),
BlkxChunkType.Bz2Compressed => new BZip2Stream(subStream, CompressionMode.Decompress, false),
_ => throw new InvalidOperationException("Invalid chunk type")
};
}
// Decompresses the entire stream in memory for faster extraction.
// This is about two orders of magnitude faster than decompressing
// on-the-fly while extracting, but also eats RAM for breakfast.
public Stream Decompress()
{
// We have to load all the chunks into separate memory streams first
// because otherwise the decompression threads would block each other
// and actually be slower than just a single decompression thread.
var rawStreams = new Stream?[_table.Chunks.Count];
for (int i = 0; i < rawStreams.Length; i++)
{
var chunk = _table.Chunks[i];
if (IsChunkValid(chunk))
{
if ((chunk.Type == BlkxChunkType.Zero) || (chunk.Type == BlkxChunkType.Ignore))
{
rawStreams[i] = new ConstantStream(0, (long)chunk.UncompressedLength);
}
else
{
var subStream = new SeekableSubStream(_baseStream,
(long)(_header.DataForkOffset + _table.DataOffset + chunk.CompressedOffset),
(long)chunk.CompressedLength);
var memStream = new MemoryStream();
subStream.CopyTo(memStream);
memStream.Position = 0;
rawStreams[i] = memStream;
}
}
else
{
rawStreams[i] = null;
}
}
// Now we can decompress the chunks in parallel
var streams = new Stream?[_table.Chunks.Count];
Parallel.For(0, streams.Length, i =>
{
var rawStream = rawStreams[i];
if (rawStream is not null)
{
var chunk = _table.Chunks[i];
if ((chunk.Type == BlkxChunkType.Zero)
|| (chunk.Type == BlkxChunkType.Ignore)
|| (chunk.Type == BlkxChunkType.Uncompressed))
{
streams[i] = rawStream;
}
else
{
Stream compStream = chunk.Type switch
{
BlkxChunkType.AdcCompressed => new ADCStream(rawStream, CompressionMode.Decompress),
BlkxChunkType.ZlibCompressed => new ZlibStream(rawStream, CompressionMode.Decompress),
BlkxChunkType.Bz2Compressed => new BZip2Stream(rawStream, CompressionMode.Decompress, false),
_ => throw new InvalidOperationException("Invalid chunk type")
};
var memStream = new MemoryStream();
compStream.CopyTo(memStream);
compStream.Dispose();
memStream.Position = 0;
streams[i] = memStream;
}
rawStream.Dispose();
rawStreams[i] = null;
}
else
{
streams[i] = null;
}
});
return new CompositeStream((IEnumerable<Stream>)streams.Where(s => s is not null));
}
public override int Read(byte[] buffer, int offset, int count)
{
if (_isEnded) return 0;
int readCount = _chunkStream!.Read(buffer, offset, count);
_chunkPos += readCount;
while (readCount < count)
{
// Current chunk has ended, so we have to continue reading from the next chunk.
_chunkIndex = GetNextChunk();
if (_chunkIndex < 0)
{
// We have reached the last chunk
_isEnded = true;
_chunkPos = 0;
_position += readCount;
return readCount;
}
_chunkStream = GetChunkStream();
int rc = _chunkStream.Read(buffer, offset + readCount, count - readCount);
_chunkPos = rc;
readCount += rc;
}
_position += readCount;
return readCount;
}
public override void Flush()
{ }
public override long Seek(long offset, SeekOrigin origin)
{
switch (origin)
{
case SeekOrigin.Begin:
Position = offset;
break;
case SeekOrigin.Current:
Position += offset;
break;
case SeekOrigin.End:
Position = Length - offset;
break;
}
return Position;
}
public override void SetLength(long value)
=> throw new NotSupportedException();
public override void Write(byte[] buffer, int offset, int count)
=> throw new NotSupportedException();
protected override void Dispose(bool disposing)
{ }
}
}
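The removed Decompress method above is a two-phase pattern: buffer every compressed chunk into its own MemoryStream so the workers never contend for the one base stream, then fan the CPU-bound decompression out with Parallel.For. A stripped-down sketch of the same shape; Inflate is a hypothetical stand-in for the per-chunk codec dispatch (ADC, zlib, or bzip2):

static Stream[] DecompressChunks(MemoryStream[] rawChunks)
{
    // rawChunks are already in memory, so the workers share no I/O.
    var results = new Stream[rawChunks.Length];
    Parallel.For(0, rawChunks.Length, i =>
    {
        using var compressed = rawChunks[i];
        var decompressed = new MemoryStream();
        Inflate(compressed, decompressed);  // hypothetical: choose the codec from the chunk type
        decompressed.Position = 0;
        results[i] = decompressed;
    });
    return results;
}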

View File

@@ -1,52 +0,0 @@
using SharpCompress.Common.Dmg.HFS;
using System;
using System.Collections.Generic;
namespace SharpCompress.Common.Dmg
{
public abstract class DmgEntry : Entry
{
public override string Key { get; }
public override bool IsDirectory { get; }
public override long Size { get; }
public override long CompressedSize { get; }
public override CompressionType CompressionType { get; }
public override DateTime? LastModifiedTime { get; }
public override DateTime? CreatedTime { get; }
public override DateTime? LastAccessedTime { get; }
public override DateTime? ArchivedTime { get; }
public override long Crc { get; } = 0; // Not stored
public override string? LinkTarget { get; } = null;
public override bool IsEncrypted { get; } = false;
public override bool IsSplitAfter { get; } = false;
internal override IEnumerable<FilePart> Parts { get; }
internal DmgEntry(HFSCatalogRecord record, string path, long size, DmgFilePart part)
{
Key = path;
IsDirectory = record.Type == HFSCatalogRecordType.Folder;
Size = CompressedSize = size; // There is no way to get the actual compressed size or the compression type of
CompressionType = CompressionType.Unknown; // a file in a DMG archive since the files are nested inside the HFS partition.
Parts = part.AsEnumerable();
if (IsDirectory)
{
var folder = (HFSCatalogFolder)record;
LastModifiedTime = (folder.AttributeModDate > folder.ContentModDate) ? folder.AttributeModDate : folder.ContentModDate;
CreatedTime = folder.CreateDate;
LastAccessedTime = folder.AccessDate;
ArchivedTime = folder.BackupDate;
}
else
{
var file = (HFSCatalogFile)record;
LastModifiedTime = (file.AttributeModDate > file.ContentModDate) ? file.AttributeModDate : file.ContentModDate;
CreatedTime = file.CreateDate;
LastAccessedTime = file.AccessDate;
ArchivedTime = file.BackupDate;
}
}
}
}

View File

@@ -1,21 +0,0 @@
using System.IO;
namespace SharpCompress.Common.Dmg
{
internal sealed class DmgFilePart : FilePart
{
private readonly Stream _stream;
internal override string FilePartName { get; }
public DmgFilePart(Stream stream, string fileName)
: base(new ArchiveEncoding())
{
_stream = stream;
FilePartName = fileName;
}
internal override Stream GetCompressedStream() => _stream;
internal override Stream? GetRawStream() => null;
}
}

View File

@@ -1,183 +0,0 @@
using SharpCompress.Common.Dmg.Headers;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Text;
using System.Xml.Linq;
namespace SharpCompress.Common.Dmg
{
internal static class DmgUtil
{
private const string MalformedXmlMessage = "Malformed XML block";
private static T[] ParseArray<T>(in XElement parent, in Func<XElement, T> parseElement)
{
var list = new List<T>();
foreach (var node in parent.Elements())
list.Add(parseElement(node));
return list.ToArray();
}
private static Dictionary<string, T> ParseDict<T>(in XElement parent, in Func<XElement, T> parseValue)
{
var dict = new Dictionary<string, T>();
string? key = null;
foreach (var node in parent.Elements())
{
if (string.Equals(node.Name.LocalName, "key", StringComparison.Ordinal))
{
key = node.Value;
}
else if (key is not null)
{
var value = parseValue(node);
dict.Add(key, value);
key = null;
}
}
return dict;
}
private static Dictionary<string, Dictionary<string, Dictionary<string, string>[]>> ParsePList(in XDocument doc)
{
var dictNode = doc.Root?.Element("dict");
if (dictNode is null) throw new InvalidFormatException(MalformedXmlMessage);
static Dictionary<string, string> ParseObject(XElement parent)
=> ParseDict(parent, node => node.Value);
static Dictionary<string, string>[] ParseObjectArray(XElement parent)
=> ParseArray(parent, ParseObject);
static Dictionary<string, Dictionary<string, string>[]> ParseSubDict(XElement parent)
=> ParseDict(parent, ParseObjectArray);
return ParseDict(dictNode, ParseSubDict);
}
private static BlkxData CreateDataFromDict(in Dictionary<string, string> dict)
{
static bool TryParseHex(string? s, out uint value)
{
value = 0;
if (string.IsNullOrEmpty(s)) return false;
if (s!.StartsWith("0x", StringComparison.OrdinalIgnoreCase))
s = s.Substring(2);
return uint.TryParse(s, NumberStyles.HexNumber, CultureInfo.InvariantCulture, out value);
}
if (!dict.TryGetValue("ID", out string? idStr) || !int.TryParse(idStr, out int id))
throw new InvalidFormatException(MalformedXmlMessage);
if (!dict.TryGetValue("Name", out string? name))
throw new InvalidFormatException(MalformedXmlMessage);
if (!dict.TryGetValue("Attributes", out string? attribStr) || !TryParseHex(attribStr, out uint attribs))
throw new InvalidFormatException(MalformedXmlMessage);
if (!dict.TryGetValue("Data", out string? base64Data) || string.IsNullOrEmpty(base64Data))
throw new InvalidFormatException(MalformedXmlMessage);
try
{
var data = Convert.FromBase64String(base64Data);
if (!BlkxTable.TryRead(data, out var table))
throw new InvalidFormatException("Invalid BLKX table");
return new BlkxData(id, name, attribs, table!);
}
catch (FormatException ex)
{
throw new InvalidFormatException(MalformedXmlMessage, ex);
}
}
public static DmgBlockDataStream? LoadHFSPartitionStream(Stream baseStream, DmgHeader header)
{
if ((header.XMLOffset + header.XMLLength) >= (ulong)baseStream.Length)
throw new IncompleteArchiveException("XML block incomplete");
if ((header.DataForkOffset + header.DataForkLength) >= (ulong)baseStream.Length)
throw new IncompleteArchiveException("Data block incomplete");
baseStream.Position = (long)header.XMLOffset;
var xmlBuffer = new byte[header.XMLLength];
baseStream.Read(xmlBuffer, 0, (int)header.XMLLength);
var xml = Encoding.ASCII.GetString(xmlBuffer);
var doc = XDocument.Parse(xml);
var pList = ParsePList(doc);
if (!pList.TryGetValue("resource-fork", out var resDict) || !resDict.TryGetValue("blkx", out var blkxDicts))
throw new InvalidFormatException(MalformedXmlMessage);
var objs = new BlkxData[blkxDicts.Length];
for (int i = 0; i < objs.Length; i++)
objs[i] = CreateDataFromDict(blkxDicts[i]);
// Index 0 is the protective MBR partition
// Index 1 is the GPT header
// Index 2 is the GPT partition table
try
{
var headerData = objs[1];
using var headerStream = new DmgBlockDataStream(baseStream, header, headerData.Table);
if (!GptHeader.TryRead(headerStream, out var gptHeader))
throw new InvalidFormatException("Invalid GPT header");
var tableData = objs[2];
using var tableStream = new DmgBlockDataStream(baseStream, header, tableData.Table);
var gptTable = new GptPartitionEntry[gptHeader!.EntriesCount];
for (int i = 0; i < gptHeader.EntriesCount; i++)
gptTable[i] = GptPartitionEntry.Read(tableStream);
foreach (var entry in gptTable)
{
if (entry.TypeGuid == PartitionFormat.AppleHFS)
{
BlkxData? partitionData = null;
for (int i = 3; i < objs.Length; i++)
{
if (objs[i].Name.StartsWith(entry.Name, StringComparison.Ordinal))
{
partitionData = objs[i];
break;
}
}
if (partitionData is null)
throw new InvalidFormatException($"Missing partition {entry.Name}");
return new DmgBlockDataStream(baseStream, header, partitionData.Table);
}
}
return null;
}
catch (EndOfStreamException ex)
{
throw new IncompleteArchiveException("Partition incomplete", ex);
}
}
private sealed class BlkxData
{
public int Id { get; }
public string Name { get; }
public uint Attributes { get; }
public BlkxTable Table { get; }
public BlkxData(int id, string name, uint attributes, BlkxTable table)
{
Id = id;
Name = name;
Attributes = attributes;
Table = table;
}
}
}
}

View File

@@ -1,38 +0,0 @@
using SharpCompress.Archives.Dmg;
using SharpCompress.Common.Dmg.Headers;
using SharpCompress.Common.Dmg.HFS;
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Common.Dmg
{
public class DmgVolume : Volume
{
private readonly DmgArchive _archive;
private readonly string _fileName;
internal DmgHeader Header { get; }
public DmgVolume(DmgArchive archive, Stream stream, string fileName, Readers.ReaderOptions readerOptions)
: base(stream, readerOptions)
{
_archive = archive;
_fileName = fileName;
long pos = stream.Length - DmgHeader.HeaderSize;
if (pos < 0) throw new InvalidFormatException("Invalid DMG volume");
stream.Position = pos;
if (DmgHeader.TryRead(stream, out var header)) Header = header!;
else throw new InvalidFormatException("Invalid DMG volume");
}
internal IEnumerable<DmgArchiveEntry> LoadEntries()
{
var partitionStream = DmgUtil.LoadHFSPartitionStream(Stream, Header);
if (partitionStream is null) return Array.Empty<DmgArchiveEntry>();
else return HFSUtil.LoadEntriesFromPartition(partitionStream, _fileName, _archive);
}
}
}

View File

@@ -1,336 +0,0 @@
using System;
namespace SharpCompress.Common.Dmg.HFS
{
internal sealed class HFSCatalogKey : HFSStructBase, IEquatable<HFSCatalogKey>, IComparable<HFSCatalogKey>, IComparable
{
private readonly StringComparer _comparer;
public uint ParentId { get; }
public string Name { get; }
private static StringComparer GetComparer(HFSKeyCompareType compareType, bool isHFSX)
{
if (isHFSX)
{
return compareType switch
{
HFSKeyCompareType.CaseFolding => StringComparer.InvariantCultureIgnoreCase,
HFSKeyCompareType.BinaryCompare => StringComparer.Ordinal,
_ => StringComparer.InvariantCultureIgnoreCase
};
}
else
{
return StringComparer.InvariantCultureIgnoreCase;
}
}
public HFSCatalogKey(uint parentId, string name, HFSKeyCompareType compareType, bool isHFSX)
{
ParentId = parentId;
Name = name;
_comparer = GetComparer(compareType, isHFSX);
}
public HFSCatalogKey(byte[] key, HFSKeyCompareType compareType, bool isHFSX)
{
ReadOnlySpan<byte> data = key.AsSpan();
ParentId = ReadUInt32(ref data);
Name = ReadString(ref data, true);
_comparer = GetComparer(compareType, isHFSX);
}
public bool Equals(HFSCatalogKey? other)
{
if (other is null) return false;
else return (ParentId == other.ParentId) && _comparer.Equals(Name, other.Name);
}
public override bool Equals(object? obj)
{
if (obj is HFSCatalogKey other) return Equals(other);
else return false;
}
public int CompareTo(HFSCatalogKey? other)
{
if (other is null) return 1;
int result = ParentId.CompareTo(other.ParentId);
if (result == 0) result = _comparer.Compare(Name, other.Name);
return result;
}
public int CompareTo(object? obj)
{
if (obj is null) return 1;
else if (obj is HFSCatalogKey other) return CompareTo(other);
else throw new ArgumentException("Object is not of type CatalogKey", nameof(obj));
}
public override int GetHashCode()
=> ParentId.GetHashCode() ^ _comparer.GetHashCode(Name);
public static bool operator ==(HFSCatalogKey? left, HFSCatalogKey? right)
{
if (left is null) return right is null;
else return left.Equals(right);
}
public static bool operator !=(HFSCatalogKey? left, HFSCatalogKey? right)
{
if (left is null) return right is not null;
else return !left.Equals(right);
}
public static bool operator <(HFSCatalogKey? left, HFSCatalogKey? right)
{
if (left is null) return right is not null;
else return left.CompareTo(right) < 0;
}
public static bool operator >(HFSCatalogKey? left, HFSCatalogKey? right)
{
if (left is null) return false;
else return left.CompareTo(right) > 0;
}
public static bool operator <=(HFSCatalogKey? left, HFSCatalogKey? right)
{
if (left is null) return true;
else return left.CompareTo(right) <= 0;
}
public static bool operator >=(HFSCatalogKey? left, HFSCatalogKey? right)
{
if (left is null) return right is null;
else return left.CompareTo(right) >= 0;
}
}
internal enum HFSCatalogRecordType : ushort
{
Folder = 0x0001,
File = 0x0002,
FolderThread = 0x0003,
FileThread = 0x0004
}
internal abstract class HFSCatalogRecord : HFSStructBase
{
public HFSCatalogRecordType Type { get; }
protected HFSCatalogRecord(HFSCatalogRecordType type)
=> Type = type;
public static bool TryRead(ref ReadOnlySpan<byte> data, HFSKeyCompareType compareType, bool isHFSX, out HFSCatalogRecord? record)
{
record = null;
ushort rawType = ReadUInt16(ref data);
if (!Enum.IsDefined(typeof(HFSCatalogRecordType), rawType)) return false;
var type = (HFSCatalogRecordType)rawType;
switch (type)
{
case HFSCatalogRecordType.Folder:
record = HFSCatalogFolder.Read(ref data);
return true;
case HFSCatalogRecordType.File:
record = HFSCatalogFile.Read(ref data);
return true;
case HFSCatalogRecordType.FolderThread:
record = HFSCatalogThread.Read(ref data, false, compareType, isHFSX);
return true;
case HFSCatalogRecordType.FileThread:
record = HFSCatalogThread.Read(ref data, true, compareType, isHFSX);
return true;
}
return false;
}
}
internal sealed class HFSCatalogFolder : HFSCatalogRecord
{
public uint Valence { get; }
public uint FolderId { get; }
public DateTime CreateDate { get; }
public DateTime ContentModDate { get; }
public DateTime AttributeModDate { get; }
public DateTime AccessDate { get; }
public DateTime BackupDate { get; }
public HFSPermissions Permissions { get; }
public HFSFolderInfo Info { get; }
public uint TextEncoding { get; }
private HFSCatalogFolder(
uint valence,
uint folderId,
DateTime createDate,
DateTime contentModDate,
DateTime attributeModDate,
DateTime accessDate,
DateTime backupDate,
HFSPermissions permissions,
HFSFolderInfo info,
uint textEncoding)
: base(HFSCatalogRecordType.Folder)
{
Valence = valence;
FolderId = folderId;
CreateDate = createDate;
ContentModDate = contentModDate;
AttributeModDate = attributeModDate;
AccessDate = accessDate;
BackupDate = backupDate;
Permissions = permissions;
Info = info;
TextEncoding = textEncoding;
}
public static HFSCatalogFolder Read(ref ReadOnlySpan<byte> data)
{
_ = ReadUInt16(ref data); // reserved
uint valence = ReadUInt32(ref data);
uint folderId = ReadUInt32(ref data);
var createDate = ReadDate(ref data);
var contentModDate = ReadDate(ref data);
var attributeModDate = ReadDate(ref data);
var accessDate = ReadDate(ref data);
var backupDate = ReadDate(ref data);
var permissions = HFSPermissions.Read(ref data);
var info = HFSFolderInfo.Read(ref data);
uint textEncoding = ReadUInt32(ref data);
_ = ReadUInt32(ref data); // reserved
return new HFSCatalogFolder(
valence,
folderId,
createDate,
contentModDate,
attributeModDate,
accessDate,
backupDate,
permissions,
info,
textEncoding);
}
}
internal enum HFSFileFlags : ushort
{
LockedBit = 0x0000,
LockedMask = 0x0001,
ThreadExistsBit = 0x0001,
ThreadExistsMask = 0x0002
}
internal sealed class HFSCatalogFile : HFSCatalogRecord
{
public HFSFileFlags Flags { get; }
public uint FileId { get; }
public DateTime CreateDate { get; }
public DateTime ContentModDate { get; }
public DateTime AttributeModDate { get; }
public DateTime AccessDate { get; }
public DateTime BackupDate { get; }
public HFSPermissions Permissions { get; }
public HFSFileInfo Info { get; }
public uint TextEncoding { get; }
public HFSForkData DataFork { get; }
public HFSForkData ResourceFork { get; }
private HFSCatalogFile(
HFSFileFlags flags,
uint fileId,
DateTime createDate,
DateTime contentModDate,
DateTime attributeModDate,
DateTime accessDate,
DateTime backupDate,
HFSPermissions permissions,
HFSFileInfo info,
uint textEncoding,
HFSForkData dataFork,
HFSForkData resourceFork)
:base(HFSCatalogRecordType.File)
{
Flags = flags;
FileId = fileId;
CreateDate = createDate;
ContentModDate = contentModDate;
AttributeModDate = attributeModDate;
AccessDate = accessDate;
BackupDate = backupDate;
Permissions = permissions;
Info = info;
TextEncoding = textEncoding;
DataFork = dataFork;
ResourceFork = resourceFork;
}
public static HFSCatalogFile Read(ref ReadOnlySpan<byte> data)
{
var flags = (HFSFileFlags)ReadUInt16(ref data);
_ = ReadUInt32(ref data); // reserved
uint fileId = ReadUInt32(ref data);
var createDate = ReadDate(ref data);
var contentModDate = ReadDate(ref data);
var attributeModDate = ReadDate(ref data);
var accessDate = ReadDate(ref data);
var backupDate = ReadDate(ref data);
var permissions = HFSPermissions.Read(ref data);
var info = HFSFileInfo.Read(ref data);
uint textEncoding = ReadUInt32(ref data);
_ = ReadUInt32(ref data); // reserved
var dataFork = HFSForkData.Read(ref data);
var resourceFork = HFSForkData.Read(ref data);
return new HFSCatalogFile(
flags,
fileId,
createDate,
contentModDate,
attributeModDate,
accessDate,
backupDate,
permissions,
info,
textEncoding,
dataFork,
resourceFork);
}
}
internal sealed class HFSCatalogThread : HFSCatalogRecord
{
public uint ParentId { get; }
public string NodeName { get; }
public HFSCatalogKey CatalogKey { get; }
private HFSCatalogThread(uint parentId, string nodeName, bool isFile, HFSKeyCompareType compareType, bool isHFSX)
: base(isFile ? HFSCatalogRecordType.FileThread : HFSCatalogRecordType.FolderThread)
{
ParentId = parentId;
NodeName = nodeName;
CatalogKey = new HFSCatalogKey(ParentId, NodeName, compareType, isHFSX);
}
public static HFSCatalogThread Read(ref ReadOnlySpan<byte> data, bool isFile, HFSKeyCompareType compareType, bool isHFSX)
{
_ = ReadInt16(ref data); // reserved
uint parentId = ReadUInt32(ref data);
string nodeName = ReadString(ref data, true);
return new HFSCatalogThread(parentId, nodeName, isFile, compareType, isHFSX);
}
}
}

View File

@@ -1,31 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Common.Dmg.HFS
{
internal sealed class HFSExtentDescriptor : HFSStructBase
{
public uint StartBlock { get; }
public uint BlockCount { get; }
private HFSExtentDescriptor(uint startBlock, uint blockCount)
{
StartBlock = startBlock;
BlockCount = blockCount;
}
public static HFSExtentDescriptor Read(Stream stream)
{
return new HFSExtentDescriptor(
ReadUInt32(stream),
ReadUInt32(stream));
}
public static HFSExtentDescriptor Read(ref ReadOnlySpan<byte> data)
{
return new HFSExtentDescriptor(
ReadUInt32(ref data),
ReadUInt32(ref data));
}
}
}

View File

@@ -1,115 +0,0 @@
using System;
using System.Collections.Generic;
namespace SharpCompress.Common.Dmg.HFS
{
internal sealed class HFSExtentKey : HFSStructBase, IEquatable<HFSExtentKey>, IComparable<HFSExtentKey>, IComparable
{
public byte ForkType { get; }
public uint FileId { get; }
public uint StartBlock { get; }
public HFSExtentKey(byte forkType, uint fileId, uint startBlock)
{
ForkType = forkType;
FileId = fileId;
StartBlock = startBlock;
}
public HFSExtentKey(byte[] key)
{
ReadOnlySpan<byte> data = key.AsSpan();
ForkType = ReadUInt8(ref data);
_ = ReadUInt8(ref data); // padding
FileId = ReadUInt32(ref data);
StartBlock = ReadUInt32(ref data);
}
public bool Equals(HFSExtentKey? other)
{
if (other is null) return false;
else return (ForkType == other.ForkType) && (FileId == other.FileId) && (StartBlock == other.StartBlock);
}
public override bool Equals(object? obj)
{
if (obj is HFSExtentKey other) return Equals(other);
else return false;
}
public int CompareTo(HFSExtentKey? other)
{
if (other is null) return 1;
int result = FileId.CompareTo(other.FileId);
if (result == 0) result = ForkType.CompareTo(other.ForkType);
if (result == 0) result = StartBlock.CompareTo(other.StartBlock);
return result;
}
public int CompareTo(object? obj)
{
if (obj is null) return 1;
else if (obj is HFSExtentKey other) return CompareTo(other);
else throw new ArgumentException("Object is not of type ExtentKey", nameof(obj));
}
public override int GetHashCode()
=> ForkType.GetHashCode() ^ FileId.GetHashCode() ^ StartBlock.GetHashCode();
public static bool operator ==(HFSExtentKey? left, HFSExtentKey? right)
{
if (left is null) return right is null;
else return left.Equals(right);
}
public static bool operator !=(HFSExtentKey? left, HFSExtentKey? right)
{
if (left is null) return right is not null;
else return !left.Equals(right);
}
public static bool operator <(HFSExtentKey? left, HFSExtentKey? right)
{
if (left is null) return right is not null;
else return left.CompareTo(right) < 0;
}
public static bool operator >(HFSExtentKey? left, HFSExtentKey? right)
{
if (left is null) return false;
else return left.CompareTo(right) > 0;
}
public static bool operator <=(HFSExtentKey? left, HFSExtentKey? right)
{
if (left is null) return true;
else return left.CompareTo(right) <= 0;
}
public static bool operator >=(HFSExtentKey? left, HFSExtentKey? right)
{
if (left is null) return right is null;
else return left.CompareTo(right) >= 0;
}
}
internal sealed class HFSExtentRecord : HFSStructBase
{
private const int ExtentCount = 8;
public IReadOnlyList<HFSExtentDescriptor> Extents { get; }
private HFSExtentRecord(IReadOnlyList<HFSExtentDescriptor> extents)
=> Extents = extents;
public static HFSExtentRecord Read(ref ReadOnlySpan<byte> data)
{
var extents = new HFSExtentDescriptor[ExtentCount];
for (int i = 0; i < ExtentCount; i++)
extents[i] = HFSExtentDescriptor.Read(ref data);
return new HFSExtentRecord(extents);
}
}
}

View File

@@ -1,145 +0,0 @@
using System;
namespace SharpCompress.Common.Dmg.HFS
{
internal struct HFSPoint
{
public short V;
public short H;
}
internal struct HFSRect
{
public short Top;
public short Left;
public short Bottom;
public short Right;
}
[Flags]
internal enum HFSFinderFlags : ushort
{
None = 0x0000,
IsOnDesk = 0x0001, /* Files and folders (System 6) */
Color = 0x000E, /* Files and folders */
IsShared = 0x0040, /* Files only (Applications only) If */
/* clear, the application needs */
/* to write to its resource fork, */
/* and therefore cannot be shared */
/* on a server */
HasNoINITs = 0x0080, /* Files only (Extensions/Control */
/* Panels only) */
/* This file contains no INIT resource */
HasBeenInited = 0x0100, /* Files only. Clear if the file */
/* contains desktop database resources */
/* ('BNDL', 'FREF', 'open', 'kind'...) */
/* that have not been added yet. Set */
/* only by the Finder. */
/* Reserved for folders */
HasCustomIcon = 0x0400, /* Files and folders */
IsStationery = 0x0800, /* Files only */
NameLocked = 0x1000, /* Files and folders */
HasBundle = 0x2000, /* Files only */
IsInvisible = 0x4000, /* Files and folders */
IsAlias = 0x8000 /* Files only */
}
[Flags]
internal enum HFSExtendedFinderFlags : ushort
{
None = 0x0000,
ExtendedFlagsAreInvalid = 0x8000, /* The other extended flags */
/* should be ignored */
HasCustomBadge = 0x0100, /* The file or folder has a */
/* badge resource */
HasRoutingInfo = 0x0004 /* The file contains routing */
/* info resource */
}
internal sealed class HFSFileInfo : HFSStructBase
{
public string FileType { get; } /* The type of the file */
public string FileCreator { get; } /* The file's creator */
public HFSFinderFlags FinderFlags { get; }
public HFSPoint Location { get; } /* File's location in the folder. */
public HFSExtendedFinderFlags ExtendedFinderFlags { get; }
public int PutAwayFolderId { get; }
private HFSFileInfo(
string fileType,
string fileCreator,
HFSFinderFlags finderFlags,
HFSPoint location,
HFSExtendedFinderFlags extendedFinderFlags,
int putAwayFolderId)
{
FileType = fileType;
FileCreator = fileCreator;
FinderFlags = finderFlags;
Location = location;
ExtendedFinderFlags = extendedFinderFlags;
PutAwayFolderId = putAwayFolderId;
}
public static HFSFileInfo Read(ref ReadOnlySpan<byte> data)
{
string fileType = ReadOSType(ref data);
string fileCreator = ReadOSType(ref data);
var finderFlags = (HFSFinderFlags)ReadUInt16(ref data);
var location = ReadPoint(ref data);
_ = ReadUInt16(ref data); // reserved
data = data.Slice(4 * sizeof(short)); // reserved
var extendedFinderFlags = (HFSExtendedFinderFlags)ReadUInt16(ref data);
_ = ReadInt16(ref data); // reserved
int putAwayFolderId = ReadInt32(ref data);
return new HFSFileInfo(fileType, fileCreator, finderFlags, location, extendedFinderFlags, putAwayFolderId);
}
}
internal sealed class HFSFolderInfo : HFSStructBase
{
public HFSRect WindowBounds { get; } /* The position and dimension of the */
/* folder's window */
public HFSFinderFlags FinderFlags { get; }
public HFSPoint Location { get; } /* Folder's location in the parent */
/* folder. If set to {0, 0}, the Finder */
/* will place the item automatically */
public HFSPoint ScrollPosition { get; } /* Scroll position (for icon views) */
public HFSExtendedFinderFlags ExtendedFinderFlags { get; }
public int PutAwayFolderId { get; }
private HFSFolderInfo(
HFSRect windowBounds,
HFSFinderFlags finderFlags,
HFSPoint location,
HFSPoint scrollPosition,
HFSExtendedFinderFlags extendedFinderFlags,
int putAwayFolderId)
{
WindowBounds = windowBounds;
FinderFlags = finderFlags;
Location = location;
ScrollPosition = scrollPosition;
ExtendedFinderFlags = extendedFinderFlags;
PutAwayFolderId = putAwayFolderId;
}
public static HFSFolderInfo Read(ref ReadOnlySpan<byte> data)
{
var windowBounds = ReadRect(ref data);
var finderFlags = (HFSFinderFlags)ReadUInt16(ref data);
var location = ReadPoint(ref data);
_ = ReadUInt16(ref data); // reserved
var scrollPosition = ReadPoint(ref data);
_ = ReadInt32(ref data); // reserved
var extendedFinderFlags = (HFSExtendedFinderFlags)ReadUInt16(ref data);
_ = ReadInt16(ref data); // reserved
int putAwayFolderId = ReadInt32(ref data);
return new HFSFolderInfo(windowBounds, finderFlags, location, scrollPosition, extendedFinderFlags, putAwayFolderId);
}
}
}

View File

@@ -1,50 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Common.Dmg.HFS
{
internal sealed class HFSForkData : HFSStructBase
{
private const int ExtentCount = 8;
public ulong LogicalSize { get; }
public uint ClumpSize { get; }
public uint TotalBlocks { get; }
public IReadOnlyList<HFSExtentDescriptor> Extents { get; }
private HFSForkData(ulong logicalSize, uint clumpSize, uint totalBlocks, IReadOnlyList<HFSExtentDescriptor> extents)
{
LogicalSize = logicalSize;
ClumpSize = clumpSize;
TotalBlocks = totalBlocks;
Extents = extents;
}
public static HFSForkData Read(Stream stream)
{
ulong logicalSize = ReadUInt64(stream);
uint clumpSize = ReadUInt32(stream);
uint totalBlocks = ReadUInt32(stream);
var extents = new HFSExtentDescriptor[ExtentCount];
for (int i = 0; i < ExtentCount; i++)
extents[i] = HFSExtentDescriptor.Read(stream);
return new HFSForkData(logicalSize, clumpSize, totalBlocks, extents);
}
public static HFSForkData Read(ref ReadOnlySpan<byte> data)
{
ulong logicalSize = ReadUInt64(ref data);
uint clumpSize = ReadUInt32(ref data);
uint totalBlocks = ReadUInt32(ref data);
var extents = new HFSExtentDescriptor[ExtentCount];
for (int i = 0; i < ExtentCount; i++)
extents[i] = HFSExtentDescriptor.Read(ref data);
return new HFSForkData(logicalSize, clumpSize, totalBlocks, extents);
}
}
}

View File

@@ -1,196 +0,0 @@
using SharpCompress.IO;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace SharpCompress.Common.Dmg.HFS
{
internal sealed class HFSForkStream : Stream
{
private readonly Stream _baseStream;
private readonly HFSVolumeHeader _volumeHeader;
private readonly IReadOnlyList<HFSExtentDescriptor> _extents;
private long _position;
private bool _isEnded;
private int _extentIndex;
private Stream? _extentStream;
public override bool CanRead => true;
public override bool CanWrite => false;
public override bool CanSeek => true;
public override long Length { get; }
public override long Position
{
get => _position;
set
{
if ((value < 0) || (value > Length)) throw new ArgumentOutOfRangeException(nameof(value));
if (value == Length)
{
// End of the stream
_position = Length;
_isEnded = true;
_extentIndex = -1;
_extentStream = null;
}
else if (value != _position)
{
_position = value;
// We first have to determine in which extent we are now, then we seek to the exact position in that extent.
long offsetInExtent = _position;
for (int i = 0; i < _extents.Count; i++)
{
var extent = _extents[i];
long extentSize = extent.BlockCount * _volumeHeader.BlockSize;
if (offsetInExtent < extentSize)
{
if (i == _extentIndex)
{
// We are in the same extent so just seek to the correct position
_extentStream!.Position = offsetInExtent;
}
else
{
_extentIndex = i;
_extentStream = GetExtentStream();
_extentStream.Position = offsetInExtent;
}
break;
}
else
{
offsetInExtent -= extentSize;
}
}
}
}
}
public HFSForkStream(Stream baseStream, HFSVolumeHeader volumeHeader, HFSForkData forkData)
{
_baseStream = baseStream;
_volumeHeader = volumeHeader;
_extents = forkData.Extents;
Length = (long)forkData.LogicalSize;
_position = 0;
_extentIndex = -1;
_extentIndex = GetNextExtent();
_isEnded = _extentIndex < 0;
if (!_isEnded) _extentStream = GetExtentStream();
}
public HFSForkStream(
Stream baseStream, HFSVolumeHeader volumeHeader, HFSForkData forkData, uint fileId,
IReadOnlyDictionary<HFSExtentKey, HFSExtentRecord> extents)
{
_baseStream = baseStream;
_volumeHeader = volumeHeader;
Length = (long)forkData.LogicalSize;
uint blocks = (uint)forkData.Extents.Sum(e => e.BlockCount);
var totalExtents = new List<HFSExtentDescriptor>(forkData.Extents);
_extents = totalExtents;
var nextKey = new HFSExtentKey(0, fileId, blocks);
while (extents.TryGetValue(nextKey, out var record))
{
blocks += (uint)record.Extents.Sum(e => e.BlockCount);
totalExtents.AddRange(record.Extents);
nextKey = new HFSExtentKey(0, fileId, blocks);
}
_position = 0;
_extentIndex = -1;
_extentIndex = GetNextExtent();
_isEnded = _extentIndex < 0;
if (!_isEnded) _extentStream = GetExtentStream();
}
private int GetNextExtent()
{
int index = _extentIndex + 1;
if (index >= _extents.Count) return -1;
var extent = _extents[index];
if ((extent.StartBlock == 0) && (extent.BlockCount == 0)) return -1;
return index;
}
private Stream GetExtentStream()
{
if (_extentIndex < 0)
throw new InvalidOperationException("Invalid extent index");
var extent = _extents[_extentIndex];
return new HFSExtentStream(_baseStream, _volumeHeader, extent);
}
public override void Flush()
{ }
public override int Read(byte[] buffer, int offset, int count)
{
if (_isEnded) return 0;
count = (int)Math.Min(count, Length - Position);
int readCount = _extentStream!.Read(buffer, offset, count);
while (readCount < count)
{
_extentIndex = GetNextExtent();
if (_extentIndex < 0)
{
_isEnded = true;
return readCount;
}
_extentStream = GetExtentStream();
readCount += _extentStream.Read(buffer, offset + readCount, count - readCount);
}
_position += readCount;
return readCount;
}
public override long Seek(long offset, SeekOrigin origin)
{
switch (origin)
{
case SeekOrigin.Begin:
Position = offset;
break;
case SeekOrigin.Current:
Position += offset;
break;
case SeekOrigin.End:
Position = Length - offset;
break;
}
return Position;
}
public override void SetLength(long value)
=> throw new NotSupportedException();
public override void Write(byte[] buffer, int offset, int count)
=> throw new NotSupportedException();
private sealed class HFSExtentStream : SeekableSubStream
{
public HFSExtentStream(Stream stream, HFSVolumeHeader volumeHeader, HFSExtentDescriptor extent)
: base(stream, (long)extent.StartBlock * volumeHeader.BlockSize, (long)extent.BlockCount * volumeHeader.BlockSize)
{ }
}
}
}
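The Position setter above maps a logical fork offset to an (extent, offset-within-extent) pair by subtracting extent sizes until the remainder fits. Worked through: with extents of 8192 and 16384 bytes, offset 10000 skips extent 0 (10000 - 8192 = 1808) and lands in extent 1 at offset 1808. A self-contained sketch of that arithmetic, with hypothetical names:

static (int extentIndex, long offsetInExtent) Locate(long position, long[] extentSizes)
{
    for (int i = 0; i < extentSizes.Length; i++)
    {
        if (position < extentSizes[i]) return (i, position);  // remainder fits in this extent
        position -= extentSizes[i];
    }
    throw new ArgumentOutOfRangeException(nameof(position), "offset lies past the last extent");
}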

View File

@@ -1,91 +0,0 @@
using System;
namespace SharpCompress.Common.Dmg.HFS
{
internal abstract class HFSKeyedRecord : HFSStructBase
{
private readonly HFSKeyCompareType _compareType;
private readonly bool _isHFSX;
private HFSCatalogKey? _catalogKey;
private HFSExtentKey? _extentKey;
public byte[] Key { get; }
public HFSCatalogKey GetCatalogKey() => _catalogKey ??= new HFSCatalogKey(Key, _compareType, _isHFSX);
public HFSExtentKey GetExtentKey() => _extentKey ??= new HFSExtentKey(Key);
protected HFSKeyedRecord(byte[] key, HFSKeyCompareType compareType, bool isHFSX)
{
Key = key;
_compareType = compareType;
_isHFSX = isHFSX;
}
}
internal sealed class HFSPointerRecord : HFSKeyedRecord
{
public uint NodeNumber { get; }
private HFSPointerRecord(byte[] key, uint nodeNumber, HFSKeyCompareType compareType, bool isHFSX)
: base(key, compareType, isHFSX)
{
NodeNumber = nodeNumber;
}
public static HFSPointerRecord Read(ref ReadOnlySpan<byte> data, HFSTreeHeaderRecord headerRecord, bool isHFSX)
{
bool isBigKey = headerRecord.Attributes.HasFlag(HFSTreeAttributes.BigKeys);
ushort keyLength = isBigKey ? ReadUInt16(ref data) : ReadUInt8(ref data);
if (!headerRecord.Attributes.HasFlag(HFSTreeAttributes.VariableIndexKeys)) keyLength = headerRecord.MaxKeyLength;
int keySize = (isBigKey ? 2 : 1) + keyLength;
var key = new byte[keyLength];
data.Slice(0, keyLength).CopyTo(key);
data = data.Slice(keyLength);
// data is always aligned to 2 bytes
if (keySize % 2 == 1) data = data.Slice(1);
uint nodeNumber = ReadUInt32(ref data);
return new HFSPointerRecord(key, nodeNumber, headerRecord.KeyCompareType, isHFSX);
}
}
internal sealed class HFSDataRecord : HFSKeyedRecord
{
public byte[] Data { get; }
private HFSDataRecord(byte[] key, byte[] data, HFSKeyCompareType compareType, bool isHFSX)
: base(key, compareType, isHFSX)
{
Data = data;
}
public static HFSDataRecord Read(ref ReadOnlySpan<byte> data, int size, HFSTreeHeaderRecord headerRecord, bool isHFSX)
{
bool isBigKey = headerRecord.Attributes.HasFlag(HFSTreeAttributes.BigKeys);
ushort keyLength = isBigKey ? ReadUInt16(ref data) : ReadUInt8(ref data);
int keySize = (isBigKey ? 2 : 1) + keyLength;
size -= keySize;
var key = new byte[keyLength];
data.Slice(0, keyLength).CopyTo(key);
data = data.Slice(keyLength);
// data is always aligned to 2 bytes
if (keySize % 2 == 1)
{
data = data.Slice(1);
size--;
}
var structData = new byte[size];
data.Slice(0, size).CopyTo(structData);
data = data.Slice(size);
return new HFSDataRecord(key, structData, headerRecord.KeyCompareType, isHFSX);
}
}
}

View File

@@ -1,35 +0,0 @@
using System;
namespace SharpCompress.Common.Dmg.HFS
{
internal sealed class HFSPermissions : HFSStructBase
{
public uint OwnerID { get; }
public uint GroupID { get; }
public byte AdminFlags { get; }
public byte OwnerFlags { get; }
public ushort FileMode { get; }
public uint Special { get; }
private HFSPermissions(uint ownerID, uint groupID, byte adminFlags, byte ownerFlags, ushort fileMode, uint special)
{
OwnerID = ownerID;
GroupID = groupID;
AdminFlags = adminFlags;
OwnerFlags = ownerFlags;
FileMode = fileMode;
Special = special;
}
public static HFSPermissions Read(ref ReadOnlySpan<byte> data)
{
return new HFSPermissions(
ReadUInt32(ref data),
ReadUInt32(ref data),
ReadUInt8(ref data),
ReadUInt8(ref data),
ReadUInt16(ref data),
ReadUInt32(ref data));
}
}
}

View File

@@ -1,187 +0,0 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Text;
namespace SharpCompress.Common.Dmg.HFS
{
internal abstract class HFSStructBase
{
private const int StringSize = 510;
private const int OSTypeSize = 4;
private static readonly DateTime Epoch = new DateTime(1904, 1, 1, 0, 0, 0, DateTimeKind.Utc);
private static readonly byte[] _buffer = new byte[StringSize];
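// NOTE: this shared scratch buffer assumes single-threaded parsing;
// concurrent readers would overwrite each other's data.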
protected static byte ReadUInt8(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(byte)) != sizeof(byte))
throw new EndOfStreamException();
return _buffer[0];
}
protected static ushort ReadUInt16(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(ushort)) != sizeof(ushort))
throw new EndOfStreamException();
return BinaryPrimitives.ReadUInt16BigEndian(_buffer);
}
protected static short ReadInt16(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(short)) != sizeof(short))
throw new EndOfStreamException();
return BinaryPrimitives.ReadInt16BigEndian(_buffer);
}
protected static uint ReadUInt32(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(uint)) != sizeof(uint))
throw new EndOfStreamException();
return BinaryPrimitives.ReadUInt32BigEndian(_buffer);
}
protected static int ReadInt32(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(int)) != sizeof(int))
throw new EndOfStreamException();
return BinaryPrimitives.ReadInt32BigEndian(_buffer);
}
protected static ulong ReadUInt64(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(ulong)) != sizeof(ulong))
throw new EndOfStreamException();
return BinaryPrimitives.ReadUInt64BigEndian(_buffer);
}
protected static long ReadInt64(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(long)) != sizeof(long))
throw new EndOfStreamException();
return BinaryPrimitives.ReadInt64BigEndian(_buffer);
}
protected static string ReadString(Stream stream)
{
ushort length = ReadUInt16(stream);
if (stream.Read(_buffer, 0, StringSize) != StringSize)
throw new EndOfStreamException();
// HFS Unicode strings are big-endian UTF-16, matching the span-based overload below.
return Encoding.BigEndianUnicode.GetString(_buffer, 0, Math.Min(length * 2, StringSize));
}
protected static DateTime ReadDate(Stream stream)
{
uint seconds = ReadUInt32(stream);
var span = TimeSpan.FromSeconds(seconds);
return Epoch + span;
}
protected static byte ReadUInt8(ref ReadOnlySpan<byte> data)
{
byte val = data[0];
data = data.Slice(sizeof(byte));
return val;
}
protected static ushort ReadUInt16(ref ReadOnlySpan<byte> data)
{
ushort val = BinaryPrimitives.ReadUInt16BigEndian(data);
data = data.Slice(sizeof(ushort));
return val;
}
protected static short ReadInt16(ref ReadOnlySpan<byte> data)
{
short val = BinaryPrimitives.ReadInt16BigEndian(data);
data = data.Slice(sizeof(short));
return val;
}
protected static uint ReadUInt32(ref ReadOnlySpan<byte> data)
{
uint val = BinaryPrimitives.ReadUInt32BigEndian(data);
data = data.Slice(sizeof(uint));
return val;
}
protected static int ReadInt32(ref ReadOnlySpan<byte> data)
{
int val = BinaryPrimitives.ReadInt32BigEndian(data);
data = data.Slice(sizeof(int));
return val;
}
protected static ulong ReadUInt64(ref ReadOnlySpan<byte> data)
{
ulong val = BinaryPrimitives.ReadUInt64BigEndian(data);
data = data.Slice(sizeof(ulong));
return val;
}
protected static long ReadInt64(ref ReadOnlySpan<byte> data)
{
long val = BinaryPrimitives.ReadInt64BigEndian(data);
data = data.Slice(sizeof(long));
return val;
}
protected static string ReadString(ref ReadOnlySpan<byte> data, bool truncate)
{
int length = ReadUInt16(ref data);
if (truncate)
{
length = Math.Min(length * 2, StringSize);
data.Slice(0, length).CopyTo(_buffer);
data = data.Slice(length);
return Encoding.BigEndianUnicode.GetString(_buffer, 0, length);
}
else
{
data.Slice(0, StringSize).CopyTo(_buffer);
data = data.Slice(StringSize);
return Encoding.BigEndianUnicode.GetString(_buffer, 0, Math.Min(length * 2, StringSize));
}
}
protected static DateTime ReadDate(ref ReadOnlySpan<byte> data)
{
uint seconds = ReadUInt32(ref data);
var span = TimeSpan.FromSeconds(seconds);
return Epoch + span;
}
protected static string ReadOSType(ref ReadOnlySpan<byte> data)
{
data.Slice(0, OSTypeSize).CopyTo(_buffer);
data = data.Slice(OSTypeSize);
return Encoding.ASCII.GetString(_buffer, 0, OSTypeSize).NullTerminate();
}
protected static HFSPoint ReadPoint(ref ReadOnlySpan<byte> data)
{
return new HFSPoint()
{
V = ReadInt16(ref data),
H = ReadInt16(ref data)
};
}
protected static HFSRect ReadRect(ref ReadOnlySpan<byte> data)
{
return new HFSRect()
{
Top = ReadInt16(ref data),
Left = ReadInt16(ref data),
Bottom = ReadInt16(ref data),
Right = ReadInt16(ref data)
};
}
}
}
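Worked example (illustrative only): HFS dates are an unsigned count of seconds from the 1904 epoch defined above, so decoding is a single addition:

using System;
internal static class HfsDateSketch
{
    private static readonly DateTime Epoch = new DateTime(1904, 1, 1, 0, 0, 0, DateTimeKind.Utc);
    public static void Main()
    {
        uint rawSeconds = 3600; // value chosen for illustration
        Console.WriteLine(Epoch + TimeSpan.FromSeconds(rawSeconds)); // one hour past the HFS epoch
    }
}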

View File

@@ -1,108 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Common.Dmg.HFS
{
internal enum HFSTreeType : byte
{
HFS = 0, // control file
User = 128, // user btree type starts from 128
Reserved = 255
}
internal enum HFSKeyCompareType : byte
{
CaseFolding = 0xCF, // case-insensitive
BinaryCompare = 0xBC // case-sensitive
}
[Flags]
internal enum HFSTreeAttributes : uint
{
None = 0x00000000,
BadClose = 0x00000001,
BigKeys = 0x00000002,
VariableIndexKeys = 0x00000004
}
internal sealed class HFSTreeHeaderRecord : HFSStructBase
{
public ushort TreeDepth;
public uint RootNode;
public uint LeafRecords;
public uint FirstLeafNode;
public uint LastLeafNode;
public ushort NodeSize;
public ushort MaxKeyLength;
public uint TotalNodes;
public uint FreeNodes;
public uint ClumpSize;
public HFSTreeType TreeType;
public HFSKeyCompareType KeyCompareType;
public HFSTreeAttributes Attributes;
private HFSTreeHeaderRecord(
ushort treeDepth,
uint rootNode,
uint leafRecords,
uint firstLeafNode,
uint lastLeafNode,
ushort nodeSize,
ushort maxKeyLength,
uint totalNodes,
uint freeNodes,
uint clumpSize,
HFSTreeType treeType,
HFSKeyCompareType keyCompareType,
HFSTreeAttributes attributes)
{
TreeDepth = treeDepth;
RootNode = rootNode;
LeafRecords = leafRecords;
FirstLeafNode = firstLeafNode;
LastLeafNode = lastLeafNode;
NodeSize = nodeSize;
MaxKeyLength = maxKeyLength;
TotalNodes = totalNodes;
FreeNodes = freeNodes;
ClumpSize = clumpSize;
TreeType = treeType;
KeyCompareType = keyCompareType;
Attributes = attributes;
}
public static HFSTreeHeaderRecord Read(Stream stream)
{
ushort treeDepth = ReadUInt16(stream);
uint rootNode = ReadUInt32(stream);
uint leafRecords = ReadUInt32(stream);
uint firstLeafNode = ReadUInt32(stream);
uint lastLeafNode = ReadUInt32(stream);
ushort nodeSize = ReadUInt16(stream);
ushort maxKeyLength = ReadUInt16(stream);
uint totalNodes = ReadUInt32(stream);
uint freeNodes = ReadUInt32(stream);
_ = ReadUInt16(stream); // reserved
uint clumpSize = ReadUInt32(stream);
var treeType = (HFSTreeType)ReadUInt8(stream);
var keyCompareType = (HFSKeyCompareType)ReadUInt8(stream);
var attributes = (HFSTreeAttributes)ReadUInt32(stream);
for (int i = 0; i < 16; i++) _ = ReadUInt32(stream); // reserved
return new HFSTreeHeaderRecord(
treeDepth,
rootNode,
leafRecords,
firstLeafNode,
lastLeafNode,
nodeSize,
maxKeyLength,
totalNodes,
freeNodes,
clumpSize,
treeType,
keyCompareType,
attributes);
}
}
}
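Aside: the two attribute flags read above drive the keyed-record parsers. A compact sketch (hypothetical helper) of the decisions they control:

internal static class TreeAttributeSketch
{
    // BigKeys selects a 2-byte key-length prefix; when VariableIndexKeys is absent,
    // index keys occupy MaxKeyLength bytes (mirroring HFSPointerRecord.Read above).
    public static (int prefixBytes, bool padIndexKeys) Interpret(HFSTreeAttributes attributes)
        => (attributes.HasFlag(HFSTreeAttributes.BigKeys) ? 2 : 1,
            !attributes.HasFlag(HFSTreeAttributes.VariableIndexKeys));
}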

View File

@@ -1,167 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Common.Dmg.HFS
{
internal abstract class HFSTreeNode : HFSStructBase
{
private static byte[]? _buffer = null;
public HFSTreeNodeDescriptor Descriptor { get; }
protected HFSTreeNode(HFSTreeNodeDescriptor descriptor)
=> Descriptor = descriptor;
public static bool TryRead(Stream stream, HFSTreeHeaderRecord headerRecord, bool isHFSX, out HFSTreeNode? node)
{
node = null;
if (!HFSTreeNodeDescriptor.TryRead(stream, out var descriptor)) return false;
int size = (int)headerRecord.NodeSize - HFSTreeNodeDescriptor.Size;
if ((_buffer is null) || (_buffer.Length < size))
_buffer = new byte[size * 2];
if (stream.Read(_buffer, 0, size) != size)
throw new EndOfStreamException();
ReadOnlySpan<byte> data = _buffer.AsSpan(0, size);
switch (descriptor!.Kind)
{
case HFSTreeNodeKind.Leaf:
node = HFSLeafTreeNode.Read(descriptor, data, headerRecord, isHFSX);
return true;
case HFSTreeNodeKind.Index:
node = HFSIndexTreeNode.Read(descriptor, data, headerRecord, isHFSX);
return true;
case HFSTreeNodeKind.Map:
node = HFSMapTreeNode.Read(descriptor, data);
return true;
}
return false;
}
}
internal sealed class HFSHeaderTreeNode : HFSTreeNode
{
private const int UserDataSize = 128;
public HFSTreeHeaderRecord HeaderRecord { get; }
public IReadOnlyList<byte> UserData { get; }
public IReadOnlyList<byte> Map { get; }
private HFSHeaderTreeNode(
HFSTreeNodeDescriptor descriptor,
HFSTreeHeaderRecord headerRecord,
IReadOnlyList<byte> userData,
IReadOnlyList<byte> map)
: base(descriptor)
{
HeaderRecord = headerRecord;
UserData = userData;
Map = map;
}
public static HFSHeaderTreeNode Read(HFSTreeNodeDescriptor descriptor, Stream stream)
{
if (descriptor.Kind != HFSTreeNodeKind.Header)
throw new ArgumentException("Descriptor does not define a header node");
var headerRecord = HFSTreeHeaderRecord.Read(stream);
var userData = new byte[UserDataSize];
if (stream.Read(userData, 0, UserDataSize) != UserDataSize)
throw new EndOfStreamException();
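// 256 = node descriptor (14) + header record (106) + user data (128) + the four 2-byte record offsets (8).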
int mapSize = (int)(headerRecord.NodeSize - 256);
var map = new byte[mapSize];
if (stream.Read(map, 0, mapSize) != mapSize)
throw new EndOfStreamException();
// offset values (not required for header node)
_ = ReadUInt16(stream);
_ = ReadUInt16(stream);
_ = ReadUInt16(stream);
_ = ReadUInt16(stream);
return new HFSHeaderTreeNode(descriptor, headerRecord, userData, map);
}
}
internal sealed class HFSMapTreeNode : HFSTreeNode
{
public IReadOnlyList<byte> Map { get; }
private HFSMapTreeNode(HFSTreeNodeDescriptor descriptor, IReadOnlyList<byte> map)
: base(descriptor)
{
Map = map;
}
public static HFSMapTreeNode Read(HFSTreeNodeDescriptor descriptor, ReadOnlySpan<byte> data)
{
int mapSize = data.Length - 6;
var map = new byte[mapSize];
data.Slice(0, mapSize).CopyTo(map);
return new HFSMapTreeNode(descriptor, map);
}
}
internal sealed class HFSIndexTreeNode : HFSTreeNode
{
public IReadOnlyList<HFSPointerRecord> Records { get; }
private HFSIndexTreeNode(HFSTreeNodeDescriptor descriptor, IReadOnlyList<HFSPointerRecord> records)
: base(descriptor)
{
Records = records;
}
public static HFSIndexTreeNode Read(HFSTreeNodeDescriptor descriptor, ReadOnlySpan<byte> data, HFSTreeHeaderRecord headerRecord, bool isHFSX)
{
int recordCount = descriptor.NumRecords;
var records = new HFSPointerRecord[recordCount];
for (int i = 0; i < recordCount; i++)
records[i] = HFSPointerRecord.Read(ref data, headerRecord, isHFSX);
return new HFSIndexTreeNode(descriptor, records);
}
}
internal sealed class HFSLeafTreeNode : HFSTreeNode
{
public IReadOnlyList<HFSDataRecord> Records { get; }
private HFSLeafTreeNode(HFSTreeNodeDescriptor descriptor, IReadOnlyList<HFSDataRecord> records)
: base(descriptor)
{
Records = records;
}
public static HFSLeafTreeNode Read(HFSTreeNodeDescriptor descriptor, ReadOnlySpan<byte> data, HFSTreeHeaderRecord headerRecord, bool isHFSX)
{
int recordCount = descriptor.NumRecords;
var recordOffsets = new int[recordCount + 1];
for (int i = 0; i < recordOffsets.Length; i++)
{
var offsetData = data.Slice(data.Length - (2 * i) - 2);
ushort offset = ReadUInt16(ref offsetData);
recordOffsets[i] = offset;
}
var records = new HFSDataRecord[recordCount];
for (int i = 0; i < recordCount; i++)
{
int size = recordOffsets[i + 1] - recordOffsets[i];
records[i] = HFSDataRecord.Read(ref data, size, headerRecord, isHFSX);
}
return new HFSLeafTreeNode(descriptor, records);
}
}
}
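Aside: leaf nodes keep a stack of big-endian record offsets growing backwards from the node's end, so the slice arithmetic in HFSLeafTreeNode.Read reduces to the following (hypothetical helper); record i's length is then offsets[i + 1] - offsets[i]:

using System;
using System.Buffers.Binary;
internal static class OffsetStackSketch
{
    // Offset i sits 2 * i + 2 bytes before the end of the node data.
    public static ushort ReadOffset(ReadOnlySpan<byte> nodeData, int i)
        => BinaryPrimitives.ReadUInt16BigEndian(nodeData.Slice(nodeData.Length - 2 * i - 2));
}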

View File

@@ -1,55 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Common.Dmg.HFS
{
internal enum HFSTreeNodeKind : sbyte
{
Leaf = -1,
Index = 0,
Header = 1,
Map = 2
}
internal sealed class HFSTreeNodeDescriptor : HFSStructBase
{
public const int Size = 14;
public uint FLink { get; }
public uint BLink { get; }
public HFSTreeNodeKind Kind { get; }
public byte Height { get; }
public ushort NumRecords { get; }
private HFSTreeNodeDescriptor(uint fLink, uint bLink, HFSTreeNodeKind kind, byte height, ushort numRecords)
{
FLink = fLink;
BLink = bLink;
Kind = kind;
Height = height;
NumRecords = numRecords;
}
public static bool TryRead(Stream stream, out HFSTreeNodeDescriptor? descriptor)
{
descriptor = null;
uint fLink = ReadUInt32(stream);
uint bLink = ReadUInt32(stream);
sbyte rawKind = (sbyte)ReadUInt8(stream);
if (!Enum.IsDefined(typeof(HFSTreeNodeKind), rawKind)) return false;
var kind = (HFSTreeNodeKind)rawKind;
byte height = ReadUInt8(stream);
if (((kind == HFSTreeNodeKind.Header) || (kind == HFSTreeNodeKind.Map)) && (height != 0)) return false;
if ((kind == HFSTreeNodeKind.Leaf) && (height != 1)) return false;
ushort numRecords = ReadUInt16(stream);
_ = ReadUInt16(stream); // reserved
descriptor = new HFSTreeNodeDescriptor(fLink, bLink, kind, height, numRecords);
return true;
}
}
}

View File

@@ -1,206 +0,0 @@
using SharpCompress.Archives.Dmg;
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace SharpCompress.Common.Dmg.HFS
{
internal static class HFSUtil
{
private const string CorruptHFSMessage = "Corrupt HFS volume";
private static (HFSHeaderTreeNode, IReadOnlyList<HFSTreeNode>) ReadTree(Stream stream, bool isHFSX)
{
if (!HFSTreeNodeDescriptor.TryRead(stream, out var headerDesc))
throw new InvalidFormatException(CorruptHFSMessage);
var header = HFSHeaderTreeNode.Read(headerDesc!, stream);
var nodes = new HFSTreeNode[header.HeaderRecord.TotalNodes];
nodes[0] = header;
for (int i = 1; i < nodes.Length; i++)
{
if (!HFSTreeNode.TryRead(stream, header.HeaderRecord, isHFSX, out var node))
throw new InvalidFormatException(CorruptHFSMessage);
nodes[i] = node!;
}
return (header, nodes);
}
private static void EnumerateExtentsTree(
IReadOnlyList<HFSTreeNode> extentsTree,
IDictionary<HFSExtentKey, HFSExtentRecord> records,
int parentIndex)
{
var parent = extentsTree[parentIndex];
if (parent is HFSLeafTreeNode leafNode)
{
foreach (var record in leafNode.Records)
{
ReadOnlySpan<byte> data = record.Data.AsSpan();
var recordData = HFSExtentRecord.Read(ref data);
var key = record.GetExtentKey();
records.Add(key, recordData);
}
}
else if (parent is HFSIndexTreeNode indexNode)
{
foreach (var record in indexNode.Records)
EnumerateExtentsTree(extentsTree, records, (int)record.NodeNumber);
}
else
{
throw new InvalidFormatException(CorruptHFSMessage);
}
}
private static IReadOnlyDictionary<HFSExtentKey, HFSExtentRecord> LoadExtents(IReadOnlyList<HFSTreeNode> extentsTree, int rootIndex)
{
var records = new Dictionary<HFSExtentKey, HFSExtentRecord>();
if (rootIndex == 0) return records;
EnumerateExtentsTree(extentsTree, records, rootIndex);
return records;
}
private static void EnumerateCatalogTree(
HFSHeaderTreeNode catalogHeader,
IReadOnlyList<HFSTreeNode> catalogTree,
IDictionary<HFSCatalogKey, HFSCatalogRecord> records,
IDictionary<uint, HFSCatalogThread> threads,
int parentIndex,
bool isHFSX)
{
var parent = catalogTree[parentIndex];
if (parent is HFSLeafTreeNode leafNode)
{
foreach (var record in leafNode.Records)
{
ReadOnlySpan<byte> data = record.Data.AsSpan();
if (HFSCatalogRecord.TryRead(ref data, catalogHeader.HeaderRecord.KeyCompareType, isHFSX, out var recordData))
{
var key = record.GetCatalogKey();
if ((recordData!.Type == HFSCatalogRecordType.FileThread) || (recordData!.Type == HFSCatalogRecordType.FolderThread))
{
threads.Add(key.ParentId, (HFSCatalogThread)recordData);
}
else
{
records.Add(key, recordData);
}
}
else
{
throw new InvalidFormatException(CorruptHFSMessage);
}
}
}
else if (parent is HFSIndexTreeNode indexNode)
{
foreach (var record in indexNode.Records)
EnumerateCatalogTree(catalogHeader, catalogTree, records, threads, (int)record.NodeNumber, isHFSX);
}
else
{
throw new InvalidFormatException(CorruptHFSMessage);
}
}
private static (HFSCatalogKey, HFSCatalogRecord) GetRecord(uint id, IDictionary<HFSCatalogKey, HFSCatalogRecord> records, IDictionary<uint, HFSCatalogThread> threads)
{
if (threads.TryGetValue(id, out var thread))
{
if (records.TryGetValue(thread.CatalogKey, out var record))
return (thread.CatalogKey, record!);
}
throw new InvalidFormatException(CorruptHFSMessage);
}
private static string SanitizePath(string path)
{
var sb = new StringBuilder(path.Length);
foreach (char c in path)
{
if (!char.IsControl(c))
sb.Append(c);
}
return sb.ToString();
}
private static string GetPath(HFSCatalogKey key, IDictionary<HFSCatalogKey, HFSCatalogRecord> records, IDictionary<uint, HFSCatalogThread> threads)
{
if (key.ParentId == 1)
{
return key.Name;
}
else
{
var (parentKey, _) = GetRecord(key.ParentId, records, threads);
var path = Path.Combine(GetPath(parentKey, records, threads), key.Name);
return SanitizePath(path);
}
}
private static IEnumerable<DmgArchiveEntry> LoadEntriesFromCatalogTree(
Stream partitionStream,
DmgFilePart filePart,
HFSVolumeHeader volumeHeader,
HFSHeaderTreeNode catalogHeader,
IReadOnlyList<HFSTreeNode> catalogTree,
IReadOnlyDictionary<HFSExtentKey, HFSExtentRecord> extents,
DmgArchive archive,
int rootIndex)
{
if (rootIndex == 0) return Array.Empty<DmgArchiveEntry>();
var records = new Dictionary<HFSCatalogKey, HFSCatalogRecord>();
var threads = new Dictionary<uint, HFSCatalogThread>();
EnumerateCatalogTree(catalogHeader, catalogTree, records, threads, rootIndex, volumeHeader.IsHFSX);
var entries = new List<DmgArchiveEntry>();
foreach (var kvp in records)
{
var key = kvp.Key;
var record = kvp.Value;
string path = GetPath(key, records, threads);
var stream = (record is HFSCatalogFile file) ? new HFSForkStream(partitionStream, volumeHeader, file.DataFork, file.FileId, extents) : null;
var entry = new DmgArchiveEntry(stream, archive, record, path, filePart);
entries.Add(entry);
}
return entries;
}
public static IEnumerable<DmgArchiveEntry> LoadEntriesFromPartition(Stream partitionStream, string fileName, DmgArchive archive)
{
if (!HFSVolumeHeader.TryRead(partitionStream, out var volumeHeader))
throw new InvalidFormatException(CorruptHFSMessage);
var filePart = new DmgFilePart(partitionStream, fileName);
var extentsFile = volumeHeader!.ExtentsFile;
var extentsStream = new HFSForkStream(partitionStream, volumeHeader, extentsFile);
var (extentsHeader, extentsTree) = ReadTree(extentsStream, volumeHeader.IsHFSX);
var extents = LoadExtents(extentsTree, (int)extentsHeader.HeaderRecord.RootNode);
var catalogFile = volumeHeader!.CatalogFile;
var catalogStream = new HFSForkStream(partitionStream, volumeHeader, catalogFile);
var (catalogHeader, catalogTree) = ReadTree(catalogStream, volumeHeader.IsHFSX);
return LoadEntriesFromCatalogTree(
partitionStream,
filePart,
volumeHeader,
catalogHeader,
catalogTree,
extents,
archive,
(int)catalogHeader.HeaderRecord.RootNode);
}
}
}
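Usage sketch (these types are internal, so this is illustrative only; "Partition0" is a placeholder name):

using System.IO;
using SharpCompress.Archives.Dmg;
using SharpCompress.Common.Dmg.HFS;
internal static class HfsLoadSketch
{
    public static void List(Stream partitionStream, DmgArchive archive)
    {
        // Volume header, then extents-overflow B-tree, then catalog B-tree, then entries.
        foreach (var entry in HFSUtil.LoadEntriesFromPartition(partitionStream, "Partition0", archive))
        {
            // Each entry's path was assembled recursively by GetPath above.
        }
    }
}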

View File

@@ -1,179 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Common.Dmg.HFS
{
internal sealed class HFSVolumeHeader : HFSStructBase
{
private const ushort SignaturePlus = 0x482B;
private const ushort SignatureX = 0x4858;
private const int FinderInfoCount = 8;
public bool IsHFSX { get; }
public ushort Version { get; }
public uint Attributes { get; }
public uint LastMountedVersion { get; }
public uint JournalInfoBlock { get; }
public DateTime CreateDate { get; }
public DateTime ModifyDate { get; }
public DateTime BackupDate { get; }
public DateTime CheckedDate { get; }
public uint FileCount { get; }
public uint FolderCount { get; }
public uint BlockSize { get; }
public uint TotalBlocks { get; }
public uint FreeBlocks { get; }
public uint NextAllocation { get; }
public uint RsrcClumpSize { get; }
public uint DataClumpSize { get; }
public uint NextCatalogID { get; }
public uint WriteCount { get; }
public ulong EncodingsBitmap { get; }
public IReadOnlyList<uint> FinderInfo { get; }
public HFSForkData AllocationFile { get; }
public HFSForkData ExtentsFile { get; }
public HFSForkData CatalogFile { get; }
public HFSForkData AttributesFile { get; }
public HFSForkData StartupFile { get; }
public HFSVolumeHeader(
bool isHFSX,
ushort version,
uint attributes,
uint lastMountedVersion,
uint journalInfoBlock,
DateTime createDate,
DateTime modifyDate,
DateTime backupDate,
DateTime checkedDate,
uint fileCount,
uint folderCount,
uint blockSize,
uint totalBlocks,
uint freeBlocks,
uint nextAllocation,
uint rsrcClumpSize,
uint dataClumpSize,
uint nextCatalogID,
uint writeCount,
ulong encodingsBitmap,
IReadOnlyList<uint> finderInfo,
HFSForkData allocationFile,
HFSForkData extentsFile,
HFSForkData catalogFile,
HFSForkData attributesFile,
HFSForkData startupFile)
{
IsHFSX = isHFSX;
Version = version;
Attributes = attributes;
LastMountedVersion = lastMountedVersion;
JournalInfoBlock = journalInfoBlock;
CreateDate = createDate;
ModifyDate = modifyDate;
BackupDate = backupDate;
CheckedDate = checkedDate;
FileCount = fileCount;
FolderCount = folderCount;
BlockSize = blockSize;
TotalBlocks = totalBlocks;
FreeBlocks = freeBlocks;
NextAllocation = nextAllocation;
RsrcClumpSize = rsrcClumpSize;
DataClumpSize = dataClumpSize;
NextCatalogID = nextCatalogID;
WriteCount = writeCount;
EncodingsBitmap = encodingsBitmap;
FinderInfo = finderInfo;
AllocationFile = allocationFile;
ExtentsFile = extentsFile;
CatalogFile = catalogFile;
AttributesFile = attributesFile;
StartupFile = startupFile;
}
private static IReadOnlyList<uint> ReadFinderInfo(Stream stream)
{
var finderInfo = new uint[FinderInfoCount];
for (int i = 0; i < FinderInfoCount; i++)
finderInfo[i] = ReadUInt32(stream);
return finderInfo;
}
public static bool TryRead(Stream stream, out HFSVolumeHeader? header)
{
header = null;
stream.Skip(1024); // reserved bytes
bool isHFSX;
ushort sig = ReadUInt16(stream);
if (sig == SignaturePlus) isHFSX = false;
else if (sig == SignatureX) isHFSX = true;
else return false;
ushort version = ReadUInt16(stream);
uint attributes = ReadUInt32(stream);
uint lastMountedVersion = ReadUInt32(stream);
uint journalInfoBlock = ReadUInt32(stream);
DateTime createDate = ReadDate(stream);
DateTime modifyDate = ReadDate(stream);
DateTime backupDate = ReadDate(stream);
DateTime checkedDate = ReadDate(stream);
uint fileCount = ReadUInt32(stream);
uint folderCount = ReadUInt32(stream);
uint blockSize = ReadUInt32(stream);
uint totalBlocks = ReadUInt32(stream);
uint freeBlocks = ReadUInt32(stream);
uint nextAllocation = ReadUInt32(stream);
uint rsrcClumpSize = ReadUInt32(stream);
uint dataClumpSize = ReadUInt32(stream);
uint nextCatalogID = ReadUInt32(stream);
uint writeCount = ReadUInt32(stream);
ulong encodingsBitmap = ReadUInt64(stream);
IReadOnlyList<uint> finderInfo = ReadFinderInfo(stream);
HFSForkData allocationFile = HFSForkData.Read(stream);
HFSForkData extentsFile = HFSForkData.Read(stream);
HFSForkData catalogFile = HFSForkData.Read(stream);
HFSForkData attributesFile = HFSForkData.Read(stream);
HFSForkData startupFile = HFSForkData.Read(stream);
header = new HFSVolumeHeader(
isHFSX,
version,
attributes,
lastMountedVersion,
journalInfoBlock,
createDate,
modifyDate,
backupDate,
checkedDate,
fileCount,
folderCount,
blockSize,
totalBlocks,
freeBlocks,
nextAllocation,
rsrcClumpSize,
dataClumpSize,
nextCatalogID,
writeCount,
encodingsBitmap,
finderInfo,
allocationFile,
extentsFile,
catalogFile,
attributesFile,
startupFile);
return true;
}
}
}
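Aside: the two accepted signatures are ASCII mnemonics; a quick check (illustrative only):

using System;
using System.Text;
internal static class VolumeSignatureSketch
{
    public static void Main()
    {
        Console.WriteLine(Encoding.ASCII.GetString(new byte[] { 0x48, 0x2B })); // "H+" (HFS+)
        Console.WriteLine(Encoding.ASCII.GetString(new byte[] { 0x48, 0x58 })); // "HX" (HFSX)
    }
}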

View File

@@ -1,49 +0,0 @@
using System;
namespace SharpCompress.Common.Dmg.Headers
{
internal enum BlkxChunkType : uint
{
Zero = 0x00000000u,
Uncompressed = 0x00000001u,
Ignore = 0x00000002u,
AdcCompressed = 0x80000004u,
ZlibCompressed = 0x80000005u,
Bz2Compressed = 0x80000006u,
Comment = 0x7FFFFFFEu,
Last = 0xFFFFFFFFu,
}
internal sealed class BlkxChunk : DmgStructBase
{
private const int SectorSize = 512;
public BlkxChunkType Type { get; } // Compression type used or chunk type
public uint Comment { get; } // "+beg" or "+end" when Type is Comment (0x7FFFFFFE); otherwise reserved
public ulong UncompressedOffset { get; } // Byte offset in the uncompressed image (stored on disk as a start sector)
public ulong UncompressedLength { get; } // Byte length in the uncompressed image (stored on disk as a sector count)
public ulong CompressedOffset { get; } // Start of chunk in data fork
public ulong CompressedLength { get; } // Count of bytes of chunk, in data fork
private BlkxChunk(BlkxChunkType type, uint comment, ulong sectorNumber, ulong sectorCount, ulong compressedOffset, ulong compressedLength)
{
Type = type;
Comment = comment;
UncompressedOffset = sectorNumber * SectorSize;
UncompressedLength = sectorCount * SectorSize;
CompressedOffset = compressedOffset;
CompressedLength = compressedLength;
}
public static bool TryRead(ref ReadOnlySpan<byte> data, out BlkxChunk? chunk)
{
chunk = null;
var type = (BlkxChunkType)ReadUInt32(ref data);
if (!Enum.IsDefined(typeof(BlkxChunkType), type)) return false;
chunk = new BlkxChunk(type, ReadUInt32(ref data), ReadUInt64(ref data), ReadUInt64(ref data), ReadUInt64(ref data), ReadUInt64(ref data));
return true;
}
}
}
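Worked example: the constructor converts the sector-based on-disk fields to byte values, so a chunk at sector 16 spanning 8 sectors covers bytes 8192..12287 of the uncompressed image (illustrative values):

using System;
internal static class ChunkMathSketch
{
    private const int SectorSize = 512;
    public static void Main()
    {
        ulong sectorNumber = 16, sectorCount = 8; // values chosen for illustration
        Console.WriteLine(sectorNumber * SectorSize); // 8192 (byte offset)
        Console.WriteLine(sectorCount * SectorSize);  // 4096 (byte length)
    }
}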

View File

@@ -1,75 +0,0 @@
using System;
using System.Collections.Generic;
namespace SharpCompress.Common.Dmg.Headers
{
internal sealed class BlkxTable : DmgStructBase
{
private const uint Signature = 0x6d697368u;
public uint Version { get; } // Current version is 1
public ulong SectorNumber { get; } // Starting disk sector in this blkx descriptor
public ulong SectorCount { get; } // Number of disk sectors in this blkx descriptor
public ulong DataOffset { get; }
public uint BuffersNeeded { get; }
public uint BlockDescriptors { get; } // Number of descriptors
public UdifChecksum Checksum { get; }
public IReadOnlyList<BlkxChunk> Chunks { get; }
private BlkxTable(
uint version,
ulong sectorNumber,
ulong sectorCount,
ulong dataOffset,
uint buffersNeeded,
uint blockDescriptors,
UdifChecksum checksum,
IReadOnlyList<BlkxChunk> chunks)
{
Version = version;
SectorNumber = sectorNumber;
SectorCount = sectorCount;
DataOffset = dataOffset;
BuffersNeeded = buffersNeeded;
BlockDescriptors = blockDescriptors;
Checksum = checksum;
Chunks = chunks;
}
public static bool TryRead(in byte[] buffer, out BlkxTable? header)
{
header = null;
ReadOnlySpan<byte> data = buffer.AsSpan();
uint sig = ReadUInt32(ref data);
if (sig != Signature) return false;
uint version = ReadUInt32(ref data);
ulong sectorNumber = ReadUInt64(ref data);
ulong sectorCount = ReadUInt64(ref data);
ulong dataOffset = ReadUInt64(ref data);
uint buffersNeeded = ReadUInt32(ref data);
uint blockDescriptors = ReadUInt32(ref data);
data = data.Slice(6 * sizeof(uint)); // reserved
var checksum = UdifChecksum.Read(ref data);
uint chunkCount = ReadUInt32(ref data);
var chunks = new BlkxChunk[chunkCount];
for (int i = 0; i < chunkCount; i++)
{
if (!BlkxChunk.TryRead(ref data, out var chunk)) return false;
chunks[i] = chunk!;
}
header = new BlkxTable(version, sectorNumber, sectorCount, dataOffset, buffersNeeded, blockDescriptors, checksum, chunks);
return true;
}
}
}

View File

@@ -1,138 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Common.Dmg.Headers
{
internal sealed class DmgHeader : DmgStructBase
{
public const int HeaderSize = 512;
private const uint Signature = 0x6B6F6C79u;
private const int UuidSize = 16; // 128 bit
public uint Version { get; } // Current version is 4
public uint Flags { get; } // Flags
public ulong RunningDataForkOffset { get; }
public ulong DataForkOffset { get; } // Data fork offset (usually 0, beginning of file)
public ulong DataForkLength { get; } // Size of data fork (usually up to the XMLOffset, below)
public ulong RsrcForkOffset { get; } // Resource fork offset, if any
public ulong RsrcForkLength { get; } // Resource fork length, if any
public uint SegmentNumber { get; } // Usually 1, may be 0
public uint SegmentCount { get; } // Usually 1, may be 0
public IReadOnlyList<byte> SegmentID { get; } // 128-bit GUID identifier of segment (if SegmentNumber != 0)
public UdifChecksum DataChecksum { get; }
public ulong XMLOffset { get; } // Offset of property list in DMG, from beginning
public ulong XMLLength { get; } // Length of property list
public UdifChecksum Checksum { get; }
public uint ImageVariant { get; } // Commonly 1
public ulong SectorCount { get; } // Size of DMG when expanded, in sectors
private DmgHeader(
uint version,
uint flags,
ulong runningDataForkOffset,
ulong dataForkOffset,
ulong dataForkLength,
ulong rsrcForkOffset,
ulong rsrcForkLength,
uint segmentNumber,
uint segmentCount,
IReadOnlyList<byte> segmentID,
UdifChecksum dataChecksum,
ulong xMLOffset,
ulong xMLLength,
UdifChecksum checksum,
uint imageVariant,
ulong sectorCount)
{
Version = version;
Flags = flags;
RunningDataForkOffset = runningDataForkOffset;
DataForkOffset = dataForkOffset;
DataForkLength = dataForkLength;
RsrcForkOffset = rsrcForkOffset;
RsrcForkLength = rsrcForkLength;
SegmentNumber = segmentNumber;
SegmentCount = segmentCount;
SegmentID = segmentID;
DataChecksum = dataChecksum;
XMLOffset = xMLOffset;
XMLLength = xMLLength;
Checksum = checksum;
ImageVariant = imageVariant;
SectorCount = sectorCount;
}
private static void ReadUuid(ref ReadOnlySpan<byte> data, byte[] buffer)
{
data.Slice(0, UuidSize).CopyTo(buffer);
data = data.Slice(UuidSize);
}
internal static bool TryRead(Stream input, out DmgHeader? header)
{
header = null;
var buffer = new byte[HeaderSize];
int count = input.Read(buffer, 0, HeaderSize);
if (count != HeaderSize) return false;
ReadOnlySpan<byte> data = buffer.AsSpan();
uint sig = ReadUInt32(ref data);
if (sig != Signature) return false;
uint version = ReadUInt32(ref data);
uint size = ReadUInt32(ref data);
if (size != (uint)HeaderSize) return false;
uint flags = ReadUInt32(ref data);
ulong runningDataForkOffset = ReadUInt64(ref data);
ulong dataForkOffset = ReadUInt64(ref data);
ulong dataForkLength = ReadUInt64(ref data);
ulong rsrcForkOffset = ReadUInt64(ref data);
ulong rsrcForkLength = ReadUInt64(ref data);
uint segmentNumber = ReadUInt32(ref data);
uint segmentCount = ReadUInt32(ref data);
var segmentID = new byte[UuidSize];
ReadUuid(ref data, segmentID);
var dataChecksum = UdifChecksum.Read(ref data);
ulong xmlOffset = ReadUInt64(ref data);
ulong xmlLength = ReadUInt64(ref data);
data = data.Slice(120); // Reserved bytes
var checksum = UdifChecksum.Read(ref data);
uint imageVariant = ReadUInt32(ref data);
ulong sectorCount = ReadUInt64(ref data);
header = new DmgHeader(
version,
flags,
runningDataForkOffset,
dataForkOffset,
dataForkLength,
rsrcForkOffset,
rsrcForkLength,
segmentNumber,
segmentCount,
segmentID,
dataChecksum,
xmlOffset,
xmlLength,
checksum,
imageVariant,
sectorCount);
return true;
}
}
}
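Usage sketch: UDIF images conventionally store this 512-byte block (the signature 0x6B6F6C79 spells "koly" in ASCII) as a trailer at the end of the file, so a caller with a seekable stream would typically locate it like this (illustrative only):

using System.IO;
internal static class KolySketch
{
    public static DmgHeader? ReadTrailer(Stream dmg)
    {
        dmg.Seek(-DmgHeader.HeaderSize, SeekOrigin.End); // seek to the last 512 bytes
        return DmgHeader.TryRead(dmg, out var header) ? header : null;
    }
}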

View File

@@ -1,22 +0,0 @@
using System;
using System.Buffers.Binary;
namespace SharpCompress.Common.Dmg.Headers
{
internal abstract class DmgStructBase
{
protected static uint ReadUInt32(ref ReadOnlySpan<byte> data)
{
uint val = BinaryPrimitives.ReadUInt32BigEndian(data);
data = data.Slice(sizeof(uint));
return val;
}
protected static ulong ReadUInt64(ref ReadOnlySpan<byte> data)
{
ulong val = BinaryPrimitives.ReadUInt64BigEndian(data);
data = data.Slice(sizeof(ulong));
return val;
}
}
}

View File

@@ -1,90 +0,0 @@
using System;
using System.Buffers.Binary;
using System.IO;
namespace SharpCompress.Common.Dmg.Headers
{
internal sealed class GptHeader : GptStructBase
{
private const int HeaderSize = 92;
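// The eight signature bytes below spell "EFI PART" in ASCII.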
private static readonly ulong Signature = BinaryPrimitives.ReadUInt64LittleEndian(new byte[] { 69, 70, 73, 32, 80, 65, 82, 84 });
public uint Revision { get; }
public uint Crc32Header { get; }
public ulong CurrentLba { get; }
public ulong BackupLba { get; }
public ulong FirstUsableLba { get; }
public ulong LastUsableLba { get; }
public Guid DiskGuid { get; }
public ulong EntriesStart { get; }
public uint EntriesCount { get; }
public uint EntriesSize { get; }
public uint Crc32Array { get; }
private GptHeader(
uint revision,
uint crc32Header,
ulong currentLba,
ulong backupLba,
ulong firstUsableLba,
ulong lastUsableLba,
Guid diskGuid,
ulong entriesStart,
uint entriesCount,
uint entriesSize,
uint crc32Array)
{
Revision = revision;
Crc32Header = crc32Header;
CurrentLba = currentLba;
BackupLba = backupLba;
FirstUsableLba = firstUsableLba;
LastUsableLba = lastUsableLba;
DiskGuid = diskGuid;
EntriesStart = entriesStart;
EntriesCount = entriesCount;
EntriesSize = entriesSize;
Crc32Array = crc32Array;
}
public static bool TryRead(Stream stream, out GptHeader? header)
{
header = null;
ulong sig = ReadUInt64(stream);
if (sig != Signature) return false;
uint revision = ReadUInt32(stream);
uint headerSize = ReadUInt32(stream);
if (headerSize != HeaderSize) return false;
uint crc32Header = ReadUInt32(stream);
_ = ReadUInt32(stream); // reserved
ulong currentLba = ReadUInt64(stream);
ulong backupLba = ReadUInt64(stream);
ulong firstUsableLba = ReadUInt64(stream);
ulong lastUsableLba = ReadUInt64(stream);
Guid diskGuid = ReadGuid(stream);
ulong entriesStart = ReadUInt64(stream);
uint entriesCount = ReadUInt32(stream);
uint entriesSize = ReadUInt32(stream);
uint crc32Array = ReadUInt32(stream);
header = new GptHeader(
revision,
crc32Header,
currentLba,
backupLba,
firstUsableLba,
lastUsableLba,
diskGuid,
entriesStart,
entriesCount,
entriesSize,
crc32Array);
return true;
}
}
}

View File

@@ -1,36 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Common.Dmg.Headers
{
internal sealed class GptPartitionEntry : GptStructBase
{
public Guid TypeGuid { get; }
public Guid Guid { get; }
public ulong FirstLba { get; }
public ulong LastLba { get; }
public ulong Attributes { get; }
public string Name { get; }
private GptPartitionEntry(Guid typeGuid, Guid guid, ulong firstLba, ulong lastLba, ulong attributes, string name)
{
TypeGuid = typeGuid;
Guid = guid;
FirstLba = firstLba;
LastLba = lastLba;
Attributes = attributes;
Name = name;
}
public static GptPartitionEntry Read(Stream stream)
{
return new GptPartitionEntry(
ReadGuid(stream),
ReadGuid(stream),
ReadUInt64(stream),
ReadUInt64(stream),
ReadUInt64(stream),
ReadString(stream, 72));
}
}
}

View File

@@ -1,56 +0,0 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Text;
namespace SharpCompress.Common.Dmg.Headers
{
internal abstract class GptStructBase
{
private static readonly byte[] _buffer = new byte[8];
protected static ushort ReadUInt16(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(ushort)) != sizeof(ushort))
throw new EndOfStreamException();
return BinaryPrimitives.ReadUInt16LittleEndian(_buffer);
}
protected static uint ReadUInt32(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(uint)) != sizeof(uint))
throw new EndOfStreamException();
return BinaryPrimitives.ReadUInt32LittleEndian(_buffer);
}
protected static ulong ReadUInt64(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(ulong)) != sizeof(ulong))
throw new EndOfStreamException();
return BinaryPrimitives.ReadUInt64LittleEndian(_buffer);
}
protected static Guid ReadGuid(Stream stream)
{
int a = (int)ReadUInt32(stream);
short b = (short)ReadUInt16(stream);
short c = (short)ReadUInt16(stream);
if (stream.Read(_buffer, 0, 8) != 8)
throw new EndOfStreamException();
return new Guid(a, b, c, _buffer);
}
protected static string ReadString(Stream stream, int byteSize)
{
var buffer = new byte[byteSize];
if (stream.Read(buffer, 0, byteSize) != byteSize)
throw new EndOfStreamException();
return Encoding.Unicode.GetString(buffer).NullTerminate();
}
}
}
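Worked example: GPT GUIDs are mixed-endian, with the first three fields little-endian and the last eight bytes verbatim, which is exactly what ReadGuid reassembles above. .NET's Guid(byte[]) uses the same layout (illustrative bytes taken from the Apple HFS type GUID):

using System;
internal static class GuidLayoutSketch
{
    public static void Main()
    {
        var onDisk = new byte[]
        {
            0x00, 0x53, 0x46, 0x48, 0x00, 0x00, 0xAA, 0x11, // three little-endian fields
            0xAA, 0x11, 0x00, 0x30, 0x65, 0x43, 0xEC, 0xAC  // verbatim tail
        };
        Console.WriteLine(new Guid(onDisk)); // 48465300-0000-11aa-aa11-00306543ecac (Apple HFS)
    }
}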

View File

@@ -1,33 +0,0 @@
using System;
using System.Collections.Generic;
namespace SharpCompress.Common.Dmg.Headers
{
internal sealed class UdifChecksum : DmgStructBase
{
private const int MaxSize = 32; // * 4 to get byte size
public uint Type { get; }
public uint Size { get; } // in bits
public IReadOnlyList<uint> Bits { get; }
private UdifChecksum(uint type, uint size, IReadOnlyList<uint> bits)
{
Type = type;
Size = size;
Bits = bits;
}
public static UdifChecksum Read(ref ReadOnlySpan<byte> data)
{
uint type = ReadUInt32(ref data);
uint size = ReadUInt32(ref data);
var bits = new uint[MaxSize];
for (int i = 0; i < MaxSize; i++)
bits[i] = ReadUInt32(ref data);
return new UdifChecksum(type, size, bits);
}
}
}

View File

@@ -1,14 +0,0 @@
using System;
namespace SharpCompress.Common.Dmg
{
internal static class PartitionFormat
{
public static readonly Guid AppleHFS = new Guid("48465300-0000-11AA-AA11-00306543ECAC");
public static readonly Guid AppleUFS = new Guid("55465300-0000-11AA-AA11-00306543ECAC");
public static readonly Guid AppleBoot = new Guid("426F6F74-0000-11AA-AA11-00306543ECAC");
public static readonly Guid AppleRaid = new Guid("52414944-0000-11AA-AA11-00306543ECAC");
public static readonly Guid AppleRaidOffline = new Guid("52414944-5F4F-11AA-AA11-00306543ECAC");
public static readonly Guid AppleLabel = new Guid("4C616265-6C00-11AA-AA11-00306543ECAC");
}
}

View File

@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
namespace SharpCompress.Common
{
@@ -77,8 +78,9 @@ namespace SharpCompress.Common
internal bool IsSolid { get; set; }
internal virtual void Close()
internal virtual ValueTask CloseAsync()
{
return new ();
}
/// <summary>

View File

@@ -1,10 +1,13 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common
{
public class EntryStream : Stream
public class EntryStream : AsyncStream
{
private readonly IReader _reader;
private readonly Stream _stream;
@@ -20,25 +23,24 @@ namespace SharpCompress.Common
/// <summary>
/// When reading a stream from OpenEntryStream, the stream must be completed so use this to finish reading the entire entry.
/// </summary>
public void SkipEntry()
public async ValueTask SkipEntryAsync(CancellationToken cancellationToken = default)
{
this.Skip();
await this.SkipAsync(cancellationToken);
_completed = true;
}
protected override void Dispose(bool disposing)
public override async ValueTask DisposeAsync()
{
if (!(_completed || _reader.Cancelled))
{
SkipEntry();
await SkipEntryAsync();
}
if (_isDisposed)
{
return;
}
_isDisposed = true;
base.Dispose(disposing);
_stream.Dispose();
await _stream.DisposeAsync();
}
public override bool CanRead => true;
@@ -46,18 +48,13 @@ namespace SharpCompress.Common
public override bool CanSeek => false;
public override bool CanWrite => false;
public override void Flush()
{
}
public override long Length => _stream.Length;
public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
public override int Read(byte[] buffer, int offset, int count)
public override async ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken = default)
{
int read = _stream.Read(buffer, offset, count);
int read = await _stream.ReadAsync(buffer, cancellationToken);
if (read <= 0)
{
_completed = true;
@@ -65,14 +62,14 @@ namespace SharpCompress.Common
return read;
}
public override int ReadByte()
public override ValueTask WriteAsync(ReadOnlyMemory<byte> buffer, CancellationToken cancellationToken = default)
{
int value = _stream.ReadByte();
if (value == -1)
{
_completed = true;
}
return value;
throw new NotSupportedException();
}
public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
throw new NotSupportedException();
}
public override long Seek(long offset, SeekOrigin origin)
@@ -84,10 +81,5 @@ namespace SharpCompress.Common
{
throw new NotSupportedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotSupportedException();
}
}
}
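Usage sketch of the async disposal contract above; OpenEntryStreamAsync is an assumed name for this branch's async open-entry API, so the exact signature may differ:

using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
internal static class EntryStreamSketch
{
    public static async Task CopyEntryAsync(SharpCompress.Readers.IReader reader, Stream destination)
    {
        await using EntryStream entryStream = await reader.OpenEntryStreamAsync();
        await entryStream.CopyToAsync(destination);
        // DisposeAsync skips any unread remainder so the reader stays positioned correctly.
    }
}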

View File

@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common
{
@@ -8,10 +10,11 @@ namespace SharpCompress.Common
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteEntryToDirectory(IEntry entry,
public static async ValueTask WriteEntryToDirectoryAsync(IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Action<string, ExtractionOptions?> write)
Func<string, ExtractionOptions?, CancellationToken, ValueTask> write,
CancellationToken cancellationToken = default)
{
string destinationFileName;
string file = Path.GetFileName(entry.Key);
@@ -52,7 +55,7 @@ namespace SharpCompress.Common
{
throw new ExtractionException("Entry is trying to write a file outside of the destination directory.");
}
write(destinationFileName, options);
await write(destinationFileName, options, cancellationToken);
}
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
{
@@ -60,11 +63,12 @@ namespace SharpCompress.Common
}
}
public static void WriteEntryToFile(IEntry entry, string destinationFileName,
public static async ValueTask WriteEntryToFileAsync(IEntry entry, string destinationFileName,
ExtractionOptions? options,
Action<string, FileMode> openAndWrite)
Func<string, FileMode, CancellationToken, ValueTask> openAndWrite,
CancellationToken cancellationToken = default)
{
if (entry.LinkTarget != null)
if (entry.LinkTarget is not null)
{
if (options?.WriteSymbolicLink is null)
{
@@ -85,7 +89,7 @@ namespace SharpCompress.Common
fm = FileMode.CreateNew;
}
openAndWrite(destinationFileName, fm);
await openAndWrite(destinationFileName, fm, cancellationToken);
entry.PreserveExtractionOptions(destinationFileName, options);
}
}
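Usage sketch for the callback-based signature above (the containing class is assumed to be ExtractionMethods, and the copy body is a placeholder):

using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
internal static class ExtractionSketch
{
    public static ValueTask ExtractAsync(IEntry entry, string path, ExtractionOptions? options)
        => ExtractionMethods.WriteEntryToFileAsync(entry, path, options,
            async (fileName, fileMode, cancellationToken) =>
            {
                await using var fileStream = new FileStream(fileName, fileMode);
                // write the entry's decompressed bytes into fileStream here
            });
}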

View File

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common
{
@@ -11,9 +13,9 @@ namespace SharpCompress.Common
internal ArchiveEncoding ArchiveEncoding { get; }
internal abstract string FilePartName { get; }
internal abstract string? FilePartName { get; }
internal abstract Stream GetCompressedStream();
internal abstract ValueTask<Stream> GetCompressedStreamAsync(CancellationToken cancellationToken);
internal abstract Stream? GetRawStream();
internal bool Skipped { get; set; }
}

View File

@@ -1,6 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.CompilerServices;
using System.Threading;
namespace SharpCompress.Common.GZip
{
@@ -17,7 +19,7 @@ namespace SharpCompress.Common.GZip
public override long Crc => _filePart.Crc ?? 0;
public override string Key => _filePart.FilePartName;
public override string Key => _filePart.FilePartName ?? string.Empty;
public override string? LinkTarget => null;
@@ -41,9 +43,12 @@ namespace SharpCompress.Common.GZip
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
internal static IEnumerable<GZipEntry> GetEntries(Stream stream, OptionsBase options)
internal static async IAsyncEnumerable<GZipEntry> GetEntries(Stream stream, OptionsBase options,
[EnumeratorCancellation] CancellationToken cancellationToken)
{
yield return new GZipEntry(new GZipFilePart(stream, options.ArchiveEncoding));
var part = new GZipFilePart(options.ArchiveEncoding);
await part.Initialize(stream, cancellationToken);
yield return new GZipEntry(part);
}
}
}
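Consumption sketch for the IAsyncEnumerable conversion above (internal API, illustrative only; namespaces assumed from the diff):

using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
internal static class GZipEntrySketch
{
    public static async Task ListAsync(Stream stream, OptionsBase options, CancellationToken cancellationToken)
    {
        await foreach (var entry in GZipEntry.GetEntries(stream, options, cancellationToken))
        {
            System.Console.WriteLine(entry.Key);
        }
    }
}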

View File

@@ -1,7 +1,10 @@
using System;
using System.Buffers;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
@@ -11,34 +14,44 @@ namespace SharpCompress.Common.GZip
internal sealed class GZipFilePart : FilePart
{
private string? _name;
private readonly Stream _stream;
//init only
#nullable disable
private Stream _stream;
#nullable enable
internal GZipFilePart(Stream stream, ArchiveEncoding archiveEncoding)
internal GZipFilePart(ArchiveEncoding archiveEncoding)
: base(archiveEncoding)
{
}
internal async ValueTask Initialize(Stream stream, CancellationToken cancellationToken)
{
_stream = stream;
ReadAndValidateGzipHeader();
if (stream.CanSeek)
{
long position = stream.Position;
stream.Position = stream.Length - 8;
ReadTrailer();
await ReadTrailerAsync(cancellationToken);
stream.Position = position;
}
EntryStartPosition = stream.Position;
}
internal long EntryStartPosition { get; }
internal long EntryStartPosition { get; private set; }
internal DateTime? DateModified { get; private set; }
internal int? Crc { get; private set; }
internal int? UncompressedSize { get; private set; }
internal override string FilePartName => _name!;
internal override string? FilePartName => _name;
internal override Stream GetCompressedStream()
internal override async ValueTask<Stream> GetCompressedStreamAsync(CancellationToken cancellationToken)
{
return new DeflateStream(_stream, CompressionMode.Decompress, CompressionLevel.Default);
var stream = new GZipStream(_stream, CompressionMode.Decompress, CompressionLevel.Default);
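// The zero-length read below nudges the GZipStream to consume the gzip header,
// so FileName and LastModified are populated before any payload bytes are read.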
await stream.ReadAsync(Array.Empty<byte>(), 0, 0, cancellationToken);
_name = stream.FileName;
DateModified = stream.LastModified;
return stream;
}
internal override Stream GetRawStream()
@@ -46,93 +59,12 @@ namespace SharpCompress.Common.GZip
return _stream;
}
private void ReadTrailer()
private async ValueTask ReadTrailerAsync(CancellationToken cancellationToken)
{
// Read and potentially verify the GZIP trailer: CRC32 and size mod 2^32
Span<byte> trailer = stackalloc byte[8];
int n = _stream.Read(trailer);
Crc = BinaryPrimitives.ReadInt32LittleEndian(trailer);
UncompressedSize = BinaryPrimitives.ReadInt32LittleEndian(trailer.Slice(4));
}
private void ReadAndValidateGzipHeader()
{
// read the header on the first read
Span<byte> header = stackalloc byte[10];
int n = _stream.Read(header);
// workitem 8501: handle edge case (decompress empty stream)
if (n == 0)
{
return;
}
if (n != 10)
{
throw new ZlibException("Not a valid GZIP stream.");
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
throw new ZlibException("Bad GZIP header.");
}
int timet = BinaryPrimitives.ReadInt32LittleEndian(header.Slice(4));
DateModified = TarHeader.EPOCH.AddSeconds(timet);
if ((header[3] & 0x04) == 0x04)
{
// read and discard extra field
n = _stream.Read(header.Slice(0, 2)); // 2-byte length field
short extraLength = (short)(header[0] + header[1] * 256);
byte[] extra = new byte[extraLength];
if (!_stream.ReadFully(extra))
{
throw new ZlibException("Unexpected end-of-file reading GZIP header.");
}
n = extraLength;
}
if ((header[3] & 0x08) == 0x08)
{
_name = ReadZeroTerminatedString(_stream);
}
if ((header[3] & 0x10) == 0x010)
{
ReadZeroTerminatedString(_stream);
}
if ((header[3] & 0x02) == 0x02)
{
_stream.ReadByte(); // CRC16, ignore
}
}
private string ReadZeroTerminatedString(Stream stream)
{
Span<byte> buf1 = stackalloc byte[1];
var list = new List<byte>();
bool done = false;
do
{
// workitem 7740
int n = stream.Read(buf1);
if (n != 1)
{
throw new ZlibException("Unexpected EOF reading GZIP header.");
}
if (buf1[0] == 0)
{
done = true;
}
else
{
list.Add(buf1[0]);
}
}
while (!done);
byte[] buffer = list.ToArray();
return ArchiveEncoding.Decode(buffer);
Crc = await _stream.ReadInt32(cancellationToken);
UncompressedSize = await _stream.ReadInt32(cancellationToken);
}
}
}
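Reference note: a gzip member ends with an 8-byte trailer, CRC-32 followed by ISIZE (uncompressed size mod 2^32), both little-endian, which is why Initialize seeks to Length - 8 before calling ReadTrailerAsync. A minimal parser of that layout (illustrative helper):

using System;
using System.Buffers.Binary;
internal static class GzipTrailerSketch
{
    public static (int Crc, int UncompressedSize) Parse(ReadOnlySpan<byte> trailer)
        => (BinaryPrimitives.ReadInt32LittleEndian(trailer),
            BinaryPrimitives.ReadInt32LittleEndian(trailer.Slice(4)));
}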

View File

@@ -2,7 +2,7 @@
namespace SharpCompress.Common
{
public interface IVolume : IDisposable
public interface IVolume : IAsyncDisposable
{
}
}

View File

@@ -1,6 +1,4 @@
using System;
namespace SharpCompress.Common
namespace SharpCompress.Common
{
public class IncompleteArchiveException : ArchiveException
{
@@ -8,10 +6,5 @@ namespace SharpCompress.Common
: base(message)
{
}
public IncompleteArchiveException(string message, Exception inner)
: base(message, inner)
{
}
}
}

View File

@@ -5,6 +5,8 @@ using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
@@ -783,7 +785,7 @@ namespace SharpCompress.Common.SevenZip
}
}
private List<byte[]> ReadAndDecodePackedStreams(long baseOffset, IPasswordProvider pass)
private async ValueTask<List<byte[]>> ReadAndDecodePackedStreams(long baseOffset, IPasswordProvider pass, CancellationToken cancellationToken)
{
#if DEBUG
Log.WriteLine("-- ReadAndDecodePackedStreams --");
@@ -815,8 +817,8 @@ namespace SharpCompress.Common.SevenZip
dataStartPos += packSize;
}
var outStream = DecoderStreamHelper.CreateDecoderStream(_stream, oldDataStartPos, myPackSizes,
folder, pass);
var outStream = await DecoderStreamHelper.CreateDecoderStream(_stream, oldDataStartPos, myPackSizes,
folder, pass, cancellationToken);
int unpackSize = checked((int)folder.GetUnpackSize());
byte[] data = new byte[unpackSize];
@@ -845,7 +847,7 @@ namespace SharpCompress.Common.SevenZip
}
}
private void ReadHeader(ArchiveDatabase db, IPasswordProvider getTextPassword)
private async ValueTask ReadHeader(ArchiveDatabase db, IPasswordProvider getTextPassword, CancellationToken cancellationToken)
{
#if DEBUG
Log.WriteLine("-- ReadHeader --");
@@ -864,7 +866,7 @@ namespace SharpCompress.Common.SevenZip
List<byte[]> dataVector = null;
if (type == BlockType.AdditionalStreamsInfo)
{
dataVector = ReadAndDecodePackedStreams(db._startPositionAfterHeader, getTextPassword);
dataVector = await ReadAndDecodePackedStreams(db._startPositionAfterHeader, getTextPassword, cancellationToken);
type = ReadId();
}

View File

@@ -32,7 +32,7 @@ namespace SharpCompress.Common.SevenZip
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted => FilePart.IsEncrypted;
public override bool IsEncrypted => false;
public override bool IsDirectory => FilePart.Header.IsDir;

View File

@@ -1,6 +1,8 @@
using System;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Common.SevenZip
@@ -35,11 +37,11 @@ namespace SharpCompress.Common.SevenZip
return null;
}
internal override Stream GetCompressedStream()
internal override async ValueTask<Stream> GetCompressedStreamAsync(CancellationToken cancellationToken)
{
if (!Header.HasStream)
{
return null!;
return Stream.Null;
}
var folderStream = _database.GetFolderStream(_stream, Folder!, _database.PasswordProvider);
@@ -52,7 +54,7 @@ namespace SharpCompress.Common.SevenZip
}
if (skipSize > 0)
{
folderStream.Skip(skipSize);
await folderStream.SkipAsync(skipSize, cancellationToken);
}
return new ReadOnlySubStream(folderStream, Header.Size);
}
@@ -102,7 +104,5 @@ namespace SharpCompress.Common.SevenZip
throw new NotImplementedException();
}
}
internal bool IsEncrypted => Folder!._coders.FindIndex(c => c._methodId._id == CMethodId.K_AES_ID) != -1;
}
}

View File

@@ -1,9 +1,12 @@
#nullable disable
using System;
using System.Buffers;
using System.Buffers.Binary;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Tar.Headers
{
@@ -32,48 +35,48 @@ namespace SharpCompress.Common.Tar.Headers
internal const int BLOCK_SIZE = 512;
internal void Write(Stream output)
internal async Task WriteAsync(Stream output)
{
byte[] buffer = new byte[BLOCK_SIZE];
using var buffer = MemoryPool<byte>.Shared.Rent(BLOCK_SIZE);
WriteOctalBytes(511, buffer, 100, 8); // file mode
WriteOctalBytes(0, buffer, 108, 8); // owner ID
WriteOctalBytes(0, buffer, 116, 8); // group ID
WriteOctalBytes(511, buffer.Memory.Span, 100, 8); // file mode
WriteOctalBytes(0, buffer.Memory.Span, 108, 8); // owner ID
WriteOctalBytes(0, buffer.Memory.Span, 116, 8); // group ID
//ArchiveEncoding.UTF8.GetBytes("magic").CopyTo(buffer, 257);
var nameByteCount = ArchiveEncoding.GetEncoding().GetByteCount(Name);
if (nameByteCount > 100)
{
// Set mock filename and filetype to indicate the next block is the actual name of the file
WriteStringBytes("././@LongLink", buffer, 0, 100);
buffer[156] = (byte)EntryType.LongName;
WriteOctalBytes(nameByteCount + 1, buffer, 124, 12);
WriteStringBytes("././@LongLink", buffer.Memory.Span, 0, 100);
buffer.Memory.Span[156] = (byte)EntryType.LongName;
WriteOctalBytes(nameByteCount + 1, buffer.Memory.Span, 124, 12);
}
else
{
WriteStringBytes(ArchiveEncoding.Encode(Name), buffer, 100);
WriteOctalBytes(Size, buffer, 124, 12);
WriteStringBytes(ArchiveEncoding.Encode(Name), buffer.Memory, 100);
WriteOctalBytes(Size, buffer.Memory.Span, 124, 12);
var time = (long)(LastModifiedTime.ToUniversalTime() - EPOCH).TotalSeconds;
WriteOctalBytes(time, buffer, 136, 12);
buffer[156] = (byte)EntryType;
WriteOctalBytes(time, buffer.Memory.Span, 136, 12);
buffer.Memory.Span[156] = (byte)EntryType;
if (Size >= 0x1FFFFFFFF)
{
Span<byte> bytes12 = stackalloc byte[12];
BinaryPrimitives.WriteInt64BigEndian(bytes12.Slice(4), Size);
bytes12[0] |= 0x80;
bytes12.CopyTo(buffer.AsSpan(124));
using var bytes12 = MemoryPool<byte>.Shared.Rent(12);
BinaryPrimitives.WriteInt64BigEndian(bytes12.Memory.Span.Slice(4), Size);
bytes12.Memory.Span[0] |= 0x80;
bytes12.Memory.CopyTo(buffer.Memory.Slice(124));
}
}
int crc = RecalculateChecksum(buffer);
WriteOctalBytes(crc, buffer, 148, 8);
int crc = RecalculateChecksum(buffer.Memory);
WriteOctalBytes(crc, buffer.Memory.Span, 148, 8);
output.Write(buffer, 0, buffer.Length);
await output.WriteAsync(buffer.Memory.Slice(0, BLOCK_SIZE));
if (nameByteCount > 100)
{
WriteLongFilenameHeader(output);
await WriteLongFilenameHeaderAsync(output);
// update to a short name of fewer than 100 - [max bytes of one character] bytes.
// subtracting those bytes is needed to prevent an infinite loop (example code is here).
//
@@ -82,14 +85,14 @@ namespace SharpCompress.Common.Tar.Headers
//
// and then infinite recursion occurs in WriteLongFilenameHeader because truncated.Length is 102.
Name = ArchiveEncoding.Decode(ArchiveEncoding.Encode(Name), 0, 100 - ArchiveEncoding.GetEncoding().GetMaxByteCount(1));
Write(output);
await WriteAsync(output);
}
}
private void WriteLongFilenameHeader(Stream output)
private async Task WriteLongFilenameHeaderAsync(Stream output)
{
byte[] nameBytes = ArchiveEncoding.Encode(Name);
output.Write(nameBytes, 0, nameBytes.Length);
await output.WriteAsync(nameBytes.AsMemory());
// pad to multiple of BlockSize bytes, and make sure a terminating null is added
int numPaddingBytes = BLOCK_SIZE - (nameBytes.Length % BLOCK_SIZE);
@@ -97,48 +100,56 @@ namespace SharpCompress.Common.Tar.Headers
{
numPaddingBytes = BLOCK_SIZE;
}
output.Write(stackalloc byte[numPaddingBytes]);
using var padding = MemoryPool<byte>.Shared.Rent(numPaddingBytes);
padding.Memory.Span.Clear();
await output.WriteAsync(padding.Memory.Slice(0, numPaddingBytes));
}
internal bool Read(BinaryReader reader)
internal async ValueTask<bool> Read(Stream stream, CancellationToken cancellationToken)
{
var buffer = ReadBlock(reader);
if (buffer.Length == 0)
using var block = MemoryPool<byte>.Shared.Rent(BLOCK_SIZE); // dispose returns the rented buffer on every exit path
bool readFullyAsync = await stream.ReadAsync(block.Memory.Slice(0, BLOCK_SIZE), cancellationToken) == BLOCK_SIZE;
if (readFullyAsync is false)
{
return false;
}
// for symlinks, additionally read the linkname
if (ReadEntryType(buffer) == EntryType.SymLink)
if (ReadEntryType(block.Memory.Span) == EntryType.SymLink)
{
LinkName = ArchiveEncoding.Decode(buffer, 157, 100).TrimNulls();
LinkName = ArchiveEncoding.Decode(block.Memory.Span.Slice(157, 100)).TrimNulls();
}
if (ReadEntryType(buffer) == EntryType.LongName)
if (ReadEntryType(block.Memory.Span) == EntryType.LongName)
{
Name = ReadLongName(reader, buffer);
buffer = ReadBlock(reader);
Name = await ReadLongName(stream, block.Memory.Slice(0, BLOCK_SIZE), cancellationToken);
readFullyAsync = await stream.ReadAsync(block.Memory.Slice(0, BLOCK_SIZE), cancellationToken) == BLOCK_SIZE;
if (readFullyAsync is false)
{
return false;
}
}
else
{
Name = ArchiveEncoding.Decode(buffer, 0, 100).TrimNulls();
Name = ArchiveEncoding.Decode(block.Memory.Span.Slice(0, 100)).TrimNulls();
}
EntryType = ReadEntryType(buffer);
Size = ReadSize(buffer);
EntryType = ReadEntryType(block.Memory.Span);
Size = ReadSize(block.Memory.Slice(0, BLOCK_SIZE));
//Mode = ReadASCIIInt32Base8(buffer, 100, 7);
//UserId = ReadASCIIInt32Base8(buffer, 108, 7);
//GroupId = ReadASCIIInt32Base8(buffer, 116, 7);
long unixTimeStamp = ReadAsciiInt64Base8(buffer, 136, 11);
long unixTimeStamp = ReadAsciiInt64Base8(block.Memory.Span.Slice(136, 11));
LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime();
Magic = ArchiveEncoding.Decode(buffer, 257, 6).TrimNulls();
Magic = ArchiveEncoding.Decode(block.Memory.Span.Slice(257, 6)).TrimNulls();
if (!string.IsNullOrEmpty(Magic)
&& "ustar".Equals(Magic))
{
string namePrefix = ArchiveEncoding.Decode(buffer, 345, 157);
string namePrefix = ArchiveEncoding.Decode(block.Memory.Span.Slice(345, 157));
namePrefix = namePrefix.TrimNulls();
if (!string.IsNullOrEmpty(namePrefix))
{
@@ -153,55 +164,46 @@ namespace SharpCompress.Common.Tar.Headers
return true;
}
private string ReadLongName(BinaryReader reader, byte[] buffer)
private async ValueTask<string> ReadLongName(Stream reader, ReadOnlyMemory<byte> buffer, CancellationToken cancellationToken)
{
var size = ReadSize(buffer);
var nameLength = (int)size;
var nameBytes = reader.ReadBytes(nameLength);
using var rented = MemoryPool<byte>.Shared.Rent(nameLength);
var nameBytes = rented.Memory.Slice(0, nameLength);
await reader.ReadAsync(nameBytes, cancellationToken);
var remainingBytesToRead = BLOCK_SIZE - (nameLength % BLOCK_SIZE);
// Read the rest of the block and discard the data
if (remainingBytesToRead < BLOCK_SIZE)
{
reader.ReadBytes(remainingBytesToRead);
using var remaining = MemoryPool<byte>.Shared.Rent(remainingBytesToRead);
await reader.ReadAsync(remaining.Memory.Slice(0, remainingBytesToRead), cancellationToken);
}
return ArchiveEncoding.Decode(nameBytes, 0, nameBytes.Length).TrimNulls();
return ArchiveEncoding.Decode(nameBytes.Span).TrimNulls();
}
private static EntryType ReadEntryType(byte[] buffer)
private static EntryType ReadEntryType(Span<byte> buffer)
{
return (EntryType)buffer[156];
}
private long ReadSize(byte[] buffer)
private long ReadSize(ReadOnlyMemory<byte> buffer)
{
if ((buffer[124] & 0x80) == 0x80) // if size in binary
if ((buffer.Span[124] & 0x80) == 0x80) // if size in binary
{
return BinaryPrimitives.ReadInt64BigEndian(buffer.AsSpan(0x80));
return BinaryPrimitives.ReadInt64BigEndian(buffer.Span.Slice(0x80));
}
return ReadAsciiInt64Base8(buffer, 124, 11);
return ReadAsciiInt64Base8(buffer.Span.Slice(124, 11));
}
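ReadSize handles both encodings of the size field (header bytes 124-135). A hedged sketch of the two branches over the 12-byte field itself:

// Sketch, assuming sizeField is the 12-byte size field starting at offset 124.
private static long ParseTarSize(ReadOnlySpan<byte> sizeField)
{
    if ((sizeField[0] & 0x80) != 0)
    {
        // GNU base-256: big-endian binary; the low 8 bytes cover any practical size.
        return BinaryPrimitives.ReadInt64BigEndian(sizeField.Slice(4));
    }
    // POSIX: NUL/space-terminated ASCII octal, e.g. "00000001750".
    var text = Encoding.ASCII.GetString(sizeField).Trim('\0', ' ');
    return text.Length == 0 ? 0 : Convert.ToInt64(text, 8);
}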
private static byte[] ReadBlock(BinaryReader reader)
private static void WriteStringBytes(ReadOnlySpan<byte> name, Memory<byte> buffer, int length)
{
byte[] buffer = reader.ReadBytes(BLOCK_SIZE);
if (buffer.Length != 0 && buffer.Length < BLOCK_SIZE)
{
throw new InvalidOperationException("Buffer is invalid size");
}
return buffer;
}
private static void WriteStringBytes(ReadOnlySpan<byte> name, Span<byte> buffer, int length)
{
name.CopyTo(buffer);
name.CopyTo(buffer.Span.Slice(0));
int i = Math.Min(length, name.Length);
buffer.Slice(i, length - i).Clear();
buffer.Slice(i, length - i).Span.Clear();
}
private static void WriteStringBytes(string name, byte[] buffer, int offset, int length)
private static void WriteStringBytes(string name, Span<byte> buffer, int offset, int length)
{
int i;
@@ -216,7 +218,7 @@ namespace SharpCompress.Common.Tar.Headers
}
}
private static void WriteOctalBytes(long value, byte[] buffer, int offset, int length)
private static void WriteOctalBytes(long value, Span<byte> buffer, int offset, int length)
{
string val = Convert.ToString(value, 8);
int shift = length - val.Length - 1;
@@ -230,19 +232,9 @@ namespace SharpCompress.Common.Tar.Headers
}
}
private static int ReadAsciiInt32Base8(byte[] buffer, int offset, int count)
private static long ReadAsciiInt64Base8(ReadOnlySpan<byte> buffer)
{
string s = Encoding.UTF8.GetString(buffer, offset, count).TrimNulls();
if (string.IsNullOrEmpty(s))
{
return 0;
}
return Convert.ToInt32(s, 8);
}
private static long ReadAsciiInt64Base8(byte[] buffer, int offset, int count)
{
string s = Encoding.UTF8.GetString(buffer, offset, count).TrimNulls();
string s = Encoding.UTF8.GetString(buffer).TrimNulls();
if (string.IsNullOrEmpty(s))
{
return 0;
@@ -266,38 +258,20 @@ namespace SharpCompress.Common.Tar.Headers
(byte)' ', (byte)' ', (byte)' ', (byte)' '
};
internal static int RecalculateChecksum(byte[] buf)
private static int RecalculateChecksum(Memory<byte> buf)
{
// Set default value for checksum. That is 8 spaces.
eightSpaces.CopyTo(buf, 148);
eightSpaces.CopyTo(buf.Slice(148));
// Calculate checksum
int headerChecksum = 0;
foreach (byte b in buf)
foreach (byte b in buf.Span)
{
headerChecksum += b;
}
return headerChecksum;
}
internal static int RecalculateAltChecksum(byte[] buf)
{
eightSpaces.CopyTo(buf, 148);
int headerChecksum = 0;
foreach (byte b in buf)
{
if ((b & 0x80) == 0x80)
{
headerChecksum -= b ^ 0x80;
}
else
{
headerChecksum += b;
}
}
return headerChecksum;
}
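The checksum rule the remaining method implements, restated standalone: the checksum field (bytes 148-155) is summed as if it held eight spaces, and every other header byte contributes its unsigned value. The deleted alternate form differed only in treating high-bit bytes as signed.

// Standalone restatement of the unsigned tar header checksum.
private static int TarChecksum(ReadOnlySpan<byte> header)
{
    var sum = 0;
    for (var i = 0; i < header.Length; i++)
    {
        sum += (i >= 148 && i < 156) ? (byte)' ' : header[i];
    }
    return sum;
}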
public long? DataStartPosition { get; set; }
public string Magic { get; set; }

View File

@@ -3,6 +3,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.CompilerServices;
using System.Threading;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
@@ -46,10 +48,11 @@ namespace SharpCompress.Common.Tar
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
internal static IEnumerable<TarEntry> GetEntries(StreamingMode mode, Stream stream,
CompressionType compressionType, ArchiveEncoding archiveEncoding)
internal static async IAsyncEnumerable<TarEntry> GetEntries(StreamingMode mode, Stream stream,
CompressionType compressionType, ArchiveEncoding archiveEncoding,
[EnumeratorCancellation]CancellationToken cancellationToken)
{
foreach (TarHeader h in TarHeaderFactory.ReadHeader(mode, stream, archiveEncoding))
await foreach (TarHeader h in TarHeaderFactory.ReadHeader(mode, stream, archiveEncoding, cancellationToken))
{
if (h != null)
{

View File

@@ -1,6 +1,7 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
namespace SharpCompress.Common.Tar
{
@@ -19,14 +20,14 @@ namespace SharpCompress.Common.Tar
internal override string FilePartName => Header.Name;
internal override Stream GetCompressedStream()
internal override ValueTask<Stream> GetCompressedStreamAsync(CancellationToken cancellationToken)
{
if (_seekableStream != null)
{
_seekableStream.Position = Header.DataStartPosition!.Value;
return new TarReadOnlySubStream(_seekableStream, Header.Size);
return new(new TarReadOnlySubStream(_seekableStream, Header.Size));
}
return Header.PackedStream;
return new(Header.PackedStream);
}
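The return new(...) expressions are target-typed: each wraps an already-available Stream in a completed ValueTask<Stream> without spinning up an async state machine. Written out explicitly:

// Equivalent to the target-typed form above.
return new ValueTask<Stream>(new TarReadOnlySubStream(_seekableStream, Header.Size));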
internal override Stream? GetRawStream()

View File

@@ -1,5 +1,7 @@
using System.Collections.Generic;
using System.IO;
using System.Runtime.CompilerServices;
using System.Threading;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
@@ -7,17 +9,17 @@ namespace SharpCompress.Common.Tar
{
internal static class TarHeaderFactory
{
internal static IEnumerable<TarHeader?> ReadHeader(StreamingMode mode, Stream stream, ArchiveEncoding archiveEncoding)
internal static async IAsyncEnumerable<TarHeader?> ReadHeader(StreamingMode mode, Stream stream, ArchiveEncoding archiveEncoding,
[EnumeratorCancellation]CancellationToken cancellationToken)
{
while (true)
{
TarHeader? header = null;
try
{
BinaryReader reader = new BinaryReader(stream);
header = new TarHeader(archiveEncoding);
if (!header.Read(reader))
if (!await header.Read(stream, cancellationToken))
{
yield break;
}
@@ -25,10 +27,10 @@ namespace SharpCompress.Common.Tar
{
case StreamingMode.Seekable:
{
header.DataStartPosition = reader.BaseStream.Position;
header.DataStartPosition = stream.Position;
//skip to nearest 512
reader.BaseStream.Position += PadTo512(header.Size);
stream.Position += PadTo512(header.Size);
}
break;
case StreamingMode.Streaming:

View File

@@ -1,6 +1,8 @@
using SharpCompress.IO;
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Tar
{
@@ -14,7 +16,7 @@ namespace SharpCompress.Common.Tar
BytesLeftToRead = bytesToRead;
}
protected override void Dispose(bool disposing)
public override async ValueTask DisposeAsync()
{
if (_isDisposed)
{
@@ -23,22 +25,17 @@ namespace SharpCompress.Common.Tar
_isDisposed = true;
if (disposing)
// Ensure we read all remaining blocks for this entry.
await Stream.SkipAsync(BytesLeftToRead);
_amountRead += BytesLeftToRead;
// If the last block wasn't a full 512 bytes, skip the remaining padding bytes.
var bytesInLastBlock = _amountRead % 512;
if (bytesInLastBlock != 0)
{
// Ensure we read all remaining blocks for this entry.
Stream.Skip(BytesLeftToRead);
_amountRead += BytesLeftToRead;
// If the last block wasn't a full 512 bytes, skip the remaining padding bytes.
var bytesInLastBlock = _amountRead % 512;
if (bytesInLastBlock != 0)
{
Stream.Skip(512 - bytesInLastBlock);
}
await Stream.SkipAsync(512 - bytesInLastBlock);
}
base.Dispose(disposing);
}
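Disposal now runs through DisposeAsync, which drains the unread remainder of the entry plus its block padding so the underlying tar stream lands on the next header. An illustrative consumer, with tarStream, header and destination as stand-in names:

// Illustration: await using triggers DisposeAsync, re-aligning the outer stream.
await using (var entry = new TarReadOnlySubStream(tarStream, header.Size))
{
    await entry.CopyToAsync(destination);
}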
private long BytesLeftToRead { get; set; }
@@ -49,22 +46,18 @@ namespace SharpCompress.Common.Tar
public override bool CanWrite => false;
public override void Flush()
{
throw new NotSupportedException();
}
public override long Length => throw new NotSupportedException();
public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
public override int Read(byte[] buffer, int offset, int count)
public override async ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken = default)
{
if (BytesLeftToRead < count)
var count = buffer.Length;
if (BytesLeftToRead < buffer.Length)
{
count = (int)BytesLeftToRead;
}
int read = Stream.Read(buffer, offset, count);
int read = await Stream.ReadAsync(buffer.Slice(0, count), cancellationToken);
if (read > 0)
{
BytesLeftToRead -= read;
@@ -73,20 +66,9 @@ namespace SharpCompress.Common.Tar
return read;
}
public override int ReadByte()
public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
if (BytesLeftToRead <= 0)
{
return -1;
}
int value = Stream.ReadByte();
if (value != -1)
{
--BytesLeftToRead;
++_amountRead;
}
return value;
throw new NotSupportedException();
}
public override long Seek(long offset, SeekOrigin origin)
@@ -98,10 +80,5 @@ namespace SharpCompress.Common.Tar
{
throw new NotSupportedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotSupportedException();
}
}
}

View File

@@ -1,5 +1,5 @@
using System;
using System.IO;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -33,19 +33,10 @@ namespace SharpCompress.Common
/// RarArchive is part of a multi-part archive.
/// </summary>
public virtual bool IsMultiVolume => true;
protected virtual void Dispose(bool disposing)
public ValueTask DisposeAsync()
{
if (disposing)
{
_actualStream.Dispose();
}
}
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
return _actualStream.DisposeAsync();
}
}
}
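With Dispose replaced by DisposeAsync, volume lifetimes move from using to await using. A minimal caller-side sketch; OpenVolume is a placeholder, not a real API:

// Hypothetical caller; the point is the await using pattern.
await using (var volume = OpenVolume())
{
    // enumerate entries...
}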

View File

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers
{
@@ -9,29 +11,29 @@ namespace SharpCompress.Common.Zip.Headers
{
}
internal override void Read(BinaryReader reader)
internal override async ValueTask Read(Stream stream, CancellationToken cancellationToken)
{
VolumeNumber = reader.ReadUInt16();
FirstVolumeWithDirectory = reader.ReadUInt16();
TotalNumberOfEntriesInDisk = reader.ReadUInt16();
TotalNumberOfEntries = reader.ReadUInt16();
DirectorySize = reader.ReadUInt32();
DirectoryStartOffsetRelativeToDisk = reader.ReadUInt32();
CommentLength = reader.ReadUInt16();
Comment = reader.ReadBytes(CommentLength);
VolumeNumber = await stream.ReadUInt16(cancellationToken);
FirstVolumeWithDirectory = await stream.ReadUInt16(cancellationToken);
TotalNumberOfEntriesInDisk = await stream.ReadUInt16(cancellationToken);
TotalNumberOfEntries = await stream.ReadUInt16(cancellationToken);
DirectorySize = await stream.ReadUInt32(cancellationToken);
DirectoryStartOffsetRelativeToDisk = await stream.ReadUInt32(cancellationToken);
CommentLength = await stream.ReadUInt16(cancellationToken);
Comment = await stream.ReadBytes(CommentLength ?? 0, cancellationToken);
}
public ushort VolumeNumber { get; private set; }
public ushort? VolumeNumber { get; private set; }
public ushort FirstVolumeWithDirectory { get; private set; }
public ushort? FirstVolumeWithDirectory { get; private set; }
public ushort TotalNumberOfEntriesInDisk { get; private set; }
public ushort? TotalNumberOfEntriesInDisk { get; private set; }
public uint DirectorySize { get; private set; }
public uint? DirectorySize { get; private set; }
public uint DirectoryStartOffsetRelativeToDisk { get; private set; }
public uint? DirectoryStartOffsetRelativeToDisk { get; private set; }
public ushort CommentLength { get; private set; }
public ushort? CommentLength { get; private set; }
public byte[]? Comment { get; private set; }
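The stream.ReadUInt16/ReadUInt32/ReadBytes helpers used throughout these header readers are extension methods added elsewhere in this branch and not visible in this hunk. A plausible shape for one of them, little-endian as ZIP requires (an assumption, not the branch's actual code):

// Hypothetical sketch of the helper's shape; the real extension lives elsewhere.
using System;
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

internal static class StreamReadSketch
{
    public static async ValueTask<ushort> ReadUInt16(this Stream stream, CancellationToken ct)
    {
        var buffer = new byte[2];
        var total = 0;
        while (total < 2)
        {
            var n = await stream.ReadAsync(buffer.AsMemory(total, 2 - total), ct);
            if (n == 0)
            {
                throw new EndOfStreamException();
            }
            total += n;
        }
        return BinaryPrimitives.ReadUInt16LittleEndian(buffer);
    }
}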

View File

@@ -1,5 +1,7 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers
{
@@ -10,28 +12,28 @@ namespace SharpCompress.Common.Zip.Headers
{
}
internal override void Read(BinaryReader reader)
internal override async ValueTask Read(Stream stream, CancellationToken cancellationToken)
{
Version = reader.ReadUInt16();
VersionNeededToExtract = reader.ReadUInt16();
Flags = (HeaderFlags)reader.ReadUInt16();
CompressionMethod = (ZipCompressionMethod)reader.ReadUInt16();
LastModifiedTime = reader.ReadUInt16();
LastModifiedDate = reader.ReadUInt16();
Crc = reader.ReadUInt32();
CompressedSize = reader.ReadUInt32();
UncompressedSize = reader.ReadUInt32();
ushort nameLength = reader.ReadUInt16();
ushort extraLength = reader.ReadUInt16();
ushort commentLength = reader.ReadUInt16();
DiskNumberStart = reader.ReadUInt16();
InternalFileAttributes = reader.ReadUInt16();
ExternalFileAttributes = reader.ReadUInt32();
RelativeOffsetOfEntryHeader = reader.ReadUInt32();
Version = await stream.ReadUInt16(cancellationToken);
VersionNeededToExtract = await stream.ReadUInt16(cancellationToken);
Flags = (HeaderFlags)await stream.ReadUInt16(cancellationToken);
CompressionMethod = (ZipCompressionMethod)await stream.ReadUInt16(cancellationToken);
LastModifiedTime = await stream.ReadUInt16(cancellationToken);
LastModifiedDate = await stream.ReadUInt16(cancellationToken);
Crc = await stream.ReadUInt32(cancellationToken);
CompressedSize = await stream.ReadUInt32(cancellationToken);
UncompressedSize = await stream.ReadUInt32(cancellationToken);
ushort nameLength = await stream.ReadUInt16(cancellationToken);
ushort extraLength = await stream.ReadUInt16(cancellationToken);
ushort commentLength = await stream.ReadUInt16(cancellationToken);
DiskNumberStart = await stream.ReadUInt16(cancellationToken);
InternalFileAttributes = await stream.ReadUInt16(cancellationToken);
ExternalFileAttributes = await stream.ReadUInt32(cancellationToken);
RelativeOffsetOfEntryHeader = await stream.ReadUInt32(cancellationToken);
byte[] name = reader.ReadBytes(nameLength);
byte[] extra = reader.ReadBytes(extraLength);
byte[] comment = reader.ReadBytes(commentLength);
byte[] name = await stream.ReadBytes(nameLength, cancellationToken);
byte[] extra = await stream.ReadBytes(extraLength, cancellationToken);
byte[] comment = await stream.ReadBytes(commentLength, cancellationToken);
// According to .ZIP File Format Specification
//
@@ -63,8 +65,6 @@ namespace SharpCompress.Common.Zip.Headers
var zip64ExtraData = Extra.OfType<Zip64ExtendedInformationExtraField>().FirstOrDefault();
if (zip64ExtraData != null)
{
zip64ExtraData.Process(UncompressedSize, CompressedSize, RelativeOffsetOfEntryHeader, DiskNumberStart);
if (CompressedSize == uint.MaxValue)
{
CompressedSize = zip64ExtraData.CompressedSize;

View File

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers
{
@@ -9,8 +11,9 @@ namespace SharpCompress.Common.Zip.Headers
{
}
internal override void Read(BinaryReader reader)
internal override ValueTask Read(Stream stream, CancellationToken cancellationToken)
{
return new();
}
}
}

View File

@@ -1,5 +1,7 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers
{
@@ -10,20 +12,20 @@ namespace SharpCompress.Common.Zip.Headers
{
}
internal override void Read(BinaryReader reader)
internal override async ValueTask Read(Stream stream, CancellationToken cancellationToken)
{
Version = reader.ReadUInt16();
Flags = (HeaderFlags)reader.ReadUInt16();
CompressionMethod = (ZipCompressionMethod)reader.ReadUInt16();
LastModifiedTime = reader.ReadUInt16();
LastModifiedDate = reader.ReadUInt16();
Crc = reader.ReadUInt32();
CompressedSize = reader.ReadUInt32();
UncompressedSize = reader.ReadUInt32();
ushort nameLength = reader.ReadUInt16();
ushort extraLength = reader.ReadUInt16();
byte[] name = reader.ReadBytes(nameLength);
byte[] extra = reader.ReadBytes(extraLength);
Version = await stream.ReadUInt16(cancellationToken);
Flags = (HeaderFlags)await stream.ReadUInt16(cancellationToken);
CompressionMethod = (ZipCompressionMethod)await stream.ReadUInt16(cancellationToken);
LastModifiedTime = await stream.ReadUInt16(cancellationToken);
LastModifiedDate = await stream.ReadUInt16(cancellationToken);
Crc = await stream.ReadUInt32(cancellationToken);
CompressedSize = await stream.ReadUInt32(cancellationToken);
UncompressedSize = await stream.ReadUInt32(cancellationToken);
ushort nameLength = await stream.ReadUInt16(cancellationToken);
ushort extraLength = await stream.ReadUInt16(cancellationToken);
byte[] name = await stream.ReadBytes(nameLength, cancellationToken);
byte[] extra = await stream.ReadBytes(extraLength, cancellationToken);
// According to .ZIP File Format Specification
//
@@ -53,8 +55,6 @@ namespace SharpCompress.Common.Zip.Headers
var zip64ExtraData = Extra.OfType<Zip64ExtendedInformationExtraField>().FirstOrDefault();
if (zip64ExtraData != null)
{
zip64ExtraData.Process(UncompressedSize, CompressedSize, 0, 0);
if (CompressedSize == uint.MaxValue)
{
CompressedSize = zip64ExtraData.CompressedSize;

View File

@@ -66,74 +66,46 @@ namespace SharpCompress.Common.Zip.Headers
public Zip64ExtendedInformationExtraField(ExtraDataType type, ushort length, byte[] dataBytes)
: base(type, length, dataBytes)
{
Process();
}
// From the spec, values are only in the extradata if the standard
// value is set to 0xFFFFFFFF (or 0xFFFF for the Disk Start Number).
// Values, if present, must appear in the following order:
// - Original Size
// - Compressed Size
// - Relative Header Offset
// - Disk Start Number
public void Process(long uncompressedFileSize, long compressedFileSize, long relativeHeaderOffset, ushort diskNumber)
private void Process()
{
var bytesRequired = ((uncompressedFileSize == uint.MaxValue) ? 8 : 0)
+ ((compressedFileSize == uint.MaxValue) ? 8 : 0)
+ ((relativeHeaderOffset == uint.MaxValue) ? 8 : 0)
+ ((diskNumber == ushort.MaxValue) ? 4 : 0);
var currentIndex = 0;
if (bytesRequired > DataBytes.Length)
if (DataBytes.Length >= 8)
{
throw new ArchiveException("Zip64 extended information extra field is not large enough for the required information");
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
}
if (uncompressedFileSize == uint.MaxValue)
if (DataBytes.Length >= 16)
{
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(currentIndex));
currentIndex += 8;
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
}
if (compressedFileSize == uint.MaxValue)
if (DataBytes.Length >= 24)
{
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(currentIndex));
currentIndex += 8;
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(16));
}
if (relativeHeaderOffset == uint.MaxValue)
if (DataBytes.Length >= 28)
{
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(currentIndex));
currentIndex += 8;
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(24));
}
if (diskNumber == ushort.MaxValue)
switch (DataBytes.Length)
{
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(currentIndex));
case 8:
case 16:
case 24:
case 28:
break;
default:
throw new ArchiveException($"Unexpected size of of Zip64 extended information extra field: {DataBytes.Length}");
}
}
/// <summary>
/// Uncompressed file size. Only valid after <see cref="Process(long, long, long, ushort)"/> has been called and if the
/// original entry header had a corresponding 0xFFFFFFFF value.
/// </summary>
public long UncompressedSize { get; private set; }
/// <summary>
/// Compressed file size. Only valid after <see cref="Process(long, long, long, ushort)"/> has been called and if the
/// original entry header had a corresponding 0xFFFFFFFF value.
/// </summary>
public long CompressedSize { get; private set; }
/// <summary>
/// Relative offset of the entry header. Only valid after <see cref="Process(long, long, long, ushort)"/> has been called and if the
/// original entry header had a corresponding 0xFFFFFFFF value.
/// </summary>
public long RelativeOffsetOfEntryHeader { get; private set; }
/// <summary>
/// Volume number. Only valid after <see cref="Process(long, long, long, ushort)"/> has been called and if the
/// original entry header had a corresponding 0xFFFF value.
/// </summary>
public uint VolumeNumber { get; private set; }
}
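The rewritten Process infers which fields are present purely from the extra field's length, assuming writers emit them cumulatively in the order APPNOTE defines, rather than keying off the header's 0xFFFFFFFF markers as before. A worked example of that layout:

// Cumulative layout assumed by Process:
//   8 bytes  uncompressed size   (length >= 8)
//   8 bytes  compressed size     (length >= 16)
//   8 bytes  header offset       (length >= 24)
//   4 bytes  disk/volume number  (length == 28)
var data = new byte[16];
BinaryPrimitives.WriteInt64LittleEndian(data, 5_000_000_000);           // uncompressed
BinaryPrimitives.WriteInt64LittleEndian(data.AsSpan(8), 4_900_000_000); // compressed
// A 16-byte field therefore decodes only the two sizes.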

View File

@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers
{
@@ -10,7 +12,7 @@ namespace SharpCompress.Common.Zip.Headers
{
}
internal override void Read(BinaryReader reader)
internal override ValueTask Read(Stream stream, CancellationToken cancellationToken)
{
throw new NotImplementedException();
}

View File

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers
{
@@ -9,18 +11,18 @@ namespace SharpCompress.Common.Zip.Headers
{
}
internal override void Read(BinaryReader reader)
internal override async ValueTask Read(Stream stream, CancellationToken cancellationToken)
{
SizeOfDirectoryEndRecord = (long)reader.ReadUInt64();
VersionMadeBy = reader.ReadUInt16();
VersionNeededToExtract = reader.ReadUInt16();
VolumeNumber = reader.ReadUInt32();
FirstVolumeWithDirectory = reader.ReadUInt32();
TotalNumberOfEntriesInDisk = (long)reader.ReadUInt64();
TotalNumberOfEntries = (long)reader.ReadUInt64();
DirectorySize = (long)reader.ReadUInt64();
DirectoryStartOffsetRelativeToDisk = (long)reader.ReadUInt64();
DataSector = reader.ReadBytes((int)(SizeOfDirectoryEndRecord - SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS));
SizeOfDirectoryEndRecord = (long)await stream.ReadUInt64(cancellationToken);
VersionMadeBy = await stream.ReadUInt16(cancellationToken);
VersionNeededToExtract = await stream.ReadUInt16(cancellationToken);
VolumeNumber = await stream.ReadUInt32(cancellationToken);
FirstVolumeWithDirectory = await stream.ReadUInt32(cancellationToken);
TotalNumberOfEntriesInDisk = (long)await stream.ReadUInt64(cancellationToken);
TotalNumberOfEntries = (long)await stream.ReadUInt64(cancellationToken);
DirectorySize = (long)await stream.ReadUInt64(cancellationToken);
DirectoryStartOffsetRelativeToDisk = (long)await stream.ReadUInt64(cancellationToken);
DataSector = await stream.ReadBytes((int)(SizeOfDirectoryEndRecord - SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS), cancellationToken);
}
private const int SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS = 44;

View File

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers
{
@@ -9,11 +11,11 @@ namespace SharpCompress.Common.Zip.Headers
{
}
internal override void Read(BinaryReader reader)
internal override async ValueTask Read(Stream stream, CancellationToken cancellationToken)
{
FirstVolumeWithDirectory = reader.ReadUInt32();
RelativeOffsetOfTheEndOfDirectoryRecord = (long)reader.ReadUInt64();
TotalNumberOfVolumes = reader.ReadUInt32();
FirstVolumeWithDirectory = await stream.ReadUInt32(cancellationToken);
RelativeOffsetOfTheEndOfDirectoryRecord = (long)await stream.ReadUInt64(cancellationToken);
TotalNumberOfVolumes = await stream.ReadUInt32(cancellationToken);
}
public uint FirstVolumeWithDirectory { get; private set; }

View File

@@ -105,6 +105,6 @@ namespace SharpCompress.Common.Zip.Headers
internal ZipFilePart Part { get; set; }
internal bool IsZip64 => CompressedSize >= uint.MaxValue;
internal bool IsZip64 => CompressedSize == uint.MaxValue;
}
}

View File

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers
{
@@ -12,7 +14,7 @@ namespace SharpCompress.Common.Zip.Headers
internal ZipHeaderType ZipHeaderType { get; }
internal abstract void Read(BinaryReader reader);
internal abstract ValueTask Read(Stream stream, CancellationToken cancellationToken);
internal bool HasData { get; set; }
}

View File

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
@@ -17,22 +19,22 @@ namespace SharpCompress.Common.Zip
_directoryEntryHeader = header;
}
internal override Stream GetCompressedStream()
internal override async ValueTask<Stream> GetCompressedStreamAsync(CancellationToken cancellationToken)
{
if (!_isLocalHeaderLoaded)
{
LoadLocalHeader();
await LoadLocalHeader(cancellationToken);
_isLocalHeaderLoaded = true;
}
return base.GetCompressedStream();
return await base.GetCompressedStreamAsync(cancellationToken);
}
internal string? Comment => ((DirectoryEntryHeader)Header).Comment;
private void LoadLocalHeader()
private async ValueTask LoadLocalHeader(CancellationToken cancellationToken)
{
bool hasData = Header.HasData;
Header = _headerFactory.GetLocalHeader(BaseStream, ((DirectoryEntryHeader)Header));
Header = await _headerFactory.GetLocalHeader(BaseStream, (DirectoryEntryHeader)Header, cancellationToken);
Header.HasData = hasData;
}

View File

@@ -1,7 +1,12 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Xz;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip
@@ -19,15 +24,13 @@ namespace SharpCompress.Common.Zip
{
}
internal IEnumerable<ZipHeader> ReadSeekableHeader(Stream stream)
internal async IAsyncEnumerable<ZipHeader> ReadSeekableHeader(Stream stream, [EnumeratorCancellation]CancellationToken cancellationToken)
{
var reader = new BinaryReader(stream);
SeekBackToHeader(stream, reader);
await SeekBackToHeaderAsync(stream);
var eocd_location = stream.Position;
var entry = new DirectoryEndHeader();
entry.Read(reader);
await entry.Read(stream, cancellationToken);
if (entry.IsZip64)
{
@@ -35,37 +38,37 @@ namespace SharpCompress.Common.Zip
// ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR should be before the EOCD
stream.Seek(eocd_location - ZIP64_EOCD_LENGTH - 4, SeekOrigin.Begin);
uint zip64_locator = reader.ReadUInt32();
uint zip64_locator = await stream.ReadUInt32(cancellationToken);
if( zip64_locator != ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR )
{
throw new ArchiveException("Failed to locate the Zip64 Directory Locator");
}
var zip64Locator = new Zip64DirectoryEndLocatorHeader();
zip64Locator.Read(reader);
await zip64Locator.Read(stream, cancellationToken);
stream.Seek(zip64Locator.RelativeOffsetOfTheEndOfDirectoryRecord, SeekOrigin.Begin);
uint zip64Signature = reader.ReadUInt32();
uint zip64Signature = await stream.ReadUInt32(cancellationToken);
if (zip64Signature != ZIP64_END_OF_CENTRAL_DIRECTORY)
{
throw new ArchiveException("Failed to locate the Zip64 Header");
}
var zip64Entry = new Zip64DirectoryEndHeader();
zip64Entry.Read(reader);
await zip64Entry.Read(stream, cancellationToken);
stream.Seek(zip64Entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin);
}
else
{
stream.Seek(entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin);
stream.Seek(entry.DirectoryStartOffsetRelativeToDisk ?? 0, SeekOrigin.Begin);
}
long position = stream.Position;
while (true)
{
stream.Position = position;
uint signature = reader.ReadUInt32();
var nextHeader = ReadHeader(signature, reader, _zip64);
uint signature = await stream.ReadUInt32(cancellationToken);
var nextHeader = await ReadHeader(signature, stream, cancellationToken, _zip64);
position = stream.Position;
if (nextHeader is null)
@@ -86,7 +89,7 @@ namespace SharpCompress.Common.Zip
}
}
private static bool IsMatch( byte[] haystack, int position, byte[] needle)
private static bool IsMatch(Span<byte> haystack, int position, byte[] needle)
{
for( int i = 0; i < needle.Length; i++ )
{
@@ -98,7 +101,7 @@ namespace SharpCompress.Common.Zip
return true;
}
private static void SeekBackToHeader(Stream stream, BinaryReader reader)
private static async ValueTask SeekBackToHeaderAsync(Stream stream)
{
// Minimum EOCD length
if (stream.Length < MINIMUM_EOCD_LENGTH)
@@ -112,16 +115,18 @@ namespace SharpCompress.Common.Zip
stream.Seek(-len, SeekOrigin.End);
byte[] seek = reader.ReadBytes(len);
using var rented = MemoryPool<byte>.Shared.Rent(len);
var buffer = rented.Memory.Slice(0, len);
await stream.ReadAsync(buffer);
// Search in reverse
Array.Reverse(seek);
buffer.Span.Reverse();
var max_search_area = len - MINIMUM_EOCD_LENGTH;
for( int pos_from_end = 0; pos_from_end < max_search_area; ++pos_from_end)
{
if( IsMatch(seek, pos_from_end, needle) )
if( IsMatch(buffer.Span, pos_from_end, needle) )
{
stream.Seek(-pos_from_end, SeekOrigin.End);
return;
@@ -131,12 +136,11 @@ namespace SharpCompress.Common.Zip
throw new ArchiveException("Failed to locate the Zip Header");
}
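SeekBackToHeaderAsync reverses the tail buffer so the signature scan can run forward from the end of the stream. An equivalent formulation without the reversal, for illustration only:

// Equivalent search: scan the unreversed tail backwards directly.
private static int FindFromEnd(ReadOnlySpan<byte> tail, ReadOnlySpan<byte> signature)
{
    for (var start = tail.Length - signature.Length; start >= 0; start--)
    {
        if (tail.Slice(start, signature.Length).SequenceEqual(signature))
        {
            return tail.Length - start; // distance back from the end of the stream
        }
    }
    return -1; // not found
}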
internal LocalEntryHeader GetLocalHeader(Stream stream, DirectoryEntryHeader directoryEntryHeader)
internal async ValueTask<LocalEntryHeader> GetLocalHeader(Stream stream, DirectoryEntryHeader directoryEntryHeader, CancellationToken cancellationToken)
{
stream.Seek(directoryEntryHeader.RelativeOffsetOfEntryHeader, SeekOrigin.Begin);
BinaryReader reader = new BinaryReader(stream);
uint signature = reader.ReadUInt32();
var localEntryHeader = ReadHeader(signature, reader, _zip64) as LocalEntryHeader;
uint signature = await stream.ReadUInt32(cancellationToken);
var localEntryHeader = await ReadHeader(signature, stream, cancellationToken, _zip64) as LocalEntryHeader;
if (localEntryHeader is null)
{
throw new InvalidOperationException();

View File

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Deflate;
using SharpCompress.IO;
@@ -19,13 +21,13 @@ namespace SharpCompress.Common.Zip
return Header.PackedStream;
}
internal override Stream GetCompressedStream()
internal override async ValueTask<Stream> GetCompressedStreamAsync(CancellationToken cancellationToken)
{
if (!Header.HasData)
{
return Stream.Null;
}
_decompressionStream = CreateDecompressionStream(GetCryptoStream(CreateBaseStream()), Header.CompressionMethod);
_decompressionStream = await CreateDecompressionStream(GetCryptoStream(CreateBaseStream()), Header.CompressionMethod, cancellationToken);
if (LeaveStreamOpen)
{
return new NonDisposingStream(_decompressionStream);
@@ -33,17 +35,17 @@ namespace SharpCompress.Common.Zip
return _decompressionStream;
}
internal BinaryReader FixStreamedFileLocation(ref RewindableStream rewindableStream)
internal async ValueTask FixStreamedFileLocation(RewindableStream rewindableStream, CancellationToken cancellationToken)
{
if (Header.IsDirectory)
{
return new BinaryReader(rewindableStream);
return;
}
if (Header.HasData && !Skipped)
{
_decompressionStream ??= GetCompressedStream();
_decompressionStream ??= await GetCompressedStreamAsync(cancellationToken);
_decompressionStream.Skip();
await _decompressionStream.SkipAsync(cancellationToken);
if (_decompressionStream is DeflateStream deflateStream)
{
@@ -51,9 +53,7 @@ namespace SharpCompress.Common.Zip
}
Skipped = true;
}
var reader = new BinaryReader(rewindableStream);
_decompressionStream = null;
return reader;
}
}
}
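Context for the drain above: a streamed entry is followed by a post-data descriptor, and FixStreamedFileLocation leaves the rewindable stream positioned on it for the streaming header factory to consume.

// Post-data descriptor layout (APPNOTE 4.3.9):
//   [optional signature 0x08074B50][CRC-32][compressed size][uncompressed size]
// The signature is optional, which is why the streaming reader re-reads the
// CRC field when the first four bytes match POST_DATA_DESCRIPTOR.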

View File

@@ -1,5 +1,6 @@
using System.Collections.Generic;
using System.IO;
using System.Runtime.CompilerServices;
using System.Threading;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
@@ -12,43 +13,36 @@ namespace SharpCompress.Common.Zip
{
}
internal IEnumerable<ZipHeader> ReadStreamHeader(Stream stream)
internal async IAsyncEnumerable<ZipHeader> ReadStreamHeader(RewindableStream rewindableStream, [EnumeratorCancellation] CancellationToken cancellationToken)
{
RewindableStream rewindableStream;
if (stream is RewindableStream rs)
{
rewindableStream = rs;
}
else
{
rewindableStream = new RewindableStream(stream);
}
while (true)
{
ZipHeader? header;
BinaryReader reader = new BinaryReader(rewindableStream);
if (_lastEntryHeader != null &&
(FlagUtility.HasFlag(_lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor) || _lastEntryHeader.IsZip64))
{
reader = ((StreamingZipFilePart)_lastEntryHeader.Part).FixStreamedFileLocation(ref rewindableStream);
await ((StreamingZipFilePart)_lastEntryHeader.Part).FixStreamedFileLocation(rewindableStream, cancellationToken);
long? pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;
uint crc = reader.ReadUInt32();
uint crc = await rewindableStream.ReadUInt32(cancellationToken);
if (crc == POST_DATA_DESCRIPTOR)
{
crc = reader.ReadUInt32();
crc = await rewindableStream.ReadUInt32(cancellationToken);
}
_lastEntryHeader.Crc = crc;
_lastEntryHeader.CompressedSize = reader.ReadUInt32();
_lastEntryHeader.UncompressedSize = reader.ReadUInt32();
_lastEntryHeader.CompressedSize = await rewindableStream.ReadUInt32(cancellationToken);
_lastEntryHeader.UncompressedSize = await rewindableStream.ReadUInt32(cancellationToken);
if (pos.HasValue)
{
_lastEntryHeader.DataStartPosition = pos - _lastEntryHeader.CompressedSize;
}
}
_lastEntryHeader = null;
uint headerBytes = reader.ReadUInt32();
header = ReadHeader(headerBytes, reader);
var headerBytes = await rewindableStream.ReadUInt32OrNull(cancellationToken);
if (headerBytes is null)
{
yield break;
}
header = await ReadHeader(headerBytes.Value, rewindableStream, cancellationToken);
if (header is null)
{
yield break;
@@ -71,10 +65,10 @@ namespace SharpCompress.Common.Zip
{
rewindableStream.StartRecording();
}
uint nextHeaderBytes = reader.ReadUInt32();
uint nextHeaderBytes = await rewindableStream.ReadUInt32(cancellationToken);
// Check if next data is PostDataDescriptor, streamed file with 0 length
header.HasData = !IsHeader(nextHeaderBytes);
header.HasData = nextHeaderBytes != POST_DATA_DESCRIPTOR;
rewindableStream.Rewind(!isRecording);
}
else // We are not streaming and compressed size is 0, we have no data

View File

@@ -2,13 +2,15 @@
using System.Buffers.Binary;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Compressors.Deflate64;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.PPMd;
//using SharpCompress.Compressors.PPMd;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip
@@ -28,13 +30,13 @@ namespace SharpCompress.Common.Zip
internal override string FilePartName => Header.Name;
internal override Stream GetCompressedStream()
internal override async ValueTask<Stream> GetCompressedStreamAsync(CancellationToken cancellationToken)
{
if (!Header.HasData)
{
return Stream.Null;
}
Stream decompressionStream = CreateDecompressionStream(GetCryptoStream(CreateBaseStream()), Header.CompressionMethod);
Stream decompressionStream = await CreateDecompressionStream(GetCryptoStream(CreateBaseStream()), Header.CompressionMethod, cancellationToken);
if (LeaveStreamOpen)
{
return new NonDisposingStream(decompressionStream);
@@ -55,7 +57,7 @@ namespace SharpCompress.Common.Zip
protected bool LeaveStreamOpen => FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor) || Header.IsZip64;
protected Stream CreateDecompressionStream(Stream stream, ZipCompressionMethod method)
protected async ValueTask<Stream> CreateDecompressionStream(Stream stream, ZipCompressionMethod method, CancellationToken cancellationToken)
{
switch (method)
{
@@ -73,30 +75,29 @@ namespace SharpCompress.Common.Zip
}
case ZipCompressionMethod.BZip2:
{
return new BZip2Stream(stream, CompressionMode.Decompress, false);
}
return await BZip2Stream.CreateAsync(stream, CompressionMode.Decompress, false, cancellationToken);
}
case ZipCompressionMethod.LZMA:
{
if (FlagUtility.HasFlag(Header.Flags, HeaderFlags.Encrypted))
{
throw new NotSupportedException("LZMA with pkware encryption.");
}
var reader = new BinaryReader(stream);
reader.ReadUInt16(); //LZMA version
var props = new byte[reader.ReadUInt16()];
reader.Read(props, 0, props.Length);
return new LzmaStream(props, stream,
await stream.ReadUInt16(cancellationToken); //LZMA version
var props = new byte[await stream.ReadUInt16(cancellationToken)];
await stream.ReadAsync(props, 0, props.Length, cancellationToken);
return await LzmaStream.CreateAsync(props, stream,
Header.CompressedSize > 0 ? Header.CompressedSize - 4 - props.Length : -1,
FlagUtility.HasFlag(Header.Flags, HeaderFlags.Bit1)
? -1
: (long)Header.UncompressedSize);
: (long)Header.UncompressedSize,
cancellationToken: cancellationToken);
}
case ZipCompressionMethod.PPMd:
{
Span<byte> props = stackalloc byte[2];
stream.ReadFully(props);
/* case ZipCompressionMethod.PPMd:
{
var props = await stream.ReadBytes(2, cancellationToken);
return new PpmdStream(new PpmdProperties(props), stream, false);
}
} */
case ZipCompressionMethod.WinzipAes:
{
ExtraData? data = Header.Extra.SingleOrDefault(x => x.Type == ExtraDataType.WinZipAes);
@@ -120,7 +121,7 @@ namespace SharpCompress.Common.Zip
{
throw new InvalidFormatException("Unexpected vendor ID for WinZip AES metadata");
}
return CreateDecompressionStream(stream, (ZipCompressionMethod)BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes.AsSpan(5)));
return await CreateDecompressionStream(stream, (ZipCompressionMethod)BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes.AsSpan(5)), cancellationToken);
}
default:
{

View File

@@ -1,6 +1,8 @@
using System;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
@@ -30,15 +32,15 @@ namespace SharpCompress.Common.Zip
this._archiveEncoding = archiveEncoding;
}
protected ZipHeader? ReadHeader(uint headerBytes, BinaryReader reader, bool zip64 = false)
protected async ValueTask<ZipHeader?> ReadHeader(uint headerBytes, Stream stream, CancellationToken cancellationToken, bool zip64 = false)
{
switch (headerBytes)
{
case ENTRY_HEADER_BYTES:
{
var entryHeader = new LocalEntryHeader(_archiveEncoding);
entryHeader.Read(reader);
LoadHeader(entryHeader, reader.BaseStream);
await entryHeader.Read(stream, cancellationToken);
await LoadHeader(entryHeader, stream, cancellationToken);
_lastEntryHeader = entryHeader;
return entryHeader;
@@ -46,20 +48,20 @@ namespace SharpCompress.Common.Zip
case DIRECTORY_START_HEADER_BYTES:
{
var entry = new DirectoryEntryHeader(_archiveEncoding);
entry.Read(reader);
await entry.Read(stream, cancellationToken);
return entry;
}
case POST_DATA_DESCRIPTOR:
{
if (FlagUtility.HasFlag(_lastEntryHeader!.Flags, HeaderFlags.UsePostDataDescriptor))
{
_lastEntryHeader.Crc = reader.ReadUInt32();
_lastEntryHeader.CompressedSize = zip64 ? (long)reader.ReadUInt64() : reader.ReadUInt32();
_lastEntryHeader.UncompressedSize = zip64 ? (long)reader.ReadUInt64() : reader.ReadUInt32();
_lastEntryHeader.Crc = await stream.ReadUInt32(cancellationToken);
_lastEntryHeader.CompressedSize = zip64 ? (long)await stream.ReadUInt64(cancellationToken) : await stream.ReadUInt32(cancellationToken);
_lastEntryHeader.UncompressedSize = zip64 ? (long)await stream.ReadUInt64(cancellationToken) : await stream.ReadUInt32(cancellationToken);
}
else
{
reader.ReadBytes(zip64 ? 20 : 12);
await stream.ReadBytes(zip64 ? 20 : 12, cancellationToken);
}
return null;
}
@@ -68,7 +70,7 @@ namespace SharpCompress.Common.Zip
case DIRECTORY_END_HEADER_BYTES:
{
var entry = new DirectoryEndHeader();
entry.Read(reader);
await entry.Read(stream, cancellationToken);
return entry;
}
case SPLIT_ARCHIVE_HEADER_BYTES:
@@ -78,13 +80,13 @@ namespace SharpCompress.Common.Zip
case ZIP64_END_OF_CENTRAL_DIRECTORY:
{
var entry = new Zip64DirectoryEndHeader();
entry.Read(reader);
await entry.Read(stream, cancellationToken);
return entry;
}
case ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR:
{
var entry = new Zip64DirectoryEndLocatorHeader();
entry.Read(reader);
await entry.Read(stream, cancellationToken);
return entry;
}
default:
@@ -110,7 +112,7 @@ namespace SharpCompress.Common.Zip
}
}
private void LoadHeader(ZipFileEntry entryHeader, Stream stream)
private async ValueTask LoadHeader(ZipFileEntry entryHeader, Stream stream, CancellationToken cancellationToken)
{
if (FlagUtility.HasFlag(entryHeader.Flags, HeaderFlags.Encrypted))
{
@@ -134,10 +136,8 @@ namespace SharpCompress.Common.Zip
{
var keySize = (WinzipAesKeySize)data.DataBytes[4];
var salt = new byte[WinzipAesEncryptionData.KeyLengthInBytes(keySize) / 2];
var passwordVerifyValue = new byte[2];
stream.Read(salt, 0, salt.Length);
stream.Read(passwordVerifyValue, 0, 2);
var salt = await stream.ReadBytes(WinzipAesEncryptionData.KeyLengthInBytes(keySize) / 2, cancellationToken);
var passwordVerifyValue = await stream.ReadBytes(2, cancellationToken);
entryHeader.WinzipAesEncryptionData =
new WinzipAesEncryptionData(keySize, salt, passwordVerifyValue, _password);

View File

@@ -1,9 +1,13 @@
using System;
using System.Buffers;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Compressors.BZip2
{
public sealed class BZip2Stream : Stream
public sealed class BZip2Stream : AsyncStream
{
private readonly Stream stream;
private bool isDisposed;
@@ -33,17 +37,14 @@ namespace SharpCompress.Compressors.BZip2
(stream as CBZip2OutputStream)?.Finish();
}
protected override void Dispose(bool disposing)
public override async ValueTask DisposeAsync()
{
if (isDisposed)
{
return;
}
isDisposed = true;
if (disposing)
{
stream.Dispose();
}
await stream.DisposeAsync();
}
public CompressionMode Mode { get; }
@@ -54,23 +55,18 @@ namespace SharpCompress.Compressors.BZip2
public override bool CanWrite => stream.CanWrite;
public override void Flush()
public override Task FlushAsync(CancellationToken cancellationToken)
{
stream.Flush();
return stream.FlushAsync(cancellationToken);
}
public override long Length => stream.Length;
public override long Position { get => stream.Position; set => stream.Position = value; }
public override int Read(byte[] buffer, int offset, int count)
public override ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken = default)
{
return stream.Read(buffer, offset, count);
}
public override int ReadByte()
{
return stream.ReadByte();
return stream.ReadAsync(buffer, cancellationToken);
}
public override long Seek(long offset, SeekOrigin origin)
@@ -83,28 +79,14 @@ namespace SharpCompress.Compressors.BZip2
stream.SetLength(value);
}
#if !NET461 && !NETSTANDARD2_0
public override int Read(Span<byte> buffer)
public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
return stream.Read(buffer);
return stream.WriteAsync(buffer, offset, count, cancellationToken);
}
public override void Write(ReadOnlySpan<byte> buffer)
public override ValueTask WriteAsync(ReadOnlyMemory<byte> buffer, CancellationToken cancellationToken = default)
{
stream.Write(buffer);
}
#endif
public override void Write(byte[] buffer, int offset, int count)
{
stream.Write(buffer, offset, count);
}
public override void WriteByte(byte value)
{
stream.WriteByte(value);
return stream.WriteAsync(buffer, cancellationToken);
}
/// <summary>
@@ -112,11 +94,12 @@ namespace SharpCompress.Compressors.BZip2
/// </summary>
/// <param name="stream"></param>
/// <returns></returns>
public static bool IsBZip2(Stream stream)
public static async ValueTask<bool> IsBZip2Async(Stream stream, CancellationToken cancellationToken)
{
BinaryReader br = new BinaryReader(stream);
byte[] chars = br.ReadBytes(2);
if (chars.Length < 2 || chars[0] != 'B' || chars[1] != 'Z')
using var rented = MemoryPool<byte>.Shared.Rent(2);
var chars = rented.Memory.Slice(0, 2);
var bytesRead = await stream.ReadAsync(chars, cancellationToken); // check the count: chars.Length is always 2 here
if (bytesRead < 2 || chars.Span[0] != 'B' || chars.Span[1] != 'Z')
{
return false;
}

View File

@@ -27,10 +27,13 @@
using System;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Deflate
{
public class DeflateStream : Stream
public class DeflateStream : AsyncStream
{
private readonly ZlibBaseStream _baseStream;
private bool _disposed;
@@ -216,35 +219,25 @@ namespace SharpCompress.Compressors.Deflate
/// <remarks>
/// This may or may not result in a <c>Close()</c> call on the captive stream.
/// </remarks>
protected override void Dispose(bool disposing)
public override async ValueTask DisposeAsync()
{
try
if (!_disposed)
{
if (!_disposed)
{
if (disposing)
{
_baseStream?.Dispose();
}
_disposed = true;
}
}
finally
{
base.Dispose(disposing);
await _baseStream.DisposeAsync();
_disposed = true;
}
}
/// <summary>
/// Flush the stream.
/// </summary>
public override void Flush()
public override async Task FlushAsync(CancellationToken cancellationToken)
{
if (_disposed)
{
throw new ObjectDisposedException("DeflateStream");
}
_baseStream.Flush();
await _baseStream.FlushAsync(cancellationToken);
}
/// <summary>
@@ -273,24 +266,14 @@ namespace SharpCompress.Compressors.Deflate
/// <param name="offset">the offset within that data array to put the first byte read.</param>
/// <param name="count">the number of bytes to read.</param>
/// <returns>the number of bytes actually read</returns>
public override int Read(byte[] buffer, int offset, int count)
public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
if (_disposed)
{
throw new ObjectDisposedException("DeflateStream");
}
return _baseStream.Read(buffer, offset, count);
return await _baseStream.ReadAsync(buffer, offset, count, cancellationToken);
}
public override int ReadByte()
{
if (_disposed)
{
throw new ObjectDisposedException("DeflateStream");
}
return _baseStream.ReadByte();
}
/// <summary>
/// Calling this method always throws a <see cref="NotImplementedException"/>.
/// </summary>
@@ -340,22 +323,13 @@ namespace SharpCompress.Compressors.Deflate
/// <param name="buffer">The buffer holding data to write to the stream.</param>
/// <param name="offset">the offset within that data array to find the first byte to write.</param>
/// <param name="count">the number of bytes to write.</param>
public override void Write(byte[] buffer, int offset, int count)
public override async Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
if (_disposed)
{
throw new ObjectDisposedException("DeflateStream");
}
_baseStream.Write(buffer, offset, count);
}
public override void WriteByte(byte value)
{
if (_disposed)
{
throw new ObjectDisposedException("DeflateStream");
}
_baseStream.WriteByte(value);
await _baseStream.WriteAsync(buffer, offset, count, cancellationToken);
}
#endregion
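With the synchronous Read and ReadByte overrides gone, consumers drive the stream through ReadAsync. An illustrative drain loop; deflate and destination are stand-ins:

// Illustration: copying a DeflateStream through the async-only surface.
var buffer = new byte[8192];
int n;
while ((n = await deflate.ReadAsync(buffer, 0, buffer.Length, CancellationToken.None)) > 0)
{
    await destination.WriteAsync(buffer, 0, n, CancellationToken.None);
}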

View File

@@ -27,21 +27,25 @@
// ------------------------------------------------------------------
using System;
using System.Buffers;
using System.Buffers.Binary;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Deflate
{
public class GZipStream : Stream
public class GZipStream : AsyncStream
{
internal static readonly DateTime UNIX_EPOCH = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
private static readonly DateTime UNIX_EPOCH = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
private string? _comment;
private string? _fileName;
private DateTime? _lastModified;
internal ZlibBaseStream BaseStream;
private readonly ZlibBaseStream _baseStream;
private bool _disposed;
private bool _firstReadDone;
private int _headerByteCount;
@@ -60,7 +64,7 @@ namespace SharpCompress.Compressors.Deflate
public GZipStream(Stream stream, CompressionMode mode, CompressionLevel level, Encoding encoding)
{
BaseStream = new ZlibBaseStream(stream, mode, level, ZlibStreamFlavor.GZIP, encoding);
_baseStream = new ZlibBaseStream(stream, mode, level, ZlibStreamFlavor.GZIP, encoding);
_encoding = encoding;
}
@@ -68,27 +72,27 @@ namespace SharpCompress.Compressors.Deflate
public virtual FlushType FlushMode
{
get => (BaseStream._flushMode);
get => (_baseStream._flushMode);
set
{
if (_disposed)
{
throw new ObjectDisposedException("GZipStream");
}
BaseStream._flushMode = value;
_baseStream._flushMode = value;
}
}
public int BufferSize
{
get => BaseStream._bufferSize;
get => _baseStream._bufferSize;
set
{
if (_disposed)
{
throw new ObjectDisposedException("GZipStream");
}
if (BaseStream._workingBuffer != null)
if (_baseStream._workingBuffer != null)
{
throw new ZlibException("The working buffer is already set.");
}
@@ -98,13 +102,13 @@ namespace SharpCompress.Compressors.Deflate
String.Format("Don't be silly. {0} bytes?? Use a bigger buffer, at least {1}.", value,
ZlibConstants.WorkingBufferSizeMin));
}
BaseStream._bufferSize = value;
_baseStream._bufferSize = value;
}
}
internal virtual long TotalIn => BaseStream._z.TotalBytesIn;
internal virtual long TotalIn => _baseStream._z.TotalBytesIn;
internal virtual long TotalOut => BaseStream._z.TotalBytesOut;
internal virtual long TotalOut => _baseStream._z.TotalBytesOut;
#endregion
@@ -124,7 +128,7 @@ namespace SharpCompress.Compressors.Deflate
{
throw new ObjectDisposedException("GZipStream");
}
return BaseStream._stream.CanRead;
return _baseStream._stream.CanRead;
}
}
@@ -150,7 +154,7 @@ namespace SharpCompress.Compressors.Deflate
{
throw new ObjectDisposedException("GZipStream");
}
return BaseStream._stream.CanWrite;
return _baseStream._stream.CanWrite;
}
}
@@ -174,13 +178,13 @@ namespace SharpCompress.Compressors.Deflate
{
get
{
if (BaseStream._streamMode == ZlibBaseStream.StreamMode.Writer)
if (_baseStream._streamMode == ZlibBaseStream.StreamMode.Writer)
{
return BaseStream._z.TotalBytesOut + _headerByteCount;
return _baseStream._z.TotalBytesOut + _headerByteCount;
}
if (BaseStream._streamMode == ZlibBaseStream.StreamMode.Reader)
if (_baseStream._streamMode == ZlibBaseStream.StreamMode.Reader)
{
return BaseStream._z.TotalBytesIn + BaseStream._gzipHeaderByteCount;
return _baseStream._z.TotalBytesIn + _baseStream._gzipHeaderByteCount;
}
return 0;
}
@@ -194,36 +198,29 @@ namespace SharpCompress.Compressors.Deflate
/// <remarks>
/// This may or may not result in a <c>Close()</c> call on the captive stream.
/// </remarks>
protected override void Dispose(bool disposing)
{
try
public override async ValueTask DisposeAsync()
{
if (!_disposed)
{
if (disposing && (BaseStream != null))
if (_baseStream is not null)
{
BaseStream.Dispose();
Crc32 = BaseStream.Crc32;
await _baseStream.DisposeAsync();
Crc32 = _baseStream.Crc32;
}
_disposed = true;
}
}
finally
{
base.Dispose(disposing);
}
}
/// <summary>
/// Flush the stream.
/// </summary>
public override void Flush()
public override Task FlushAsync(CancellationToken cancellationToken)
{
if (_disposed)
{
throw new ObjectDisposedException("GZipStream");
}
BaseStream.Flush();
return _baseStream.FlushAsync(cancellationToken);
}
/// <summary>
@@ -257,13 +254,13 @@ namespace SharpCompress.Compressors.Deflate
/// <param name="offset">the offset within that data array to put the first byte read.</param>
/// <param name="count">the number of bytes to read.</param>
/// <returns>the number of bytes actually read</returns>
public override int Read(byte[] buffer, int offset, int count)
public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
if (_disposed)
{
throw new ObjectDisposedException("GZipStream");
}
int n = BaseStream.Read(buffer, offset, count);
int n = await _baseStream.ReadAsync(buffer, offset, count, cancellationToken);
// Console.WriteLine("GZipStream::Read(buffer, off({0}), c({1}) = {2}", offset, count, n);
// Console.WriteLine( Util.FormatByteArray(buffer, offset, n) );
@@ -271,9 +268,9 @@ namespace SharpCompress.Compressors.Deflate
if (!_firstReadDone)
{
_firstReadDone = true;
FileName = BaseStream._GzipFileName;
Comment = BaseStream._GzipComment;
LastModified = BaseStream._GzipMtime;
FileName = _baseStream._GzipFileName;
Comment = _baseStream._GzipComment;
LastModified = _baseStream._GzipMtime;
}
return n;
}
@@ -320,19 +317,19 @@ namespace SharpCompress.Compressors.Deflate
/// <param name="buffer">The buffer holding data to write to the stream.</param>
/// <param name="offset">the offset within that data array to find the first byte to write.</param>
/// <param name="count">the number of bytes to write.</param>
public override void Write(byte[] buffer, int offset, int count)
public override async Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
if (_disposed)
{
throw new ObjectDisposedException("GZipStream");
}
if (BaseStream._streamMode == ZlibBaseStream.StreamMode.Undefined)
if (_baseStream._streamMode == ZlibBaseStream.StreamMode.Undefined)
{
//Console.WriteLine("GZipStream: First write");
if (BaseStream._wantCompress)
if (_baseStream._wantCompress)
{
// first write in compression, therefore, emit the GZIP header
_headerByteCount = EmitHeader();
_headerByteCount = await EmitHeaderAsync();
}
else
{
@@ -340,7 +337,7 @@ namespace SharpCompress.Compressors.Deflate
}
}
BaseStream.Write(buffer, offset, count);
await _baseStream.WriteAsync(buffer, offset, count, cancellationToken);
}
#endregion Stream methods
@@ -405,7 +402,7 @@ namespace SharpCompress.Compressors.Deflate
public int Crc32 { get; private set; }
private int EmitHeader()
private async ValueTask<int> EmitHeaderAsync()
{
byte[]? commentBytes = (Comment is null) ? null
: _encoding.GetBytes(Comment);
@@ -474,7 +471,7 @@ namespace SharpCompress.Compressors.Deflate
header[i++] = 0; // terminate
}
BaseStream._stream.Write(header, 0, header.Length);
await _baseStream._stream.WriteAsync(header, 0, header.Length);
return header.Length; // bytes written
}
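The header EmitHeaderAsync assembles is the standard RFC 1952 member header; for reference:

// GZIP member header (RFC 1952):
//   0x1F 0x8B   magic
//   0x08        CM = deflate
//   FLG         bit 3 = FNAME present, bit 4 = FCOMMENT present
//   MTIME       4 bytes, seconds since the Unix epoch, little-endian
//   XFL, OS     followed by the optional NUL-terminated name and comment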

View File

@@ -27,11 +27,15 @@
// ------------------------------------------------------------------
using System;
using System.Buffers;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Tar.Headers;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Deflate
{
@@ -42,18 +46,18 @@ namespace SharpCompress.Compressors.Deflate
GZIP = 1952
}
internal class ZlibBaseStream : Stream
internal class ZlibBaseStream : AsyncStream
{
protected internal ZlibCodec _z; // deferred init... new ZlibCodec();
protected internal StreamMode _streamMode = StreamMode.Undefined;
protected internal FlushType _flushMode;
protected internal ZlibStreamFlavor _flavor;
protected internal CompressionMode _compressionMode;
protected internal CompressionLevel _level;
private readonly ZlibStreamFlavor _flavor;
private readonly CompressionMode _compressionMode;
private readonly CompressionLevel _level;
protected internal byte[] _workingBuffer;
protected internal int _bufferSize = ZlibConstants.WorkingBufferSizeDefault;
protected internal byte[] _buf1 = new byte[1];
private readonly byte[] _buf1 = new byte[1];
protected internal Stream _stream;
protected internal CompressionStrategy Strategy = CompressionStrategy.Default;
@@ -116,19 +120,13 @@ namespace SharpCompress.Compressors.Deflate
}
}
private byte[] workingBuffer
{
get => _workingBuffer ??= new byte[_bufferSize];
}
private byte[] workingBuffer => _workingBuffer ??= new byte[_bufferSize];
public override void Write(byte[] buffer, int offset, int count)
public override async Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
// workitem 7159
// calculate the CRC on the unccompressed data (before writing)
if (crc != null)
{
crc.SlurpBlock(buffer, offset, count);
}
crc?.SlurpBlock(buffer, offset, count);
if (_streamMode == StreamMode.Undefined)
{
@@ -148,7 +146,7 @@ namespace SharpCompress.Compressors.Deflate
z.InputBuffer = buffer;
_z.NextIn = offset;
_z.AvailableBytesIn = count;
bool done = false;
var done = false;
do
{
_z.OutputBuffer = workingBuffer;
@@ -163,7 +161,7 @@ namespace SharpCompress.Compressors.Deflate
}
//if (_workingBuffer.Length - _z.AvailableBytesOut > 0)
_stream.Write(_workingBuffer, 0, _workingBuffer.Length - _z.AvailableBytesOut);
await _stream.WriteAsync(_workingBuffer, 0, _workingBuffer.Length - _z.AvailableBytesOut, cancellationToken);
done = _z.AvailableBytesIn == 0 && _z.AvailableBytesOut != 0;
@@ -176,7 +174,7 @@ namespace SharpCompress.Compressors.Deflate
while (!done);
}
private void finish()
private async Task FinishAsync()
{
if (_z is null)
{
@@ -185,7 +183,7 @@ namespace SharpCompress.Compressors.Deflate
if (_streamMode == StreamMode.Writer)
{
bool done = false;
var done = false;
do
{
_z.OutputBuffer = workingBuffer;
@@ -200,14 +198,14 @@ namespace SharpCompress.Compressors.Deflate
string verb = (_wantCompress ? "de" : "in") + "flating";
if (_z.Message is null)
{
throw new ZlibException(String.Format("{0}: (rc = {1})", verb, rc));
throw new ZlibException($"{verb}: (rc = {rc})");
}
throw new ZlibException(verb + ": " + _z.Message);
}
if (_workingBuffer.Length - _z.AvailableBytesOut > 0)
{
_stream.Write(_workingBuffer, 0, _workingBuffer.Length - _z.AvailableBytesOut);
await _stream.WriteAsync(_workingBuffer, 0, _workingBuffer.Length - _z.AvailableBytesOut);
}
done = _z.AvailableBytesIn == 0 && _z.AvailableBytesOut != 0;
@@ -220,7 +218,7 @@ namespace SharpCompress.Compressors.Deflate
}
while (!done);
Flush();
await FlushAsync();
// workitem 7159
if (_flavor == ZlibStreamFlavor.GZIP)
@@ -228,12 +226,13 @@ namespace SharpCompress.Compressors.Deflate
if (_wantCompress)
{
// Emit the GZIP trailer: CRC32 and size mod 2^32
Span<byte> intBuf = stackalloc byte[4];
BinaryPrimitives.WriteInt32LittleEndian(intBuf, crc.Crc32Result);
_stream.Write(intBuf);
using var rented = MemoryPool<byte>.Shared.Rent(4);
var intBuf = rented.Memory.Slice(0, 4);
BinaryPrimitives.WriteInt32LittleEndian(intBuf.Span, crc.Crc32Result);
await _stream.WriteAsync(intBuf, CancellationToken.None);
int c2 = (int)(crc.TotalBytesRead & 0x00000000FFFFFFFF);
BinaryPrimitives.WriteInt32LittleEndian(intBuf, c2);
_stream.Write(intBuf);
BinaryPrimitives.WriteInt32LittleEndian(intBuf.Span, c2);
await _stream.WriteAsync(intBuf, CancellationToken.None);
}
else
{
@@ -256,44 +255,41 @@ namespace SharpCompress.Compressors.Deflate
}
// Read and potentially verify the GZIP trailer: CRC32 and size mod 2^32
Span<byte> trailer = stackalloc byte[8];
using var rented = MemoryPool<byte>.Shared.Rent(8);
var trailer = rented.Memory.Slice(0, 8);
// workitem 8679
if (_z.AvailableBytesIn != 8)
{
// Make sure we have read to the end of the stream
_z.InputBuffer.AsSpan(_z.NextIn, _z.AvailableBytesIn).CopyTo(trailer);
_z.InputBuffer.AsSpan(_z.NextIn, _z.AvailableBytesIn).CopyTo(trailer.Span);
int bytesNeeded = 8 - _z.AvailableBytesIn;
int bytesRead = _stream.Read(trailer.Slice(_z.AvailableBytesIn, bytesNeeded));
int bytesRead = await _stream.ReadAsync(trailer.Slice(_z.AvailableBytesIn, bytesNeeded));
if (bytesNeeded != bytesRead)
{
throw new ZlibException(String.Format(
"Protocol error. AvailableBytesIn={0}, expected 8",
_z.AvailableBytesIn + bytesRead));
throw new ZlibException($"Protocol error. AvailableBytesIn={_z.AvailableBytesIn + bytesRead}, expected 8");
}
}
else
{
_z.InputBuffer.AsSpan(_z.NextIn, trailer.Length).CopyTo(trailer);
_z.InputBuffer.AsSpan(_z.NextIn, trailer.Length).CopyTo(trailer.Span);
}
Int32 crc32_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer);
Int32 crc32_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer.Span);
Int32 crc32_actual = crc.Crc32Result;
Int32 isize_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer.Slice(4));
Int32 isize_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer.Span.Slice(4));
Int32 isize_actual = (Int32)(_z.TotalBytesOut & 0x00000000FFFFFFFF);
if (crc32_actual != crc32_expected)
{
throw new ZlibException(
String.Format("Bad CRC32 in GZIP stream. (actual({0:X8})!=expected({1:X8}))",
crc32_actual, crc32_expected));
$"Bad CRC32 in GZIP stream. (actual({crc32_actual:X8})!=expected({crc32_expected:X8}))");
}
if (isize_actual != isize_expected)
{
throw new ZlibException(
String.Format("Bad size in GZIP stream. (actual({0})!=expected({1}))", isize_actual,
isize_expected));
$"Bad size in GZIP stream. (actual({isize_actual})!=expected({isize_expected}))");
}
}
else
@@ -304,7 +300,7 @@ namespace SharpCompress.Compressors.Deflate
}
}
private void end()
private void End()
{
if (z is null)
{
@@ -321,36 +317,32 @@ namespace SharpCompress.Compressors.Deflate
_z = null;
}
protected override void Dispose(bool disposing)
public override async ValueTask DisposeAsync()
{
if (isDisposed)
if (_isDisposed)
{
return;
}
isDisposed = true;
base.Dispose(disposing);
if (disposing)
{
_isDisposed = true;
if (_stream is null)
{
return;
}
try
{
finish();
await FinishAsync();
}
finally
{
end();
_stream?.Dispose();
End();
await _stream.DisposeAsync();
_stream = null;
}
}
}
public override void Flush()
public override Task FlushAsync(CancellationToken cancellationToken)
{
_stream.Flush();
return _stream.FlushAsync(cancellationToken);
}
public override Int64 Seek(Int64 offset, SeekOrigin origin)
@@ -365,7 +357,7 @@ namespace SharpCompress.Compressors.Deflate
_stream.SetLength(value);
}
#if NOT
/*
public int Read()
{
if (Read(_buf1, 0, 1) == 0)
@@ -375,19 +367,19 @@ namespace SharpCompress.Compressors.Deflate
crc.SlurpBlock(_buf1,0,1);
return (_buf1[0] & 0xFF);
}
#endif
*/
private bool nomoreinput;
private bool isDisposed;
private bool _nomoreinput;
private bool _isDisposed;
private string ReadZeroTerminatedString()
private async Task<string> ReadZeroTerminatedStringAsync()
{
var list = new List<byte>();
bool done = false;
var done = false;
do
{
// workitem 7740
int n = _stream.Read(_buf1, 0, 1);
int n = await _stream.ReadAsync(_buf1, 0, 1);
if (n != 1)
{
throw new ZlibException("Unexpected EOF reading GZIP header.");
@@ -406,13 +398,14 @@ namespace SharpCompress.Compressors.Deflate
return _encoding.GetString(buffer, 0, buffer.Length);
}
private int _ReadAndValidateGzipHeader()
private async Task<int> ReadAndValidateGzipHeaderAsync(CancellationToken cancellationToken)
{
int totalBytesRead = 0;
var totalBytesRead = 0;
// read the header on the first read
Span<byte> header = stackalloc byte[10];
int n = _stream.Read(header);
using var rented = MemoryPool<byte>.Shared.Rent(10);
var header = rented.Memory.Slice(0, 10);
int n = await _stream.ReadAsync(header, cancellationToken);
// workitem 8501: handle edge case (decompress empty stream)
if (n == 0)
@@ -425,46 +418,46 @@ namespace SharpCompress.Compressors.Deflate
throw new ZlibException("Not a valid GZIP stream.");
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
if (header.Span[0] != 0x1F || header.Span[1] != 0x8B || header.Span[2] != 8)
{
throw new ZlibException("Bad GZIP header.");
}
int timet = BinaryPrimitives.ReadInt32LittleEndian(header.Slice(4));
int timet = BinaryPrimitives.ReadInt32LittleEndian(header.Span.Slice(4));
_GzipMtime = TarHeader.EPOCH.AddSeconds(timet);
totalBytesRead += n;
if ((header[3] & 0x04) == 0x04)
if ((header.Span[3] & 0x04) == 0x04)
{
// read and discard extra field
n = _stream.Read(header.Slice(0, 2)); // 2-byte length field
n = await _stream.ReadAsync(header.Slice(0, 2), cancellationToken); // 2-byte length field
totalBytesRead += n;
short extraLength = (short)(header[0] + header[1] * 256);
short extraLength = (short)(header.Span[0] + header.Span[1] * 256);
byte[] extra = new byte[extraLength];
n = _stream.Read(extra, 0, extra.Length);
n = await _stream.ReadAsync(extra, 0, extra.Length, cancellationToken);
if (n != extraLength)
{
throw new ZlibException("Unexpected end-of-file reading GZIP header.");
}
totalBytesRead += n;
}
if ((header[3] & 0x08) == 0x08)
if ((header.Span[3] & 0x08) == 0x08)
{
_GzipFileName = ReadZeroTerminatedString();
_GzipFileName = await ReadZeroTerminatedStringAsync();
}
if ((header[3] & 0x10) == 0x010)
if ((header.Span[3] & 0x10) == 0x010)
{
_GzipComment = ReadZeroTerminatedString();
_GzipComment = await ReadZeroTerminatedStringAsync();
}
if ((header[3] & 0x02) == 0x02)
if ((header.Span[3] & 0x02) == 0x02)
{
Read(_buf1, 0, 1); // CRC16, ignore
await ReadAsync(_buf1, 0, 1, cancellationToken); // CRC16, ignore
}
return totalBytesRead;
}
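// Illustrative sketch, not part of this commit: the fixed 10-byte GZIP header
// (RFC 1952) that ReadAndValidateGzipHeaderAsync parses above. Offsets: 0-1
// magic 0x1F 0x8B, 2 compression method (8 = deflate), 3 flags (FHCRC = 0x02,
// FEXTRA = 0x04, FNAME = 0x08, FCOMMENT = 0x10), 4-7 MTIME as little-endian
// seconds since the Unix epoch, 8 XFL, 9 OS.
private static byte[] BuildMinimalGzipHeader(DateTime mtimeUtc)
{
    var header = new byte[10];
    header[0] = 0x1F;
    header[1] = 0x8B;
    header[2] = 8; // deflate
    header[3] = 0; // no optional fields, so no extra/name/comment/CRC16 follow
    var epoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
    BinaryPrimitives.WriteInt32LittleEndian(header.AsSpan(4), (int)(mtimeUtc - epoch).TotalSeconds);
    return header; // XFL and OS stay zero
}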
public override Int32 Read(Byte[] buffer, Int32 offset, Int32 count)
public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
// According to MS documentation, any implementation of the IO.Stream.Read function must:
// (a) throw an exception if offset & count reference an invalid part of the buffer,
@@ -487,7 +480,7 @@ namespace SharpCompress.Compressors.Deflate
z.AvailableBytesIn = 0;
if (_flavor == ZlibStreamFlavor.GZIP)
{
_gzipHeaderByteCount = _ReadAndValidateGzipHeader();
_gzipHeaderByteCount = await ReadAndValidateGzipHeaderAsync(cancellationToken);
// workitem 8501: handle edge case (decompress empty stream)
if (_gzipHeaderByteCount == 0)
@@ -506,7 +499,7 @@ namespace SharpCompress.Compressors.Deflate
{
return 0;
}
if (nomoreinput && _wantCompress)
if (_nomoreinput && _wantCompress)
{
return 0; // workitem 8557
}
@@ -527,7 +520,7 @@ namespace SharpCompress.Compressors.Deflate
throw new ArgumentOutOfRangeException(nameof(count));
}
int rc = 0;
var rc = 0;
// set up the output of the deflate/inflate codec:
_z.OutputBuffer = buffer;
@@ -542,14 +535,14 @@ namespace SharpCompress.Compressors.Deflate
do
{
// need data in _workingBuffer in order to deflate/inflate. Here, we check if we have any.
if ((_z.AvailableBytesIn == 0) && (!nomoreinput))
if ((_z.AvailableBytesIn == 0) && (!_nomoreinput))
{
// No data available, so try to Read data from the captive stream.
_z.NextIn = 0;
_z.AvailableBytesIn = _stream.Read(_workingBuffer, 0, _workingBuffer.Length);
_z.AvailableBytesIn = await _stream.ReadAsync(_workingBuffer, 0, _workingBuffer.Length, cancellationToken);
if (_z.AvailableBytesIn == 0)
{
nomoreinput = true;
_nomoreinput = true;
}
}
@@ -558,23 +551,22 @@ namespace SharpCompress.Compressors.Deflate
? _z.Deflate(_flushMode)
: _z.Inflate(_flushMode);
if (nomoreinput && (rc == ZlibConstants.Z_BUF_ERROR))
if (_nomoreinput && (rc == ZlibConstants.Z_BUF_ERROR))
{
return 0;
}
if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
{
throw new ZlibException(String.Format("{0}flating: rc={1} msg={2}", (_wantCompress ? "de" : "in"),
rc, _z.Message));
throw new ZlibException($"{(_wantCompress ? "de" : "in")}flating: rc={rc} msg={_z.Message}");
}
if ((nomoreinput || rc == ZlibConstants.Z_STREAM_END) && (_z.AvailableBytesOut == count))
if ((_nomoreinput || rc == ZlibConstants.Z_STREAM_END) && (_z.AvailableBytesOut == count))
{
break; // nothing more to read
}
} //while (_z.AvailableBytesOut == count && rc == ZlibConstants.Z_OK);
while (_z.AvailableBytesOut > 0 && !nomoreinput && rc == ZlibConstants.Z_OK);
while (_z.AvailableBytesOut > 0 && !_nomoreinput && rc == ZlibConstants.Z_OK);
// workitem 8557
// is there more room in output?
@@ -586,7 +578,7 @@ namespace SharpCompress.Compressors.Deflate
}
// are we completely done reading?
if (nomoreinput)
if (_nomoreinput)
{
// and in compression?
if (_wantCompress)
@@ -597,7 +589,7 @@ namespace SharpCompress.Compressors.Deflate
if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
{
throw new ZlibException(String.Format("Deflating: rc={0} msg={1}", rc, _z.Message));
throw new ZlibException($"Deflating: rc={rc} msg={_z.Message}");
}
}
}
@@ -606,10 +598,7 @@ namespace SharpCompress.Compressors.Deflate
rc = (count - _z.AvailableBytesOut);
// calculate CRC after reading
if (crc != null)
{
crc.SlurpBlock(buffer, offset, rc);
}
crc?.SlurpBlock(buffer, offset, rc);
return rc;
}
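// Illustrative sketch, not part of this commit: the 8-byte GZIP trailer that
// FinishAsync emits and ReadAsync validates above — the CRC32 of the
// uncompressed data followed by ISIZE (uncompressed length mod 2^32), both
// little-endian.
private static byte[] BuildGzipTrailer(int crc32, long totalBytesRead)
{
    var trailer = new byte[8];
    BinaryPrimitives.WriteInt32LittleEndian(trailer.AsSpan(0), crc32);
    BinaryPrimitives.WriteInt32LittleEndian(trailer.AsSpan(4), (int)(totalBytesRead & 0x00000000FFFFFFFF));
    return trailer;
}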

View File

@@ -28,10 +28,13 @@
using System;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Deflate
{
public class ZlibStream : Stream
public class ZlibStream : AsyncStream
{
private readonly ZlibBaseStream _baseStream;
private bool _disposed;
@@ -204,35 +207,25 @@ namespace SharpCompress.Compressors.Deflate
/// <remarks>
/// This may or may not result in a <c>Close()</c> call on the captive stream.
/// </remarks>
protected override void Dispose(bool disposing)
public override async ValueTask DisposeAsync()
{
try
if (!_disposed)
{
if (!_disposed)
{
if (disposing)
{
_baseStream?.Dispose();
}
_disposed = true;
}
}
finally
{
base.Dispose(disposing);
await _baseStream.DisposeAsync();
_disposed = true;
}
}
/// <summary>
/// Flush the stream.
/// </summary>
public override void Flush()
public override Task FlushAsync(CancellationToken cancellationToken)
{
if (_disposed)
{
throw new ObjectDisposedException("ZlibStream");
}
_baseStream.Flush();
return _baseStream.FlushAsync(cancellationToken);
}
/// <summary>
@@ -261,22 +254,13 @@ namespace SharpCompress.Compressors.Deflate
/// <param name="buffer">The buffer into which the read data should be placed.</param>
/// <param name="offset">the offset within that data array to put the first byte read.</param>
/// <param name="count">the number of bytes to read.</param>
public override int Read(byte[] buffer, int offset, int count)
public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
if (_disposed)
{
throw new ObjectDisposedException("ZlibStream");
}
return _baseStream.Read(buffer, offset, count);
}
public override int ReadByte()
{
if (_disposed)
{
throw new ObjectDisposedException("ZlibStream");
}
return _baseStream.ReadByte();
return await _baseStream.ReadAsync(buffer, offset, count, cancellationToken);
}
/// <summary>
@@ -321,24 +305,14 @@ namespace SharpCompress.Compressors.Deflate
/// <param name="buffer">The buffer holding data to write to the stream.</param>
/// <param name="offset">the offset within that data array to find the first byte to write.</param>
/// <param name="count">the number of bytes to write.</param>
public override void Write(byte[] buffer, int offset, int count)
public override async Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
if (_disposed)
{
throw new ObjectDisposedException("ZlibStream");
}
_baseStream.Write(buffer, offset, count);
await _baseStream.WriteAsync(buffer, offset, count, cancellationToken);
}
public override void WriteByte(byte value)
{
if (_disposed)
{
throw new ObjectDisposedException("ZlibStream");
}
_baseStream.WriteByte(value);
}
#endregion System.IO.Stream methods
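// Usage sketch, not part of this commit; assumes the existing
// ZlibStream(Stream, CompressionMode) constructor is unchanged on this branch.
// Drains a compressed stream through the async read path above.
public static async Task<byte[]> InflateToArrayAsync(Stream compressed, CancellationToken cancellationToken)
{
    await using var zlib = new ZlibStream(compressed, CompressionMode.Decompress);
    using var output = new MemoryStream();
    var buffer = new byte[8192];
    int n;
    while ((n = await zlib.ReadAsync(buffer, 0, buffer.Length, cancellationToken)) > 0)
    {
        await output.WriteAsync(buffer, 0, n, cancellationToken);
    }
    return output.ToArray();
}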
}
}

View File

@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.SevenZip;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
@@ -91,8 +93,9 @@ namespace SharpCompress.Compressors.LZMA
}
}
private static Stream CreateDecoderStream(Stream[] packStreams, long[] packSizes, Stream[] outStreams,
CFolder folderInfo, int coderIndex, IPasswordProvider pass)
private static async ValueTask<Stream> CreateDecoderStream(Stream[] packStreams, long[] packSizes, Stream[] outStreams,
CFolder folderInfo, int coderIndex, IPasswordProvider pass,
CancellationToken cancellationToken)
{
var coderInfo = folderInfo._coders[coderIndex];
if (coderInfo._numOutStreams != 1)
@@ -127,8 +130,8 @@ namespace SharpCompress.Compressors.LZMA
}
int otherCoderIndex = FindCoderIndexForOutStreamIndex(folderInfo, pairedOutIndex);
inStreams[i] = CreateDecoderStream(packStreams, packSizes, outStreams, folderInfo, otherCoderIndex,
pass);
inStreams[i] = await CreateDecoderStream(packStreams, packSizes, outStreams, folderInfo, otherCoderIndex,
pass, cancellationToken);
//inStreamSizes[i] = folderInfo.UnpackSizes[pairedOutIndex];
@@ -154,11 +157,11 @@ namespace SharpCompress.Compressors.LZMA
}
long unpackSize = folderInfo._unpackSizes[outStreamId];
return DecoderRegistry.CreateDecoderStream(coderInfo._methodId, inStreams, coderInfo._props, pass, unpackSize);
return await DecoderRegistry.CreateDecoderStream(coderInfo._methodId, inStreams, coderInfo._props, pass, unpackSize, cancellationToken);
}
internal static Stream CreateDecoderStream(Stream inStream, long startPos, long[] packSizes, CFolder folderInfo,
IPasswordProvider pass)
internal static async ValueTask<Stream> CreateDecoderStream(Stream inStream, long startPos, long[] packSizes, CFolder folderInfo,
IPasswordProvider pass, CancellationToken cancellationToken)
{
if (!folderInfo.CheckStructure())
{
@@ -176,7 +179,7 @@ namespace SharpCompress.Compressors.LZMA
int primaryCoderIndex, primaryOutStreamIndex;
FindPrimaryOutStreamIndex(folderInfo, out primaryCoderIndex, out primaryOutStreamIndex);
return CreateDecoderStream(inStreams, packSizes, outStreams, folderInfo, primaryCoderIndex, pass);
return await CreateDecoderStream(inStreams, packSizes, outStreams, folderInfo, primaryCoderIndex, pass, cancellationToken);
}
}
}

View File

@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.LZMA
{
@@ -59,8 +61,8 @@ namespace SharpCompress.Compressors.LZMA
/// <param name="progress">
/// callback progress reference.
/// </param>
void Code(Stream inStream, Stream outStream,
Int64 inSize, Int64 outSize, ICodeProgress progress);
ValueTask CodeAsync(Stream inStream, Stream outStream,
Int64 inSize, Int64 outSize, ICodeProgress progress, CancellationToken cancellationToken);
}
/*

View File

@@ -1,6 +1,9 @@
using System;
using System.Buffers;
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Crypto;
using SharpCompress.IO;
@@ -14,61 +17,70 @@ namespace SharpCompress.Compressors.LZMA
/// <summary>
/// Stream supporting the LZIP format, as documented at http://www.nongnu.org/lzip/manual/lzip_manual.html
/// </summary>
public sealed class LZipStream : Stream
public sealed class LZipStream : AsyncStream
{
private readonly Stream _stream;
private readonly CountingWritableSubStream? _countingWritableSubStream;
#nullable disable
private Stream _stream;
#nullable enable
private CountingWritableSubStream? _countingWritableSubStream;
private bool _disposed;
private bool _finished;
private long _writeCount;
public LZipStream(Stream stream, CompressionMode mode)
private LZipStream()
{
Mode = mode;
}
public static async ValueTask<LZipStream> CreateAsync(Stream stream, CompressionMode mode)
{
var lzip = new LZipStream();
lzip.Mode = mode;
if (mode == CompressionMode.Decompress)
{
int dSize = ValidateAndReadSize(stream);
int dSize = await ValidateAndReadSize(stream);
if (dSize == 0)
{
throw new IOException("Not an LZip stream");
}
byte[] properties = GetProperties(dSize);
_stream = new LzmaStream(properties, stream);
lzip._stream = await LzmaStream.CreateAsync(properties, stream);
}
else
{
//default
int dSize = 104 * 1024;
WriteHeaderSize(stream);
await WriteHeaderSizeAsync(stream);
_countingWritableSubStream = new CountingWritableSubStream(stream);
_stream = new Crc32Stream(new LzmaStream(new LzmaEncoderProperties(true, dSize), false, _countingWritableSubStream));
lzip._countingWritableSubStream = new CountingWritableSubStream(stream);
lzip._stream = new Crc32Stream(new LzmaStream(new LzmaEncoderProperties(true, dSize), false, lzip._countingWritableSubStream));
}
return lzip;
}
public void Finish()
public async ValueTask FinishAsync()
{
if (!_finished)
{
if (Mode == CompressionMode.Compress)
{
var crc32Stream = (Crc32Stream)_stream;
crc32Stream.WrappedStream.Dispose();
crc32Stream.Dispose();
await crc32Stream.WrappedStream.DisposeAsync();
await crc32Stream.DisposeAsync();
var compressedCount = _countingWritableSubStream!.Count;
Span<byte> intBuf = stackalloc byte[8];
byte[] intBuf = new byte[8];
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, crc32Stream.Crc);
_countingWritableSubStream.Write(intBuf.Slice(0, 4));
await _countingWritableSubStream.WriteAsync(intBuf, 0, 4);
BinaryPrimitives.WriteInt64LittleEndian(intBuf, _writeCount);
_countingWritableSubStream.Write(intBuf);
await _countingWritableSubStream.WriteAsync(intBuf, 0, 8);
//total with headers
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, compressedCount + 6 + 20);
_countingWritableSubStream.Write(intBuf);
await _countingWritableSubStream.WriteAsync(intBuf, 0, 8);
}
_finished = true;
}
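// Illustrative sketch, not part of this commit: the 20-byte LZIP member
// trailer written above — CRC32 of the uncompressed data (4 bytes), the
// uncompressed data size (8 bytes), and the total member size, i.e. the
// 6-byte header plus compressed payload plus this 20-byte trailer (8 bytes),
// all little-endian.
private static byte[] BuildLzipTrailer(uint crc, long dataSize, ulong compressedCount)
{
    var trailer = new byte[20];
    BinaryPrimitives.WriteUInt32LittleEndian(trailer.AsSpan(0), crc);
    BinaryPrimitives.WriteInt64LittleEndian(trailer.AsSpan(4), dataSize);
    BinaryPrimitives.WriteUInt64LittleEndian(trailer.AsSpan(12), compressedCount + 6 + 20);
    return trailer;
}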
@@ -76,21 +88,18 @@ namespace SharpCompress.Compressors.LZMA
#region Stream methods
protected override void Dispose(bool disposing)
public override async ValueTask DisposeAsync()
{
if (_disposed)
{
return;
}
_disposed = true;
if (disposing)
{
Finish();
_stream.Dispose();
}
await FinishAsync();
await _stream.DisposeAsync();
}
public CompressionMode Mode { get; }
public CompressionMode Mode { get; private set; }
public override bool CanRead => Mode == CompressionMode.Decompress;
@@ -98,54 +107,38 @@ namespace SharpCompress.Compressors.LZMA
public override bool CanWrite => Mode == CompressionMode.Compress;
public override void Flush()
public override Task FlushAsync(CancellationToken cancellationToken)
{
_stream.Flush();
return _stream.FlushAsync(cancellationToken);
}
// TODO: Both Length and Position are sometimes feasible, but would require
// reading the output length when we initialize.
public override long Length => throw new NotImplementedException();
public override long Length => throw new NotSupportedException();
public override long Position { get => throw new NotImplementedException(); set => throw new NotImplementedException(); }
public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
public override int Read(byte[] buffer, int offset, int count) => _stream.Read(buffer, offset, count);
public override int ReadByte() => _stream.ReadByte();
public override ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken = new CancellationToken())
{
return _stream.ReadAsync(buffer, cancellationToken);
}
public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
public override void SetLength(long value) => throw new NotImplementedException();
public override void SetLength(long value) => throw new NotSupportedException();
#if !NET461 && !NETSTANDARD2_0
public override int Read(Span<byte> buffer)
public override async ValueTask WriteAsync(ReadOnlyMemory<byte> buffer, CancellationToken cancellationToken = new CancellationToken())
{
return _stream.Read(buffer);
}
public override void Write(ReadOnlySpan<byte> buffer)
{
_stream.Write(buffer);
await _stream.WriteAsync(buffer, cancellationToken);
_writeCount += buffer.Length;
}
#endif
public override void Write(byte[] buffer, int offset, int count)
public override async Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
_stream.Write(buffer, offset, count);
await _stream.WriteAsync(buffer, offset, count, cancellationToken);
_writeCount += count;
}
public override void WriteByte(byte value)
{
_stream.WriteByte(value);
++_writeCount;
}
#endregion
/// <summary>
@@ -155,14 +148,14 @@ namespace SharpCompress.Compressors.LZMA
/// </summary>
/// <param name="stream">The stream to read from. Must not be null.</param>
/// <returns><c>true</c> if the given stream is an LZip file, <c>false</c> otherwise.</returns>
public static bool IsLZipFile(Stream stream) => ValidateAndReadSize(stream) != 0;
public static async ValueTask<bool> IsLZipFileAsync(Stream stream) => await ValidateAndReadSize(stream) != 0;
/// <summary>
/// Reads the 6-byte header of the stream and returns 0 if the header
/// couldn't be read or isn't a valid LZIP header, or the dictionary
/// size if it *is* a valid LZIP file.
/// </summary>
public static int ValidateAndReadSize(Stream stream)
private static async ValueTask<int> ValidateAndReadSize(Stream stream)
{
if (stream is null)
{
@@ -170,8 +163,9 @@ namespace SharpCompress.Compressors.LZMA
}
// Read the header
Span<byte> header = stackalloc byte[6];
int n = stream.Read(header);
using var buffer = MemoryPool<byte>.Shared.Rent(6);
var header = buffer.Memory.Slice(0, 6);
int n = await stream.ReadAsync(header);
// TODO: Handle reading only part of the header?
@@ -180,18 +174,18 @@ namespace SharpCompress.Compressors.LZMA
return 0;
}
if (header[0] != 'L' || header[1] != 'Z' || header[2] != 'I' || header[3] != 'P' || header[4] != 1 /* version 1 */)
if (header.Span[0] != 'L' || header.Span[1] != 'Z' || header.Span[2] != 'I' || header.Span[3] != 'P' || header.Span[4] != 1 /* version 1 */)
{
return 0;
}
int basePower = header[5] & 0x1F;
int subtractionNumerator = (header[5] & 0xE0) >> 5;
int basePower = header.Span[5] & 0x1F;
int subtractionNumerator = (header.Span[5] & 0xE0) >> 5;
return (1 << basePower) - subtractionNumerator * (1 << (basePower - 4));
}
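// Worked example, illustrative only: the default header byte 113 (0x71)
// decodes as basePower = 0x71 & 0x1F = 17 and subtractionNumerator =
// (0x71 & 0xE0) >> 5 = 3, giving (1 << 17) - 3 * (1 << 13) = 131072 - 24576
// = 106496 bytes — exactly the 104 * 1024 dictionary-size default that
// CreateAsync uses above.
private static int DecodeDictionarySize(byte headerByte5)
{
    int basePower = headerByte5 & 0x1F; // 113 -> 17
    int subtractionNumerator = (headerByte5 & 0xE0) >> 5; // 113 -> 3
    return (1 << basePower) - subtractionNumerator * (1 << (basePower - 4));
}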
private static readonly byte[] headerBytes = new byte[6] { (byte)'L', (byte)'Z', (byte)'I', (byte)'P', 1, 113 };
public static void WriteHeaderSize(Stream stream)
public static async ValueTask WriteHeaderSizeAsync(Stream stream)
{
if (stream is null)
{
@@ -199,7 +193,7 @@ namespace SharpCompress.Compressors.LZMA
}
// hard coding the dictionary size encoding
stream.Write(headerBytes, 0, 6);
await stream.WriteAsync(headerBytes, 0, 6);
}
/// <summary>

View File

@@ -1,7 +1,7 @@
#nullable disable
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.LZMA.LZ;
using SharpCompress.Compressors.LZMA.RangeCoder;
@@ -11,11 +11,11 @@ namespace SharpCompress.Compressors.LZMA
{
private class LenDecoder
{
private BitDecoder _choice = new BitDecoder();
private BitDecoder _choice2 = new BitDecoder();
private BitDecoder _choice = new();
private BitDecoder _choice2 = new();
private readonly BitTreeDecoder[] _lowCoder = new BitTreeDecoder[Base.K_NUM_POS_STATES_MAX];
private readonly BitTreeDecoder[] _midCoder = new BitTreeDecoder[Base.K_NUM_POS_STATES_MAX];
private BitTreeDecoder _highCoder = new BitTreeDecoder(Base.K_NUM_HIGH_LEN_BITS);
private BitTreeDecoder _highCoder = new(Base.K_NUM_HIGH_LEN_BITS);
private uint _numPosStates;
public void Create(uint numPosStates)
@@ -40,21 +40,21 @@ namespace SharpCompress.Compressors.LZMA
_highCoder.Init();
}
public uint Decode(RangeCoder.Decoder rangeDecoder, uint posState)
public async ValueTask<uint> DecodeAsync(RangeCoder.Decoder rangeDecoder, uint posState, CancellationToken cancellationToken)
{
if (_choice.Decode(rangeDecoder) == 0)
if (await _choice.DecodeAsync(rangeDecoder, cancellationToken) == 0)
{
return _lowCoder[posState].Decode(rangeDecoder);
return await _lowCoder[posState].DecodeAsync(rangeDecoder, cancellationToken);
}
uint symbol = Base.K_NUM_LOW_LEN_SYMBOLS;
if (_choice2.Decode(rangeDecoder) == 0)
if (await _choice2.DecodeAsync(rangeDecoder, cancellationToken) == 0)
{
symbol += _midCoder[posState].Decode(rangeDecoder);
symbol += await _midCoder[posState].DecodeAsync(rangeDecoder, cancellationToken);
}
else
{
symbol += Base.K_NUM_MID_LEN_SYMBOLS;
symbol += _highCoder.Decode(rangeDecoder);
symbol += await _highCoder.DecodeAsync(rangeDecoder, cancellationToken);
}
return symbol;
}
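// Illustrative note, not part of this commit: the decoder above follows the
// LZMA length-code layout — _choice == 0 selects the low tree (3 bits,
// symbols 0..7), then _choice2 == 0 selects the mid tree (3 bits, symbols
// 8..15), otherwise the high tree contributes 8 bits for symbols 16..271.
private static (uint First, uint Last) LengthSymbolRange(bool choiceIsZero, bool choice2IsZero) =>
    choiceIsZero ? (0u, 7u)
    : choice2IsZero ? (8u, 15u)
    : (16u, 271u);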
@@ -79,31 +79,31 @@ namespace SharpCompress.Compressors.LZMA
}
}
public byte DecodeNormal(RangeCoder.Decoder rangeDecoder)
public async ValueTask<byte> DecodeNormalAsync(RangeCoder.Decoder rangeDecoder, CancellationToken cancellationToken)
{
uint symbol = 1;
do
{
symbol = (symbol << 1) | _decoders[symbol].Decode(rangeDecoder);
symbol = (symbol << 1) | await _decoders[symbol].DecodeAsync(rangeDecoder, cancellationToken);
}
while (symbol < 0x100);
return (byte)symbol;
}
public byte DecodeWithMatchByte(RangeCoder.Decoder rangeDecoder, byte matchByte)
public async ValueTask<byte> DecodeWithMatchByteAsync(RangeCoder.Decoder rangeDecoder, byte matchByte, CancellationToken cancellationToken)
{
uint symbol = 1;
do
{
uint matchBit = (uint)(matchByte >> 7) & 1;
matchByte <<= 1;
uint bit = _decoders[((1 + matchBit) << 8) + symbol].Decode(rangeDecoder);
uint bit = await _decoders[((1 + matchBit) << 8) + symbol].DecodeAsync(rangeDecoder, cancellationToken);
symbol = (symbol << 1) | bit;
if (matchBit != bit)
{
while (symbol < 0x100)
{
symbol = (symbol << 1) | _decoders[symbol].Decode(rangeDecoder);
symbol = (symbol << 1) | await _decoders[symbol].DecodeAsync(rangeDecoder, cancellationToken);
}
break;
}
@@ -113,12 +113,12 @@ namespace SharpCompress.Compressors.LZMA
}
}
private Decoder2[] _coders;
private int _numPrevBits;
private int _numPosBits;
private uint _posMask;
public void Create(int numPosBits, int numPrevBits)
private readonly Decoder2[] _coders;
private readonly int _numPrevBits;
private readonly int _numPosBits;
private readonly uint _posMask;
public LiteralDecoder(int numPosBits, int numPrevBits)
{
if (_coders != null && _numPrevBits == numPrevBits &&
_numPosBits == numPosBits)
@@ -150,18 +150,18 @@ namespace SharpCompress.Compressors.LZMA
return ((pos & _posMask) << _numPrevBits) + (uint)(prevByte >> (8 - _numPrevBits));
}
public byte DecodeNormal(RangeCoder.Decoder rangeDecoder, uint pos, byte prevByte)
public ValueTask<byte> DecodeNormalAsync(RangeCoder.Decoder rangeDecoder, uint pos, byte prevByte, CancellationToken cancellationToken)
{
return _coders[GetState(pos, prevByte)].DecodeNormal(rangeDecoder);
return _coders[GetState(pos, prevByte)].DecodeNormalAsync(rangeDecoder, cancellationToken);
}
public byte DecodeWithMatchByte(RangeCoder.Decoder rangeDecoder, uint pos, byte prevByte, byte matchByte)
public ValueTask<byte> DecodeWithMatchByteAsync(RangeCoder.Decoder rangeDecoder, uint pos, byte prevByte, byte matchByte, CancellationToken cancellationToken)
{
return _coders[GetState(pos, prevByte)].DecodeWithMatchByte(rangeDecoder, matchByte);
return _coders[GetState(pos, prevByte)].DecodeWithMatchByteAsync(rangeDecoder, matchByte, cancellationToken);
}
}
private OutWindow _outWindow;
private OutWindow? _outWindow;
private readonly BitDecoder[] _isMatchDecoders = new BitDecoder[Base.K_NUM_STATES << Base.K_NUM_POS_STATES_BITS_MAX];
private readonly BitDecoder[] _isRepDecoders = new BitDecoder[Base.K_NUM_STATES];
@@ -173,18 +173,18 @@ namespace SharpCompress.Compressors.LZMA
private readonly BitTreeDecoder[] _posSlotDecoder = new BitTreeDecoder[Base.K_NUM_LEN_TO_POS_STATES];
private readonly BitDecoder[] _posDecoders = new BitDecoder[Base.K_NUM_FULL_DISTANCES - Base.K_END_POS_MODEL_INDEX];
private BitTreeDecoder _posAlignDecoder = new BitTreeDecoder(Base.K_NUM_ALIGN_BITS);
private BitTreeDecoder _posAlignDecoder = new(Base.K_NUM_ALIGN_BITS);
private readonly LenDecoder _lenDecoder = new LenDecoder();
private readonly LenDecoder _repLenDecoder = new LenDecoder();
private readonly LenDecoder _lenDecoder = new();
private readonly LenDecoder _repLenDecoder = new();
private readonly LiteralDecoder _literalDecoder = new LiteralDecoder();
private LiteralDecoder? _literalDecoder;
private int _dictionarySize;
private uint _posStateMask;
private Base.State _state = new Base.State();
private Base.State _state = new();
private uint _rep0, _rep1, _rep2, _rep3;
public Decoder()
@@ -196,15 +196,16 @@ namespace SharpCompress.Compressors.LZMA
}
}
private void CreateDictionary()
private OutWindow CreateDictionary()
{
if (_dictionarySize < 0)
{
throw new InvalidParamException();
}
_outWindow = new OutWindow();
var outWindow = new OutWindow();
int blockSize = Math.Max(_dictionarySize, (1 << 12));
_outWindow.Create(blockSize);
outWindow.Create(blockSize);
return outWindow;
}
private void SetLiteralProperties(int lp, int lc)
@@ -217,7 +218,7 @@ namespace SharpCompress.Compressors.LZMA
{
throw new InvalidParamException();
}
_literalDecoder.Create(lp, lc);
_literalDecoder = new(lp, lc);
}
private void SetPosBitsProperties(int pb)
@@ -249,7 +250,7 @@ namespace SharpCompress.Compressors.LZMA
_isRepG2Decoders[i].Init();
}
_literalDecoder.Init();
_literalDecoder!.Init();
for (i = 0; i < Base.K_NUM_LEN_TO_POS_STATES; i++)
{
_posSlotDecoder[i].Init();
@@ -272,12 +273,12 @@ namespace SharpCompress.Compressors.LZMA
_rep3 = 0;
}
public void Code(Stream inStream, Stream outStream,
Int64 inSize, Int64 outSize, ICodeProgress progress)
public async ValueTask CodeAsync(Stream inStream, Stream outStream,
Int64 inSize, Int64 outSize, ICodeProgress progress, CancellationToken cancellationToken)
{
if (_outWindow is null)
{
CreateDictionary();
_outWindow = CreateDictionary();
}
_outWindow.Init(outStream);
if (outSize > 0)
@@ -290,9 +291,9 @@ namespace SharpCompress.Compressors.LZMA
}
RangeCoder.Decoder rangeDecoder = new RangeCoder.Decoder();
rangeDecoder.Init(inStream);
await rangeDecoder.InitAsync(inStream, cancellationToken);
Code(_dictionarySize, _outWindow, rangeDecoder);
await CodeAsync(_dictionarySize, _outWindow, rangeDecoder, cancellationToken);
_outWindow.ReleaseStream();
rangeDecoder.ReleaseStream();
@@ -308,8 +309,9 @@ namespace SharpCompress.Compressors.LZMA
_outWindow = null;
}
internal bool Code(int dictionarySize, OutWindow outWindow, RangeCoder.Decoder rangeDecoder)
internal async ValueTask<bool> CodeAsync(int dictionarySize, OutWindow outWindow, RangeCoder.Decoder rangeDecoder, CancellationToken cancellationToken)
{
// ??= short-circuits when _literalDecoder is non-null; when it is null, the
// CheckNotNull extension runs on the null receiver and is expected to throw.
_literalDecoder ??= _literalDecoder.CheckNotNull(nameof(_literalDecoder));
int dictionarySizeCheck = Math.Max(dictionarySize, 1);
outWindow.CopyPending();
@@ -317,19 +319,19 @@ namespace SharpCompress.Compressors.LZMA
while (outWindow.HasSpace)
{
uint posState = (uint)outWindow._total & _posStateMask;
if (_isMatchDecoders[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState].Decode(rangeDecoder) == 0)
if (await _isMatchDecoders[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState].DecodeAsync(rangeDecoder, cancellationToken) == 0)
{
byte b;
byte prevByte = outWindow.GetByte(0);
if (!_state.IsCharState())
{
b = _literalDecoder.DecodeWithMatchByte(rangeDecoder,
b = await _literalDecoder.DecodeWithMatchByteAsync(rangeDecoder,
(uint)outWindow._total, prevByte,
outWindow.GetByte((int)_rep0));
outWindow.GetByte((int)_rep0), cancellationToken);
}
else
{
b = _literalDecoder.DecodeNormal(rangeDecoder, (uint)outWindow._total, prevByte);
b = await _literalDecoder.DecodeNormalAsync(rangeDecoder, (uint)outWindow._total, prevByte, cancellationToken);
}
outWindow.PutByte(b);
_state.UpdateChar();
@@ -337,13 +339,13 @@ namespace SharpCompress.Compressors.LZMA
else
{
uint len;
if (_isRepDecoders[_state._index].Decode(rangeDecoder) == 1)
if (await _isRepDecoders[_state._index].DecodeAsync(rangeDecoder, cancellationToken) == 1)
{
if (_isRepG0Decoders[_state._index].Decode(rangeDecoder) == 0)
if (await _isRepG0Decoders[_state._index].DecodeAsync(rangeDecoder, cancellationToken) == 0)
{
if (
_isRep0LongDecoders[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState].Decode(
rangeDecoder) == 0)
await _isRep0LongDecoders[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState].DecodeAsync(
rangeDecoder, cancellationToken) == 0)
{
_state.UpdateShortRep();
outWindow.PutByte(outWindow.GetByte((int)_rep0));
@@ -353,13 +355,13 @@ namespace SharpCompress.Compressors.LZMA
else
{
UInt32 distance;
if (_isRepG1Decoders[_state._index].Decode(rangeDecoder) == 0)
if (await _isRepG1Decoders[_state._index].DecodeAsync(rangeDecoder, cancellationToken) == 0)
{
distance = _rep1;
}
else
{
if (_isRepG2Decoders[_state._index].Decode(rangeDecoder) == 0)
if (await _isRepG2Decoders[_state._index].DecodeAsync(rangeDecoder, cancellationToken) == 0)
{
distance = _rep2;
}
@@ -373,7 +375,7 @@ namespace SharpCompress.Compressors.LZMA
_rep1 = _rep0;
_rep0 = distance;
}
len = _repLenDecoder.Decode(rangeDecoder, posState) + Base.K_MATCH_MIN_LEN;
len = await _repLenDecoder.DecodeAsync(rangeDecoder, posState, cancellationToken) + Base.K_MATCH_MIN_LEN;
_state.UpdateRep();
}
else
@@ -381,23 +383,22 @@ namespace SharpCompress.Compressors.LZMA
_rep3 = _rep2;
_rep2 = _rep1;
_rep1 = _rep0;
len = Base.K_MATCH_MIN_LEN + _lenDecoder.Decode(rangeDecoder, posState);
len = Base.K_MATCH_MIN_LEN + await _lenDecoder.DecodeAsync(rangeDecoder, posState, cancellationToken);
_state.UpdateMatch();
uint posSlot = _posSlotDecoder[Base.GetLenToPosState(len)].Decode(rangeDecoder);
uint posSlot = await _posSlotDecoder[Base.GetLenToPosState(len)].DecodeAsync(rangeDecoder, cancellationToken);
if (posSlot >= Base.K_START_POS_MODEL_INDEX)
{
int numDirectBits = (int)((posSlot >> 1) - 1);
_rep0 = ((2 | (posSlot & 1)) << numDirectBits);
if (posSlot < Base.K_END_POS_MODEL_INDEX)
{
_rep0 += BitTreeDecoder.ReverseDecode(_posDecoders,
_rep0 - posSlot - 1, rangeDecoder, numDirectBits);
_rep0 += await BitTreeDecoder.ReverseDecode(_posDecoders,
_rep0 - posSlot - 1, rangeDecoder, numDirectBits, cancellationToken);
}
else
{
_rep0 += (rangeDecoder.DecodeDirectBits(
numDirectBits - Base.K_NUM_ALIGN_BITS) << Base.K_NUM_ALIGN_BITS);
_rep0 += _posAlignDecoder.ReverseDecode(rangeDecoder);
_rep0 += (await rangeDecoder.DecodeDirectBitsAsync(numDirectBits - Base.K_NUM_ALIGN_BITS, cancellationToken) << Base.K_NUM_ALIGN_BITS);
_rep0 += await _posAlignDecoder.ReverseDecode(rangeDecoder, cancellationToken);
}
}
else
@@ -450,7 +451,7 @@ namespace SharpCompress.Compressors.LZMA
{
if (_outWindow is null)
{
CreateDictionary();
_outWindow = CreateDictionary();
}
_outWindow.Train(stream);
}

View File

@@ -2,6 +2,8 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.LZMA.LZ;
using SharpCompress.Compressors.LZMA.RangeCoder;
@@ -61,7 +63,7 @@ namespace SharpCompress.Compressors.LZMA
return (UInt32)(G_FAST_POS[pos >> 26] + 52);
}
private Base.State _state = new Base.State();
private Base.State _state = new();
private Byte _previousByte;
private readonly UInt32[] _repDistances = new UInt32[Base.K_NUM_REP_DISTANCES];
@@ -97,18 +99,18 @@ namespace SharpCompress.Compressors.LZMA
}
}
public void Encode(RangeCoder.Encoder rangeEncoder, byte symbol)
public async ValueTask EncodeAsync(RangeCoder.Encoder rangeEncoder, byte symbol)
{
uint context = 1;
for (int i = 7; i >= 0; i--)
{
uint bit = (uint)((symbol >> i) & 1);
_encoders[context].Encode(rangeEncoder, bit);
await _encoders[context].EncodeAsync(rangeEncoder, bit);
context = (context << 1) | bit;
}
}
public void EncodeMatched(RangeCoder.Encoder rangeEncoder, byte matchByte, byte symbol)
public async ValueTask EncodeMatchedAsync(RangeCoder.Encoder rangeEncoder, byte matchByte, byte symbol)
{
uint context = 1;
bool same = true;
@@ -122,7 +124,7 @@ namespace SharpCompress.Compressors.LZMA
state += ((1 + matchBit) << 8);
same = (matchBit == bit);
}
_encoders[state].Encode(rangeEncoder, bit);
await _encoders[state].EncodeAsync(rangeEncoder, bit);
context = (context << 1) | bit;
}
}
@@ -196,11 +198,11 @@ namespace SharpCompress.Compressors.LZMA
private class LenEncoder
{
private BitEncoder _choice = new BitEncoder();
private BitEncoder _choice2 = new BitEncoder();
private BitEncoder _choice = new();
private BitEncoder _choice2 = new();
private readonly BitTreeEncoder[] _lowCoder = new BitTreeEncoder[Base.K_NUM_POS_STATES_ENCODING_MAX];
private readonly BitTreeEncoder[] _midCoder = new BitTreeEncoder[Base.K_NUM_POS_STATES_ENCODING_MAX];
private BitTreeEncoder _highCoder = new BitTreeEncoder(Base.K_NUM_HIGH_LEN_BITS);
private BitTreeEncoder _highCoder = new(Base.K_NUM_HIGH_LEN_BITS);
public LenEncoder()
{
@@ -223,26 +225,26 @@ namespace SharpCompress.Compressors.LZMA
_highCoder.Init();
}
public void Encode(RangeCoder.Encoder rangeEncoder, UInt32 symbol, UInt32 posState)
public async ValueTask EncodeAsync(RangeCoder.Encoder rangeEncoder, UInt32 symbol, UInt32 posState)
{
if (symbol < Base.K_NUM_LOW_LEN_SYMBOLS)
{
_choice.Encode(rangeEncoder, 0);
_lowCoder[posState].Encode(rangeEncoder, symbol);
await _choice.EncodeAsync(rangeEncoder, 0);
await _lowCoder[posState].EncodeAsync(rangeEncoder, symbol);
}
else
{
symbol -= Base.K_NUM_LOW_LEN_SYMBOLS;
_choice.Encode(rangeEncoder, 1);
await _choice.EncodeAsync(rangeEncoder, 1);
if (symbol < Base.K_NUM_MID_LEN_SYMBOLS)
{
_choice2.Encode(rangeEncoder, 0);
_midCoder[posState].Encode(rangeEncoder, symbol);
await _choice2.EncodeAsync(rangeEncoder, 0);
await _midCoder[posState].EncodeAsync(rangeEncoder, symbol);
}
else
{
_choice2.Encode(rangeEncoder, 1);
_highCoder.Encode(rangeEncoder, symbol - Base.K_NUM_MID_LEN_SYMBOLS);
await _choice2.EncodeAsync(rangeEncoder, 1);
await _highCoder.EncodeAsync(rangeEncoder, symbol - Base.K_NUM_MID_LEN_SYMBOLS);
}
}
}
@@ -309,9 +311,9 @@ namespace SharpCompress.Compressors.LZMA
}
}
public new void Encode(RangeCoder.Encoder rangeEncoder, UInt32 symbol, UInt32 posState)
public new async ValueTask EncodeAsync(RangeCoder.Encoder rangeEncoder, UInt32 symbol, UInt32 posState)
{
base.Encode(rangeEncoder, symbol, posState);
await base.EncodeAsync(rangeEncoder, symbol, posState);
if (--_counters[posState] == 0)
{
UpdateTable(posState);
@@ -361,7 +363,7 @@ namespace SharpCompress.Compressors.LZMA
private readonly Optimal[] _optimum = new Optimal[K_NUM_OPTS];
private BinTree _matchFinder;
private readonly RangeCoder.Encoder _rangeEncoder = new RangeCoder.Encoder();
private readonly RangeCoder.Encoder _rangeEncoder = new();
private readonly BitEncoder[] _isMatch =
new BitEncoder[Base.K_NUM_STATES << Base.K_NUM_POS_STATES_BITS_MAX];
@@ -379,12 +381,12 @@ namespace SharpCompress.Compressors.LZMA
private readonly BitEncoder[] _posEncoders =
new BitEncoder[Base.K_NUM_FULL_DISTANCES - Base.K_END_POS_MODEL_INDEX];
private BitTreeEncoder _posAlignEncoder = new BitTreeEncoder(Base.K_NUM_ALIGN_BITS);
private BitTreeEncoder _posAlignEncoder = new(Base.K_NUM_ALIGN_BITS);
private readonly LenPriceTableEncoder _lenEncoder = new LenPriceTableEncoder();
private readonly LenPriceTableEncoder _repMatchLenEncoder = new LenPriceTableEncoder();
private readonly LenPriceTableEncoder _lenEncoder = new();
private readonly LenPriceTableEncoder _repMatchLenEncoder = new();
private readonly LiteralEncoder _literalEncoder = new LiteralEncoder();
private readonly LiteralEncoder _literalEncoder = new();
private readonly UInt32[] _matchDistances = new UInt32[Base.K_MATCH_MAX_LEN * 2 + 2];
@@ -1189,40 +1191,40 @@ namespace SharpCompress.Compressors.LZMA
return (smallDist < ((UInt32)(1) << (32 - kDif)) && bigDist >= (smallDist << kDif));
}
private void WriteEndMarker(UInt32 posState)
private async ValueTask WriteEndMarkerAsync(UInt32 posState)
{
if (!_writeEndMark)
{
return;
}
_isMatch[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState].Encode(_rangeEncoder, 1);
_isRep[_state._index].Encode(_rangeEncoder, 0);
await _isMatch[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState].EncodeAsync(_rangeEncoder, 1);
await _isRep[_state._index].EncodeAsync(_rangeEncoder, 0);
_state.UpdateMatch();
UInt32 len = Base.K_MATCH_MIN_LEN;
_lenEncoder.Encode(_rangeEncoder, len - Base.K_MATCH_MIN_LEN, posState);
await _lenEncoder.EncodeAsync(_rangeEncoder, len - Base.K_MATCH_MIN_LEN, posState);
UInt32 posSlot = (1 << Base.K_NUM_POS_SLOT_BITS) - 1;
UInt32 lenToPosState = Base.GetLenToPosState(len);
_posSlotEncoder[lenToPosState].Encode(_rangeEncoder, posSlot);
await _posSlotEncoder[lenToPosState].EncodeAsync(_rangeEncoder, posSlot);
int footerBits = 30;
UInt32 posReduced = (((UInt32)1) << footerBits) - 1;
_rangeEncoder.EncodeDirectBits(posReduced >> Base.K_NUM_ALIGN_BITS, footerBits - Base.K_NUM_ALIGN_BITS);
_posAlignEncoder.ReverseEncode(_rangeEncoder, posReduced & Base.K_ALIGN_MASK);
await _rangeEncoder.EncodeDirectBits(posReduced >> Base.K_NUM_ALIGN_BITS, footerBits - Base.K_NUM_ALIGN_BITS);
await _posAlignEncoder.ReverseEncodeAsync(_rangeEncoder, posReduced & Base.K_ALIGN_MASK);
}
private void Flush(UInt32 nowPos)
private async ValueTask FlushAsync(UInt32 nowPos)
{
ReleaseMfStream();
WriteEndMarker(nowPos & _posStateMask);
_rangeEncoder.FlushData();
_rangeEncoder.FlushStream();
await WriteEndMarkerAsync(nowPos & _posStateMask);
await _rangeEncoder.FlushData();
await _rangeEncoder.FlushAsync();
}
public void CodeOneBlock(out Int64 inSize, out Int64 outSize, out bool finished)
public async ValueTask<(Int64, Int64, bool)> CodeOneBlockAsync()
{
inSize = 0;
outSize = 0;
finished = true;
long inSize = 0;
long outSize = 0;
var finished = true;
if (_inStream != null)
{
@@ -1233,7 +1235,7 @@ namespace SharpCompress.Compressors.LZMA
if (_finished)
{
return;
return (inSize, outSize, finished);
}
_finished = true;
@@ -1254,20 +1256,20 @@ namespace SharpCompress.Compressors.LZMA
if (_processingMode && _matchFinder.IsDataStarved)
{
_finished = false;
return;
return (inSize, outSize, finished);
}
if (_matchFinder.GetNumAvailableBytes() == 0)
{
Flush((UInt32)_nowPos64);
return;
await FlushAsync((UInt32)_nowPos64);
return (inSize, outSize, finished);
}
UInt32 len, numDistancePairs; // numDistancePairs is not used
ReadMatchDistances(out len, out numDistancePairs);
UInt32 posState = (UInt32)(_nowPos64) & _posStateMask;
_isMatch[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState].Encode(_rangeEncoder, 0);
await _isMatch[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState].EncodeAsync(_rangeEncoder, 0);
_state.UpdateChar();
Byte curByte = _matchFinder.GetIndexByte((Int32)(0 - _additionalOffset));
_literalEncoder.GetSubCoder((UInt32)(_nowPos64), _previousByte).Encode(_rangeEncoder, curByte);
await _literalEncoder.GetSubCoder((UInt32)(_nowPos64), _previousByte).EncodeAsync(_rangeEncoder, curByte);
_previousByte = curByte;
_additionalOffset--;
_nowPos64++;
@@ -1275,19 +1277,19 @@ namespace SharpCompress.Compressors.LZMA
if (_processingMode && _matchFinder.IsDataStarved)
{
_finished = false;
return;
return (inSize, outSize, finished);
}
if (_matchFinder.GetNumAvailableBytes() == 0)
{
Flush((UInt32)_nowPos64);
return;
await FlushAsync((UInt32)_nowPos64);
return (inSize, outSize, finished);
}
while (true)
{
if (_processingMode && _matchFinder.IsDataStarved)
{
_finished = false;
return;
return (inSize, outSize, finished);
}
UInt32 pos;
@@ -1297,51 +1299,51 @@ namespace SharpCompress.Compressors.LZMA
UInt32 complexState = (_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState;
if (len == 1 && pos == 0xFFFFFFFF)
{
_isMatch[complexState].Encode(_rangeEncoder, 0);
await _isMatch[complexState].EncodeAsync(_rangeEncoder, 0);
Byte curByte = _matchFinder.GetIndexByte((Int32)(0 - _additionalOffset));
LiteralEncoder.Encoder2 subCoder = _literalEncoder.GetSubCoder((UInt32)_nowPos64, _previousByte);
if (!_state.IsCharState())
{
Byte matchByte =
_matchFinder.GetIndexByte((Int32)(0 - _repDistances[0] - 1 - _additionalOffset));
subCoder.EncodeMatched(_rangeEncoder, matchByte, curByte);
await subCoder.EncodeMatchedAsync(_rangeEncoder, matchByte, curByte);
}
else
{
subCoder.Encode(_rangeEncoder, curByte);
await subCoder.EncodeAsync(_rangeEncoder, curByte);
}
_previousByte = curByte;
_state.UpdateChar();
}
else
{
_isMatch[complexState].Encode(_rangeEncoder, 1);
await _isMatch[complexState].EncodeAsync(_rangeEncoder, 1);
if (pos < Base.K_NUM_REP_DISTANCES)
{
_isRep[_state._index].Encode(_rangeEncoder, 1);
await _isRep[_state._index].EncodeAsync(_rangeEncoder, 1);
if (pos == 0)
{
_isRepG0[_state._index].Encode(_rangeEncoder, 0);
await _isRepG0[_state._index].EncodeAsync(_rangeEncoder, 0);
if (len == 1)
{
_isRep0Long[complexState].Encode(_rangeEncoder, 0);
await _isRep0Long[complexState].EncodeAsync(_rangeEncoder, 0);
}
else
{
_isRep0Long[complexState].Encode(_rangeEncoder, 1);
await _isRep0Long[complexState].EncodeAsync(_rangeEncoder, 1);
}
}
else
{
_isRepG0[_state._index].Encode(_rangeEncoder, 1);
await _isRepG0[_state._index].EncodeAsync(_rangeEncoder, 1);
if (pos == 1)
{
_isRepG1[_state._index].Encode(_rangeEncoder, 0);
await _isRepG1[_state._index].EncodeAsync(_rangeEncoder, 0);
}
else
{
_isRepG1[_state._index].Encode(_rangeEncoder, 1);
_isRepG2[_state._index].Encode(_rangeEncoder, pos - 2);
await _isRepG1[_state._index].EncodeAsync(_rangeEncoder, 1);
await _isRepG2[_state._index].EncodeAsync(_rangeEncoder, pos - 2);
}
}
if (len == 1)
@@ -1350,7 +1352,7 @@ namespace SharpCompress.Compressors.LZMA
}
else
{
_repMatchLenEncoder.Encode(_rangeEncoder, len - Base.K_MATCH_MIN_LEN, posState);
await _repMatchLenEncoder.EncodeAsync(_rangeEncoder, len - Base.K_MATCH_MIN_LEN, posState);
_state.UpdateRep();
}
UInt32 distance = _repDistances[pos];
@@ -1365,13 +1367,13 @@ namespace SharpCompress.Compressors.LZMA
}
else
{
_isRep[_state._index].Encode(_rangeEncoder, 0);
await _isRep[_state._index].EncodeAsync(_rangeEncoder, 0);
_state.UpdateMatch();
_lenEncoder.Encode(_rangeEncoder, len - Base.K_MATCH_MIN_LEN, posState);
await _lenEncoder.EncodeAsync(_rangeEncoder, len - Base.K_MATCH_MIN_LEN, posState);
pos -= Base.K_NUM_REP_DISTANCES;
UInt32 posSlot = GetPosSlot(pos);
UInt32 lenToPosState = Base.GetLenToPosState(len);
_posSlotEncoder[lenToPosState].Encode(_rangeEncoder, posSlot);
await _posSlotEncoder[lenToPosState].EncodeAsync(_rangeEncoder, posSlot);
if (posSlot >= Base.K_START_POS_MODEL_INDEX)
{
@@ -1381,15 +1383,15 @@ namespace SharpCompress.Compressors.LZMA
if (posSlot < Base.K_END_POS_MODEL_INDEX)
{
BitTreeEncoder.ReverseEncode(_posEncoders,
baseVal - posSlot - 1, _rangeEncoder, footerBits,
posReduced);
await BitTreeEncoder.ReverseEncodeAsync(_posEncoders,
baseVal - posSlot - 1, _rangeEncoder, footerBits,
posReduced);
}
else
{
_rangeEncoder.EncodeDirectBits(posReduced >> Base.K_NUM_ALIGN_BITS,
await _rangeEncoder.EncodeDirectBits(posReduced >> Base.K_NUM_ALIGN_BITS,
footerBits - Base.K_NUM_ALIGN_BITS);
_posAlignEncoder.ReverseEncode(_rangeEncoder, posReduced & Base.K_ALIGN_MASK);
await _posAlignEncoder.ReverseEncodeAsync(_rangeEncoder, posReduced & Base.K_ALIGN_MASK);
_alignPriceCount++;
}
}
@@ -1421,19 +1423,19 @@ namespace SharpCompress.Compressors.LZMA
if (_processingMode && _matchFinder.IsDataStarved)
{
_finished = false;
return;
return (inSize, outSize, finished);
}
if (_matchFinder.GetNumAvailableBytes() == 0)
{
Flush((UInt32)_nowPos64);
return;
await FlushAsync((UInt32)_nowPos64);
return (inSize, outSize, finished);
}
if (_nowPos64 - progressPosValuePrev >= (1 << 12))
{
_finished = false;
finished = false;
return;
return (inSize, outSize, finished);
}
}
}
@@ -1488,8 +1490,8 @@ namespace SharpCompress.Compressors.LZMA
_nowPos64 = 0;
}
public void Code(Stream inStream, Stream outStream,
Int64 inSize, Int64 outSize, ICodeProgress progress)
public async ValueTask CodeAsync(Stream inStream, Stream outStream,
Int64 inSize, Int64 outSize, ICodeProgress progress, CancellationToken cancellationToken)
{
_needReleaseMfStream = false;
_processingMode = false;
@@ -1498,10 +1500,7 @@ namespace SharpCompress.Compressors.LZMA
SetStreams(inStream, outStream, inSize, outSize);
while (true)
{
Int64 processedInSize;
Int64 processedOutSize;
bool finished;
CodeOneBlock(out processedInSize, out processedOutSize, out finished);
var (processedInSize, processedOutSize, finished) = await CodeOneBlockAsync();
if (finished)
{
return;
@@ -1518,7 +1517,7 @@ namespace SharpCompress.Compressors.LZMA
}
}
public long Code(Stream inStream, bool final)
public async ValueTask<long> CodeAsync(Stream inStream, bool final)
{
_matchFinder.SetStream(inStream);
_processingMode = !final;
@@ -1526,10 +1525,7 @@ namespace SharpCompress.Compressors.LZMA
{
while (true)
{
Int64 processedInSize;
Int64 processedOutSize;
bool finished;
CodeOneBlock(out processedInSize, out processedOutSize, out finished);
var (processedInSize, processedOutSize, finished) = await CodeOneBlockAsync();
if (finished)
{
return processedInSize;

View File

@@ -1,21 +1,25 @@
#nullable disable
using System;
using System.Buffers;
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.LZMA.LZ;
using SharpCompress.IO;
namespace SharpCompress.Compressors.LZMA
{
public class LzmaStream : Stream
public class LzmaStream : AsyncStream
{
private readonly Stream _inputStream;
private readonly long _inputSize;
private readonly long _outputSize;
private Stream _inputStream;
private long _inputSize;
private long _outputSize;
private readonly int _dictionarySize;
private readonly OutWindow _outWindow = new OutWindow();
private readonly RangeCoder.Decoder _rangeDecoder = new RangeCoder.Decoder();
private int _dictionarySize;
private OutWindow _outWindow = new OutWindow();
private RangeCoder.Decoder _rangeDecoder = new RangeCoder.Decoder();
private Decoder _decoder;
private long _position;
@@ -25,70 +29,60 @@ namespace SharpCompress.Compressors.LZMA
private long _inputPosition;
// LZMA2
private readonly bool _isLzma2;
private bool _isLzma2;
private bool _uncompressedChunk;
private bool _needDictReset = true;
private bool _needProps = true;
private readonly Encoder _encoder;
private bool _isDisposed;
private LzmaStream() {}
public LzmaStream(byte[] properties, Stream inputStream)
: this(properties, inputStream, -1, -1, null, properties.Length < 5)
public static async ValueTask<LzmaStream> CreateAsync(byte[] properties, Stream inputStream, long inputSize = -1, long outputSize = -1,
Stream presetDictionary = null, bool? isLzma2 = null, CancellationToken cancellationToken = default)
{
}
var ls = new LzmaStream();
ls._inputStream = inputStream;
ls._inputSize = inputSize;
ls._outputSize = outputSize;
ls._isLzma2 = isLzma2 ?? properties.Length < 5;
public LzmaStream(byte[] properties, Stream inputStream, long inputSize)
: this(properties, inputStream, inputSize, -1, null, properties.Length < 5)
{
}
public LzmaStream(byte[] properties, Stream inputStream, long inputSize, long outputSize)
: this(properties, inputStream, inputSize, outputSize, null, properties.Length < 5)
{
}
public LzmaStream(byte[] properties, Stream inputStream, long inputSize, long outputSize,
Stream presetDictionary, bool isLzma2)
{
_inputStream = inputStream;
_inputSize = inputSize;
_outputSize = outputSize;
_isLzma2 = isLzma2;
if (!isLzma2)
if (!ls._isLzma2)
{
_dictionarySize = BinaryPrimitives.ReadInt32LittleEndian(properties.AsSpan(1));
_outWindow.Create(_dictionarySize);
ls._dictionarySize = BinaryPrimitives.ReadInt32LittleEndian(properties.AsSpan(1));
ls._outWindow.Create(ls._dictionarySize);
if (presetDictionary != null)
{
_outWindow.Train(presetDictionary);
ls._outWindow.Train(presetDictionary);
}
_rangeDecoder.Init(inputStream);
await ls._rangeDecoder.InitAsync(inputStream, cancellationToken);
_decoder = new Decoder();
_decoder.SetDecoderProperties(properties);
Properties = properties;
ls._decoder = new Decoder();
ls._decoder.SetDecoderProperties(properties);
ls.Properties = properties;
_availableBytes = outputSize < 0 ? long.MaxValue : outputSize;
_rangeDecoderLimit = inputSize;
ls._availableBytes = outputSize < 0 ? long.MaxValue : outputSize;
ls._rangeDecoderLimit = inputSize;
}
else
{
_dictionarySize = 2 | (properties[0] & 1);
_dictionarySize <<= (properties[0] >> 1) + 11;
ls._dictionarySize = 2 | (properties[0] & 1);
ls._dictionarySize <<= (properties[0] >> 1) + 11;
_outWindow.Create(_dictionarySize);
ls._outWindow.Create(ls._dictionarySize);
if (presetDictionary != null)
{
_outWindow.Train(presetDictionary);
_needDictReset = false;
ls._outWindow.Train(presetDictionary);
ls._needDictReset = false;
}
Properties = new byte[1];
_availableBytes = 0;
ls.Properties = new byte[1];
ls._availableBytes = 0;
}
return ls;
}
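// Worked example, illustrative only, for the LZMA2 dictionary-size decoding
// above: property byte p yields (2 | (p & 1)) << ((p >> 1) + 11), so
// p = 0 -> 4 KiB, p = 1 -> 6 KiB, p = 2 -> 8 KiB, p = 3 -> 12 KiB, and so on
// up the alternating 2-and-3 progression.
private static int Lzma2DictionarySize(byte p)
{
    int size = 2 | (p & 1);
    size <<= (p >> 1) + 11;
    return size;
}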
public LzmaStream(LzmaEncoderProperties properties, bool isLzma2, Stream outputStream)
@@ -126,33 +120,25 @@ namespace SharpCompress.Compressors.LZMA
public override bool CanWrite => _encoder != null;
public override void Flush()
{
}
protected override void Dispose(bool disposing)
public override async ValueTask DisposeAsync()
{
if (_isDisposed)
{
return;
}
_isDisposed = true;
if (disposing)
if (_encoder != null)
{
if (_encoder != null)
{
_position = _encoder.Code(null, true);
}
_inputStream?.Dispose();
_position = await _encoder.CodeAsync(null, true);
}
base.Dispose(disposing);
if (_inputStream != null)
{
    await _inputStream.DisposeAsync();
}
}
public override long Length => _position + _availableBytes;
public override long Position { get => _position; set => throw new NotSupportedException(); }
public override int Read(byte[] buffer, int offset, int count)
public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
if (_endReached)
{
@@ -166,7 +152,7 @@ namespace SharpCompress.Compressors.LZMA
{
if (_isLzma2)
{
DecodeChunkHeader();
await DecodeChunkHeader(cancellationToken);
}
else
{
@@ -189,7 +175,7 @@ namespace SharpCompress.Compressors.LZMA
{
_inputPosition += _outWindow.CopyStream(_inputStream, toProcess);
}
else if (_decoder.Code(_dictionarySize, _outWindow, _rangeDecoder)
else if (await _decoder.CodeAsync(_dictionarySize, _outWindow, _rangeDecoder, cancellationToken)
&& _outputSize < 0)
{
_availableBytes = _outWindow.AvailableBytes;
@@ -231,7 +217,7 @@ namespace SharpCompress.Compressors.LZMA
return total;
}
private void DecodeChunkHeader()
private async ValueTask DecodeChunkHeader(CancellationToken cancellationToken)
{
int control = _inputStream.ReadByte();
_inputPosition++;
@@ -283,7 +269,7 @@ namespace SharpCompress.Compressors.LZMA
_decoder.SetDecoderProperties(Properties);
}
_rangeDecoder.Init(_inputStream);
await _rangeDecoder.InitAsync(_inputStream, cancellationToken);
}
else if (control > 0x02)
{
@@ -307,14 +293,25 @@ namespace SharpCompress.Compressors.LZMA
throw new NotSupportedException();
}
public override void Write(byte[] buffer, int offset, int count)
public override async Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
if (_encoder != null)
{
_position = _encoder.Code(new MemoryStream(buffer, offset, count), false);
_position = await _encoder.CodeAsync(new MemoryStream(buffer, offset, count), false);
}
}
public byte[] Properties { get; } = new byte[5];
public override async ValueTask WriteAsync(ReadOnlyMemory<byte> buffer, CancellationToken cancellationToken = new CancellationToken())
{
if (_encoder != null)
{
var m = ArrayPool<byte>.Shared.Rent(buffer.Length);
buffer.CopyTo(m.AsMemory().Slice(0, buffer.Length));
_position = await _encoder.CodeAsync(new MemoryStream(m, 0, buffer.Length), false);
ArrayPool<byte>.Shared.Return(m);
}
}
public byte[] Properties { get; private set; }
}
}

View File

@@ -1,11 +1,14 @@
#nullable disable
using System;
using System.Buffers;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.LZMA.RangeCoder
{
internal class Encoder
internal class Encoder : IAsyncDisposable
{
public const uint K_TOP_VALUE = (1 << 24);
@@ -38,43 +41,46 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
_cache = 0;
}
public void FlushData()
public async ValueTask FlushData()
{
for (int i = 0; i < 5; i++)
{
ShiftLow();
await ShiftLowAsync();
}
}
public void FlushStream()
public Task FlushAsync()
{
_stream.Flush();
return _stream.FlushAsync();
}
public void CloseStream()
public ValueTask DisposeAsync()
{
_stream.Dispose();
return _stream.DisposeAsync();
}
public void Encode(uint start, uint size, uint total)
public async ValueTask EncodeAsync(uint start, uint size, uint total)
{
_low += start * (_range /= total);
_range *= size;
while (_range < K_TOP_VALUE)
{
_range <<= 8;
ShiftLow();
await ShiftLowAsync();
}
}
public void ShiftLow()
public async ValueTask ShiftLowAsync()
{
if ((uint)_low < 0xFF000000 || (uint)(_low >> 32) == 1)
{
using var buffer = MemoryPool<byte>.Shared.Rent(1);
var b = buffer.Memory.Slice(0,1);
byte temp = _cache;
do
{
_stream.WriteByte((byte)(temp + (_low >> 32)));
b.Span[0] = (byte)(temp + (_low >> 32));
await _stream.WriteAsync(b);
temp = 0xFF;
}
while (--_cacheSize != 0);
@@ -84,7 +90,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
_low = ((uint)_low) << 8;
}
public void EncodeDirectBits(uint v, int numTotalBits)
public async ValueTask EncodeDirectBits(uint v, int numTotalBits)
{
for (int i = numTotalBits - 1; i >= 0; i--)
{
@@ -96,12 +102,12 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
if (_range < K_TOP_VALUE)
{
_range <<= 8;
ShiftLow();
await ShiftLowAsync();
}
}
}
public void EncodeBit(uint size0, int numTotalBits, uint symbol)
public async ValueTask EncodeBitAsync(uint size0, int numTotalBits, uint symbol)
{
uint newBound = (_range >> numTotalBits) * size0;
if (symbol == 0)
@@ -116,7 +122,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
while (_range < K_TOP_VALUE)
{
_range <<= 8;
ShiftLow();
await ShiftLowAsync();
}
}
@@ -129,7 +135,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
}
}
internal class Decoder
internal class Decoder: IAsyncDisposable
{
public const uint K_TOP_VALUE = (1 << 24);
public uint _range;
@@ -139,7 +145,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
public Stream _stream;
public long _total;
public void Init(Stream stream)
public async ValueTask InitAsync(Stream stream, CancellationToken cancellationToken)
{
// Stream.Init(stream);
_stream = stream;
@@ -148,7 +154,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
_range = 0xFFFFFFFF;
for (int i = 0; i < 5; i++)
{
_code = (_code << 8) | (byte)_stream.ReadByte();
_code = (_code << 8) | await _stream.ReadByteAsync(cancellationToken);
}
_total = 5;
}
@@ -159,44 +165,34 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
_stream = null;
}
public void CloseStream()
public ValueTask DisposeAsync()
{
_stream.Dispose();
return _stream.DisposeAsync();
}
public void Normalize()
public async ValueTask NormalizeAsync(CancellationToken cancellationToken)
{
while (_range < K_TOP_VALUE)
{
_code = (_code << 8) | (byte)_stream.ReadByte();
_code = (_code << 8) | await _stream.ReadByteAsync(cancellationToken);
_range <<= 8;
_total++;
}
}
public void Normalize2()
{
if (_range < K_TOP_VALUE)
{
_code = (_code << 8) | (byte)_stream.ReadByte();
_range <<= 8;
_total++;
}
}
public uint GetThreshold(uint total)
{
return _code / (_range /= total);
}
public void Decode(uint start, uint size)
public async ValueTask DecodeAsync(uint start, uint size, CancellationToken cancellationToken)
{
_code -= start * _range;
_range *= size;
Normalize();
await NormalizeAsync(cancellationToken);
}
public uint DecodeDirectBits(int numTotalBits)
public async ValueTask<uint> DecodeDirectBitsAsync(int numTotalBits, CancellationToken cancellationToken)
{
uint range = _range;
uint code = _code;
@@ -218,7 +214,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
if (range < K_TOP_VALUE)
{
code = (code << 8) | (byte)_stream.ReadByte();
code = (code << 8) | await _stream.ReadByteAsync(cancellationToken);
range <<= 8;
_total++;
}
@@ -228,7 +224,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
return result;
}
public uint DecodeBit(uint size0, int numTotalBits)
public async ValueTask<uint> DecodeBitAsync(uint size0, int numTotalBits, CancellationToken cancellationToken)
{
uint newBound = (_range >> numTotalBits) * size0;
uint symbol;
@@ -243,7 +239,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
_code -= newBound;
_range -= newBound;
}
Normalize();
await NormalizeAsync(cancellationToken);
return symbol;
}
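The decoder paths above lean on a Stream.ReadByteAsync extension that this excerpt does not show. A plausible minimal shape, assuming it returns the byte read; note that the synchronous code it replaces cast ReadByte()'s -1 end-of-stream sentinel straight to byte (0xFF), whereas an async helper can fail loudly instead:

using System.IO;
using System.Threading;
using System.Threading.Tasks;

internal static class StreamReadByteSketch
{
    // Hypothetical shape of the ReadByteAsync helper the range coder calls;
    // the real SharpCompress extension may differ.
    public static async ValueTask<byte> ReadByteAsync(this Stream stream, CancellationToken cancellationToken)
    {
        var single = new byte[1];
        int read = await stream.ReadAsync(single, 0, 1, cancellationToken);
        if (read == 0)
        {
            // The sync path silently truncated -1 to 0xFF on a short or
            // corrupt stream; throwing makes the failure visible.
            throw new EndOfStreamException();
        }
        return single[0];
    }
}

A production version would presumably pool or reuse the one-byte buffer rather than allocating per call, since normalization runs on the decoder's hot path.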


@@ -1,4 +1,7 @@
using System;
using System.Buffers;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.LZMA.RangeCoder
{
@@ -29,7 +32,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
}
}
public void Encode(Encoder encoder, uint symbol)
public async ValueTask EncodeAsync(Encoder encoder, uint symbol)
{
// encoder.EncodeBit(Prob, kNumBitModelTotalBits, symbol);
// UpdateModel(symbol);
@@ -48,7 +51,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
if (encoder._range < Encoder.K_TOP_VALUE)
{
encoder._range <<= 8;
encoder.ShiftLow();
await encoder.ShiftLowAsync();
}
}
@@ -110,7 +113,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
_prob = K_BIT_MODEL_TOTAL >> 1;
}
public uint Decode(Decoder rangeDecoder)
public async ValueTask<uint> DecodeAsync(Decoder rangeDecoder, CancellationToken cancellationToken)
{
uint newBound = (rangeDecoder._range >> K_NUM_BIT_MODEL_TOTAL_BITS) * _prob;
if (rangeDecoder._code < newBound)
@@ -119,7 +122,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
_prob += (K_BIT_MODEL_TOTAL - _prob) >> K_NUM_MOVE_BITS;
if (rangeDecoder._range < Decoder.K_TOP_VALUE)
{
rangeDecoder._code = (rangeDecoder._code << 8) | (byte)rangeDecoder._stream.ReadByte();
rangeDecoder._code = (rangeDecoder._code << 8) | await rangeDecoder._stream.ReadByteAsync(cancellationToken);
rangeDecoder._range <<= 8;
rangeDecoder._total++;
}
@@ -130,7 +133,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
_prob -= (_prob) >> K_NUM_MOVE_BITS;
if (rangeDecoder._range < Decoder.K_TOP_VALUE)
{
rangeDecoder._code = (rangeDecoder._code << 8) | (byte)rangeDecoder._stream.ReadByte();
rangeDecoder._code = (rangeDecoder._code << 8) | await rangeDecoder._stream.ReadByteAsync(cancellationToken);
rangeDecoder._range <<= 8;
rangeDecoder._total++;
}
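For orientation, the BitDecoder above implements the standard LZMA adaptive binary model: split the range at newBound = (_range >> K_NUM_BIT_MODEL_TOTAL_BITS) * _prob, move _prob toward the symbol just observed by 1/2^K_NUM_MOVE_BITS of the remaining distance, and pull in another input byte whenever _range drops below K_TOP_VALUE (1 << 24). A self-contained synchronous restatement, assuming the usual LZMA constants (11 model bits, 5 move bits), with stream I/O reduced to a delegate:

using System;

internal sealed class AdaptiveBitModelSketch
{
    private const int KNumBitModelTotalBits = 11;
    private const uint KBitModelTotal = 1u << KNumBitModelTotalBits;
    private const int KNumMoveBits = 5;
    private const uint KTopValue = 1u << 24;

    private uint _prob = KBitModelTotal >> 1; // start at probability 1/2

    public uint DecodeBit(ref uint range, ref uint code, Func<byte> nextByte)
    {
        uint newBound = (range >> KNumBitModelTotalBits) * _prob;
        uint symbol;
        if (code < newBound)
        {
            range = newBound;                                  // keep the 0 interval
            _prob += (KBitModelTotal - _prob) >> KNumMoveBits; // raise P(0)
            symbol = 0;
        }
        else
        {
            code -= newBound;                                  // drop the 0 interval
            range -= newBound;
            _prob -= _prob >> KNumMoveBits;                    // lower P(0)
            symbol = 1;
        }
        if (range < KTopValue)
        {
            // Renormalize so the coder keeps at least 24 bits of precision,
            // mirroring the inlined normalization in the diff above.
            code = (code << 8) | nextByte();
            range <<= 8;
        }
        return symbol;
    }
}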

Some files were not shown because too many files have changed in this diff.