Compare commits

...

50 Commits

Author SHA1 Message Date
Adam Hathcock
dfc303ff6c start of create pattern 2026-01-19 07:14:08 +00:00
Adam Hathcock
dea635e39e Intermediate commit of rar async reading 2026-01-18 16:29:03 +00:00
Adam Hathcock
f359f553b3 some minor fixes 2026-01-18 15:31:10 +00:00
Adam Hathcock
08118f7286 add more async writing 2026-01-18 15:07:02 +00:00
Adam Hathcock
408d2e6663 Async add entry 2026-01-18 14:57:01 +00:00
Adam Hathcock
4c4b727bd7 Tar detection works 2026-01-17 13:39:57 +00:00
Adam Hathcock
8e54b10b7f tar tests are better? 2026-01-16 15:10:08 +00:00
Adam Hathcock
f99e421115 fix factory 2026-01-16 15:04:01 +00:00
Adam Hathcock
82d56b9678 multi-file rars done manually 2026-01-16 13:43:26 +00:00
Adam Hathcock
447d35267f some fixes 2026-01-16 13:19:41 +00:00
Adam Hathcock
763805e03a async IsRarFile 2026-01-16 12:12:51 +00:00
Adam Hathcock
cd70a7760e remvoe AutoFactory 2026-01-16 11:44:12 +00:00
Adam Hathcock
ec7c359341 Arj works 2026-01-16 11:12:26 +00:00
Adam Hathcock
cc59c1960a fix ace tests 2026-01-16 10:49:18 +00:00
Adam Hathcock
1cc80e7675 Merge pull request #1141 from adamhathcock/copilot/sub-pr-1132
[WIP] Address feedback on async creation cleanup changes
2026-01-16 10:12:08 +00:00
Adam Hathcock
cfe59fc515 Merge branch 'adam/async-creation' into copilot/sub-pr-1132 2026-01-16 10:11:45 +00:00
copilot-swe-agent[bot]
2180df3318 Pass CancellationToken.None explicitly to OpenAsyncArchive methods
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 10:09:54 +00:00
Adam Hathcock
29f4c7fe2e Merge pull request #1142 from adamhathcock/copilot/sub-pr-1132-another-one
Fix ReadFullyAsync with ArrayPool buffer in SevenZipArchive signature check
2026-01-16 10:09:07 +00:00
Adam Hathcock
d5f9815561 Merge pull request #1136 from adamhathcock/adam/upgrade-xunit
Upgrade xunit to v3
2026-01-16 10:08:23 +00:00
copilot-swe-agent[bot]
6e5e47f041 Update SevenZipFactory to consistently call OpenAsyncArchive methods
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 10:07:37 +00:00
copilot-swe-agent[bot]
b0fde2b8c7 Fix ReadFullyAsync call to specify offset and count for ArrayPool buffer
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 10:03:48 +00:00
copilot-swe-agent[bot]
4b9b20de42 Initial plan 2026-01-16 09:59:14 +00:00
Adam Hathcock
f7c91bb26f Update src/SharpCompress/Factories/SevenZipFactory.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-01-16 09:58:09 +00:00
copilot-swe-agent[bot]
4b34dd61d3 Initial plan 2026-01-16 09:58:06 +00:00
Adam Hathcock
c958d184d0 Merge pull request #1137 from adamhathcock/copilot/sub-pr-1136
Fix async test failures after xunit v3 upgrade
2026-01-16 09:54:04 +00:00
copilot-swe-agent[bot]
0de5c59a77 Restore AsyncOnlyStream in archive async tests as requested
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 09:50:36 +00:00
Adam Hathcock
3b10be53b5 Merge pull request #1140 from adamhathcock/copilot/sub-pr-1132-another-one
Replace empty catch blocks with explicit exception handling in TarArchive validation methods
2026-01-16 09:39:45 +00:00
Adam Hathcock
5336eb6fe6 Merge pull request #1138 from adamhathcock/copilot/sub-pr-1132
Remove redundant stream field in AsyncOnlyStream
2026-01-16 09:38:42 +00:00
copilot-swe-agent[bot]
9fa686b8f9 Fix empty catch blocks in TarArchive.Factory.cs with explicit exception handling
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 09:35:24 +00:00
copilot-swe-agent[bot]
2012077fb0 Remove redundant _stream field from AsyncOnlyStream and use base Stream property
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 09:34:29 +00:00
copilot-swe-agent[bot]
302cf2e14f Initial plan 2026-01-16 09:30:05 +00:00
Adam Hathcock
b9fccbd691 Update src/SharpCompress/Factories/ZStandardFactory.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-01-16 09:29:13 +00:00
copilot-swe-agent[bot]
bbbbc8810a Initial plan 2026-01-16 09:29:09 +00:00
copilot-swe-agent[bot]
c7da19f3a5 Format code with CSharpier
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 09:26:04 +00:00
copilot-swe-agent[bot]
e919930cf6 Fix Archive async tests to not use AsyncOnlyStream (archives need seekable streams)
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 09:23:32 +00:00
copilot-swe-agent[bot]
2906529080 Fix ReaderFactory.OpenAsyncReader to use async IsArchiveAsync methods
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 09:12:54 +00:00
copilot-swe-agent[bot]
75cc36849b Initial plan 2026-01-16 09:00:13 +00:00
Adam Hathcock
63e124e72f Upgrade xunit to v3 2026-01-16 08:58:26 +00:00
Adam Hathcock
394d982168 Merge pull request #1133 from adamhathcock/copilot/sub-pr-1132
Add async I/O support for SevenZip archive initialization
2026-01-16 08:44:04 +00:00
Adam Hathcock
f4ce4cbad8 fix tests for both frameworks 2026-01-16 08:43:13 +00:00
Adam Hathcock
491beabe03 uncomment tests 2026-01-16 08:35:49 +00:00
copilot-swe-agent[bot]
9bb670ad19 Fix SevenZipArchive async stream handling by adding async Open and ReadDatabase methods
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-15 17:28:05 +00:00
copilot-swe-agent[bot]
bbba2e6c7a Initial plan for fixing SevenZipArchive_LZMA_AsyncStreamExtraction test
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-15 16:53:18 +00:00
copilot-swe-agent[bot]
0b2158f74c Initial plan 2026-01-15 16:44:57 +00:00
Adam Hathcock
5c06b8c48f enable single test 2026-01-15 16:41:58 +00:00
Adam Hathcock
810df8a18b revert lazy archive 2026-01-15 16:40:08 +00:00
Adam Hathcock
63736efcac Merge remote-tracking branch 'origin/master' into adam/async-creation
# Conflicts:
#	tests/SharpCompress.Test/SevenZip/SevenZipArchiveAsyncTests.cs
2026-01-15 16:21:30 +00:00
Adam Hathcock
33b6447c18 Merge remote-tracking branch 'origin/master' into adam/async-creation 2026-01-15 16:16:41 +00:00
Adam Hathcock
2d597e6e43 be more lazy with loading of sync stuff 2026-01-15 15:09:23 +00:00
Adam Hathcock
a410f73bf3 archive asyncs are more right 2026-01-15 14:52:10 +00:00
95 changed files with 4485 additions and 888 deletions

View File

@@ -12,7 +12,7 @@
<PackageVersion Include="System.Text.Encoding.CodePages" Version="10.0.0" />
<PackageVersion Include="System.Buffers" Version="4.6.1" />
<PackageVersion Include="System.Memory" Version="4.6.3" />
<PackageVersion Include="xunit" Version="2.9.3" />
<PackageVersion Include="xunit.v3" Version="3.2.1" />
<PackageVersion Include="xunit.runner.visualstudio" Version="3.1.5" />
<GlobalPackageReference Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
<GlobalPackageReference Include="Microsoft.NETFramework.ReferenceAssemblies" Version="1.0.3" />

View File

@@ -200,7 +200,7 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IAsyncArchive
private async IAsyncEnumerable<IVolume> VolumesAsyncCast()
{
await foreach (var volume in VolumesAsync)
await foreach (var volume in _lazyVolumesAsync)
{
yield return volume;
}

View File

@@ -73,7 +73,25 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
modifiedEntries.AddRange(OldEntries.Concat(newEntries));
}
private async ValueTask RebuildModifiedCollectionAsync()
{
if (pauseRebuilding)
{
return;
}
hasModifications = true;
newEntries.RemoveAll(v => removedEntries.Contains(v));
modifiedEntries.Clear();
await foreach (var entry in OldEntriesAsync)
{
modifiedEntries.Add(entry);
}
modifiedEntries.AddRange(newEntries);
}
private IEnumerable<TEntry> OldEntries => base.Entries.Where(x => !removedEntries.Contains(x));
private IAsyncEnumerable<TEntry> OldEntriesAsync =>
base.EntriesAsync.Where(x => !removedEntries.Contains(x));
public void RemoveEntry(TEntry entry)
{
@@ -84,12 +102,24 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
}
}
void IWritableArchiveCommon.RemoveEntry(IArchiveEntry entry) => RemoveEntry((TEntry)entry);
public async ValueTask RemoveEntryAsync(TEntry entry)
{
if (!removedEntries.Contains(entry))
{
removedEntries.Add(entry);
await RebuildModifiedCollectionAsync();
}
}
void IWritableArchive.RemoveEntry(IArchiveEntry entry) => RemoveEntry((TEntry)entry);
ValueTask IWritableAsyncArchive.RemoveEntryAsync(IArchiveEntry entry) =>
RemoveEntryAsync((TEntry)entry);
public TEntry AddEntry(string key, Stream source, long size = 0, DateTime? modified = null) =>
AddEntry(key, source, false, size, modified);
IArchiveEntry IWritableArchiveCommon.AddEntry(
IArchiveEntry IWritableArchive.AddEntry(
string key,
Stream source,
bool closeStream,
@@ -97,7 +127,7 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
DateTime? modified
) => AddEntry(key, source, closeStream, size, modified);
IArchiveEntry IWritableArchiveCommon.AddDirectoryEntry(string key, DateTime? modified) =>
IArchiveEntry IWritableArchive.AddDirectoryEntry(string key, DateTime? modified) =>
AddDirectoryEntry(key, modified);
public TEntry AddEntry(
@@ -140,6 +170,68 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
return false;
}
private async ValueTask<bool> DoesKeyMatchExistingAsync(
string key,
CancellationToken cancellationToken
)
{
await foreach (
var entry in EntriesAsync.WithCancellation(cancellationToken).ConfigureAwait(false)
)
{
var path = entry.Key;
if (path is null)
{
continue;
}
var p = path.Replace('/', '\\');
if (p.Length > 0 && p[0] == '\\')
{
p = p.Substring(1);
}
return string.Equals(p, key, StringComparison.OrdinalIgnoreCase);
}
return false;
}
async ValueTask<IArchiveEntry> IWritableAsyncArchive.AddEntryAsync(
string key,
Stream source,
bool closeStream,
long size,
DateTime? modified,
CancellationToken cancellationToken
) => await AddEntryAsync(key, source, closeStream, size, modified, cancellationToken);
async ValueTask<IArchiveEntry> IWritableAsyncArchive.AddDirectoryEntryAsync(
string key,
DateTime? modified,
CancellationToken cancellationToken
) => await AddDirectoryEntryAsync(key, modified, cancellationToken);
public async ValueTask<TEntry> AddEntryAsync(
string key,
Stream source,
bool closeStream,
long size = 0,
DateTime? modified = null,
CancellationToken cancellationToken = default
)
{
if (key.Length > 0 && key[0] is '/' or '\\')
{
key = key.Substring(1);
}
if (await DoesKeyMatchExistingAsync(key, cancellationToken).ConfigureAwait(false))
{
throw new ArchiveException("Cannot add entry with duplicate key: " + key);
}
var entry = CreateEntry(key, source, size, modified, closeStream);
newEntries.Add(entry);
await RebuildModifiedCollectionAsync();
return entry;
}
public TEntry AddDirectoryEntry(string key, DateTime? modified = null)
{
if (key.Length > 0 && key[0] is '/' or '\\')
@@ -156,6 +248,26 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
return entry;
}
public async ValueTask<TEntry> AddDirectoryEntryAsync(
string key,
DateTime? modified = null,
CancellationToken cancellationToken = default
)
{
if (key.Length > 0 && key[0] is '/' or '\\')
{
key = key.Substring(1);
}
if (await DoesKeyMatchExistingAsync(key, cancellationToken).ConfigureAwait(false))
{
throw new ArchiveException("Cannot add entry with duplicate key: " + key);
}
var entry = CreateDirectoryEntry(key, modified);
newEntries.Add(entry);
await RebuildModifiedCollectionAsync();
return entry;
}
public void SaveTo(Stream stream, WriterOptions options)
{
//reset streams of new entries
@@ -171,7 +283,7 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
{
//reset streams of new entries
newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
await SaveToAsync(stream, options, OldEntries, newEntries, cancellationToken)
await SaveToAsync(stream, options, OldEntriesAsync, newEntries, cancellationToken)
.ConfigureAwait(false);
}
@@ -212,7 +324,7 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
protected abstract ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IEnumerable<TEntry> oldEntries,
IAsyncEnumerable<TEntry> oldEntries,
IEnumerable<TEntry> newEntries,
CancellationToken cancellationToken = default
);

View File

@@ -78,7 +78,7 @@ public static class ArchiveFactory
options ??= new ReaderOptions { LeaveStreamOpen = false };
var factory = await FindFactoryAsync<IArchiveFactory>(fileInfo, cancellationToken);
return factory.OpenAsyncArchive(fileInfo, options, cancellationToken);
return factory.OpenAsyncArchive(fileInfo, options);
}
public static IArchive OpenArchive(
@@ -175,7 +175,7 @@ public static class ArchiveFactory
firstStream.NotNull(nameof(firstStream));
options ??= new ReaderOptions();
var factory = FindFactory<IMultiArchiveFactory>(firstStream);
var factory = await FindFactoryAsync<IMultiArchiveFactory>(firstStream, cancellationToken);
return factory.OpenAsyncArchive(streamsArray, options);
}
@@ -189,7 +189,25 @@ public static class ArchiveFactory
archive.WriteToDirectory(destinationDirectory, options);
}
private static T FindFactory<T>(FileInfo finfo)
public static T FindFactory<T>(string path)
where T : IFactory
{
path.NotNullOrEmpty(nameof(path));
using Stream stream = File.OpenRead(path);
return FindFactory<T>(stream);
}
public static ValueTask<T> FindFactoryAsync<T>(
string path,
CancellationToken cancellationToken = default
)
where T : IFactory
{
path.NotNullOrEmpty(nameof(path));
return FindFactoryAsync<T>(new FileInfo(path), cancellationToken);
}
public static T FindFactory<T>(FileInfo finfo)
where T : IFactory
{
finfo.NotNull(nameof(finfo));
@@ -197,7 +215,7 @@ public static class ArchiveFactory
return FindFactory<T>(stream);
}
private static T FindFactory<T>(Stream stream)
public static T FindFactory<T>(Stream stream)
where T : IFactory
{
stream.NotNull(nameof(stream));
@@ -345,6 +363,4 @@ public static class ArchiveFactory
}
}
}
public static IArchiveFactory AutoFactory { get; } = new AutoArchiveFactory();
}

View File

@@ -1,52 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
internal class AutoArchiveFactory : IArchiveFactory
{
public string Name => nameof(AutoArchiveFactory);
public ArchiveType? KnownArchiveType => null;
public IEnumerable<string> GetSupportedExtensions() => throw new NotSupportedException();
public bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => throw new NotSupportedException();
public ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
) => throw new NotSupportedException();
public FileInfo? GetFilePart(int index, FileInfo part1) => throw new NotSupportedException();
public IArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null) =>
ArchiveFactory.OpenArchive(stream, readerOptions);
public IAsyncArchive OpenAsyncArchive(Stream stream, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)OpenArchive(stream, readerOptions);
public IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
ArchiveFactory.OpenArchive(fileInfo, readerOptions);
public IAsyncArchive OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)OpenArchive(fileInfo, readerOptions);
}
}

View File

@@ -95,7 +95,7 @@ public partial class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZi
protected override async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IEnumerable<GZipArchiveEntry> oldEntries,
IAsyncEnumerable<GZipArchiveEntry> oldEntries,
IEnumerable<GZipArchiveEntry> newEntries,
CancellationToken cancellationToken = default
)
@@ -105,7 +105,23 @@ public partial class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZi
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
}
using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
await foreach (
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
)
{
if (!entry.IsDirectory)
{
using var entryStream = entry.OpenEntryStream();
await writer
.WriteAsync(
entry.Key.NotNull("Entry Key is null"),
entryStream,
cancellationToken
)
.ConfigureAwait(false);
}
}
foreach (var entry in newEntries.Where(x => !x.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
await writer

View File

@@ -47,9 +47,5 @@ public interface IArchiveFactory : IFactory
/// <param name="fileInfo">the file to open.</param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
IAsyncArchive OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null);
}

View File

@@ -13,12 +13,10 @@ public interface IWritableArchiveCommon
/// </summary>
/// <returns>IDisposeable to resume entry rebuilding</returns>
IDisposable PauseEntryRebuilding();
}
/// <summary>
/// Removes the specified entry from the archive.
/// </summary>
void RemoveEntry(IArchiveEntry entry);
public interface IWritableArchive : IArchive, IWritableArchiveCommon
{
IArchiveEntry AddEntry(
string key,
Stream source,
@@ -28,14 +26,16 @@ public interface IWritableArchiveCommon
);
IArchiveEntry AddDirectoryEntry(string key, DateTime? modified = null);
}
public interface IWritableArchive : IArchive, IWritableArchiveCommon
{
/// <summary>
/// Saves the archive to the specified stream using the given writer options.
/// </summary>
void SaveTo(Stream stream, WriterOptions options);
/// <summary>
/// Removes the specified entry from the archive.
/// </summary>
void RemoveEntry(IArchiveEntry entry);
}
public interface IWritableAsyncArchive : IAsyncArchive, IWritableArchiveCommon
@@ -48,4 +48,30 @@ public interface IWritableAsyncArchive : IAsyncArchive, IWritableArchiveCommon
WriterOptions options,
CancellationToken cancellationToken = default
);
/// <summary>
/// Asynchronously adds an entry to the archive with the specified key, source stream, and options.
/// </summary>
ValueTask<IArchiveEntry> AddEntryAsync(
string key,
Stream source,
bool closeStream,
long size = 0,
DateTime? modified = null,
CancellationToken cancellationToken = default
);
/// <summary>
/// Asynchronously adds a directory entry to the archive with the specified key and modification time.
/// </summary>
ValueTask<IArchiveEntry> AddDirectoryEntryAsync(
string key,
DateTime? modified = null,
CancellationToken cancellationToken = default
);
/// <summary>
/// Removes the specified entry from the archive.
/// </summary>
ValueTask RemoveEntryAsync(IArchiveEntry entry);
}

View File

@@ -1,59 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Archives;
public static class IWritableArchiveCommonExtensions
{
extension(IWritableArchiveCommon writableArchive)
{
public void AddAllFromDirectory(
string filePath,
string searchPattern = "*.*",
SearchOption searchOption = SearchOption.AllDirectories
)
{
using (writableArchive.PauseEntryRebuilding())
{
foreach (
var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption)
)
{
var fileInfo = new FileInfo(path);
writableArchive.AddEntry(
path.Substring(filePath.Length),
fileInfo.OpenRead(),
true,
fileInfo.Length,
fileInfo.LastWriteTime
);
}
}
}
public IArchiveEntry AddEntry(string key, string file) =>
writableArchive.AddEntry(key, new FileInfo(file));
public IArchiveEntry AddEntry(
string key,
Stream source,
long size = 0,
DateTime? modified = null
) => writableArchive.AddEntry(key, source, false, size, modified);
public IArchiveEntry AddEntry(string key, FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
throw new ArgumentException("FileInfo does not exist.");
}
return writableArchive.AddEntry(
key,
fileInfo.OpenRead(),
true,
fileInfo.Length,
fileInfo.LastWriteTime
);
}
}
}

View File

@@ -1,3 +1,4 @@
using System;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Writers;
@@ -8,6 +9,55 @@ public static class IWritableArchiveExtensions
{
extension(IWritableArchive writableArchive)
{
public void AddAllFromDirectory(
string filePath,
string searchPattern = "*.*",
SearchOption searchOption = SearchOption.AllDirectories
)
{
using (writableArchive.PauseEntryRebuilding())
{
foreach (
var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption)
)
{
var fileInfo = new FileInfo(path);
writableArchive.AddEntry(
path.Substring(filePath.Length),
fileInfo.OpenRead(),
true,
fileInfo.Length,
fileInfo.LastWriteTime
);
}
}
}
public IArchiveEntry AddEntry(string key, string file) =>
writableArchive.AddEntry(key, new FileInfo(file));
public IArchiveEntry AddEntry(
string key,
Stream source,
long size = 0,
DateTime? modified = null
) => writableArchive.AddEntry(key, source, false, size, modified);
public IArchiveEntry AddEntry(string key, FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
throw new ArgumentException("FileInfo does not exist.");
}
return writableArchive.AddEntry(
key,
fileInfo.OpenRead(),
true,
fileInfo.Length,
fileInfo.LastWriteTime
);
}
public void SaveTo(string filePath, WriterOptions? options = null) =>
writableArchive.SaveTo(new FileInfo(filePath), options ?? new(CompressionType.Deflate));

View File

@@ -1,3 +1,4 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
@@ -10,6 +11,55 @@ public static class IWritableAsyncArchiveExtensions
{
extension(IWritableAsyncArchive writableArchive)
{
public async ValueTask AddAllFromDirectoryAsync(
string filePath,
string searchPattern = "*.*",
SearchOption searchOption = SearchOption.AllDirectories
)
{
using (writableArchive.PauseEntryRebuilding())
{
foreach (
var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption)
)
{
var fileInfo = new FileInfo(path);
await writableArchive.AddEntryAsync(
path.Substring(filePath.Length),
fileInfo.OpenRead(),
true,
fileInfo.Length,
fileInfo.LastWriteTime
);
}
}
}
public ValueTask<IArchiveEntry> AddEntryAsync(string key, string file) =>
writableArchive.AddEntryAsync(key, new FileInfo(file));
public ValueTask<IArchiveEntry> AddEntryAsync(
string key,
Stream source,
long size = 0,
DateTime? modified = null
) => writableArchive.AddEntryAsync(key, source, false, size, modified);
public ValueTask<IArchiveEntry> AddEntryAsync(string key, FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
throw new ArgumentException("FileInfo does not exist.");
}
return writableArchive.AddEntryAsync(
key,
fileInfo.OpenRead(),
true,
fileInfo.Length,
fileInfo.LastWriteTime
);
}
public ValueTask SaveToAsync(
string filePath,
WriterOptions? options = null,

View File

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
@@ -163,4 +164,24 @@ public partial class RarArchive
return false;
}
}
public static async ValueTask<bool> IsRarFileAsync(
Stream stream,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
await MarkHeader
.ReadAsync(stream, true, false, cancellationToken)
.ConfigureAwait(false);
return true;
}
catch
{
return false;
}
}
}

View File

@@ -111,8 +111,12 @@ public partial class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>, I
public override bool IsSolid => Volumes.First().IsSolidArchive;
public override async ValueTask<bool> IsSolidAsync() =>
(await VolumesAsync.CastAsync<RarVolume>().FirstAsync()).IsSolidArchive;
public override bool IsEncrypted => Entries.First(x => !x.IsDirectory).IsEncrypted;
public virtual int MinVersion => Volumes.First().MinVersion;
public virtual int MaxVersion => Volumes.First().MaxVersion;
}

View File

@@ -102,7 +102,9 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
stream = new RarStream(
archive.UnpackV1.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>())
await MultiVolumeReadOnlyAsyncStream.Create(
Parts.ToAsyncEnumerable().CastAsync<RarFilePart>()
)
);
}
else
@@ -110,7 +112,9 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
stream = new RarStream(
archive.UnpackV2017.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>())
await MultiVolumeReadOnlyAsyncStream.Create(
Parts.ToAsyncEnumerable().CastAsync<RarFilePart>()
)
);
}

View File

@@ -1,12 +1,10 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.SevenZip;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -157,13 +155,56 @@ public partial class SevenZipArchive
}
}
private static ReadOnlySpan<byte> Signature =>
new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };
public static async ValueTask<bool> IsSevenZipFileAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
return await SignatureMatchAsync(stream, cancellationToken);
}
catch
{
return false;
}
}
private static ReadOnlySpan<byte> Signature => [(byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C];
private static bool SignatureMatch(Stream stream)
{
var reader = new BinaryReader(stream);
ReadOnlySpan<byte> signatureBytes = reader.ReadBytes(6);
return signatureBytes.SequenceEqual(Signature);
var buffer = ArrayPool<byte>.Shared.Rent(6);
try
{
stream.ReadExact(buffer, 0, 6);
return buffer.AsSpan().Slice(0, 6).SequenceEqual(Signature);
}
finally
{
ArrayPool<byte>.Shared.Return(buffer);
}
}
private static async ValueTask<bool> SignatureMatchAsync(
Stream stream,
CancellationToken cancellationToken
)
{
var buffer = ArrayPool<byte>.Shared.Rent(6);
try
{
if (!await stream.ReadFullyAsync(buffer, 0, 6, cancellationToken).ConfigureAwait(false))
{
return false;
}
return buffer.AsSpan().Slice(0, 6).SequenceEqual(Signature);
}
finally
{
ArrayPool<byte>.Shared.Return(buffer);
}
}
}

View File

@@ -32,11 +32,56 @@ public partial class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, Sev
IEnumerable<SevenZipVolume> volumes
)
{
var stream = volumes.Single().Stream;
LoadFactory(stream);
foreach (var volume in volumes)
{
LoadFactory(volume.Stream);
if (_database is null)
{
yield break;
}
var entries = new SevenZipArchiveEntry[_database._files.Count];
for (var i = 0; i < _database._files.Count; i++)
{
var file = _database._files[i];
entries[i] = new SevenZipArchiveEntry(
this,
new SevenZipFilePart(
volume.Stream,
_database,
i,
file,
ReaderOptions.ArchiveEncoding
)
);
}
foreach (
var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder)
)
{
var isSolid = false;
foreach (var entry in group)
{
entry.IsSolid = isSolid;
isSolid = true;
}
}
foreach (var entry in entries)
{
yield return entry;
}
}
}
protected override async IAsyncEnumerable<SevenZipArchiveEntry> LoadEntriesAsync(
IAsyncEnumerable<SevenZipVolume> volumes
)
{
var stream = (await volumes.SingleAsync()).Stream;
await LoadFactoryAsync(stream);
if (_database is null)
{
return Enumerable.Empty<SevenZipArchiveEntry>();
yield break;
}
var entries = new SevenZipArchiveEntry[_database._files.Count];
for (var i = 0; i < _database._files.Count; i++)
@@ -57,7 +102,10 @@ public partial class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, Sev
}
}
return entries;
foreach (var entry in entries)
{
yield return entry;
}
}
private void LoadFactory(Stream stream)
@@ -71,6 +119,27 @@ public partial class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, Sev
}
}
private async Task LoadFactoryAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
if (_database is null)
{
stream.Position = 0;
var reader = new ArchiveReader();
await reader.OpenAsync(
stream,
lookForHeader: ReaderOptions.LookForHeader,
cancellationToken
);
_database = await reader.ReadDatabaseAsync(
new PasswordProvider(ReaderOptions.Password),
cancellationToken
);
}
}
protected override IReader CreateReaderForSolidExtraction() =>
new SevenZipReader(ReaderOptions, this);

View File

@@ -2,15 +2,13 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Writers;
using SharpCompress.Writers.Tar;
namespace SharpCompress.Archives.Tar;
@@ -154,15 +152,44 @@ public partial class TarArchive
try
{
var tarHeader = new TarHeader(new ArchiveEncoding());
var readSucceeded = tarHeader.Read(new BinaryReader(stream));
var reader = new BinaryReader(stream, Encoding.UTF8, false);
var readSucceeded = tarHeader.Read(reader);
var isEmptyArchive =
tarHeader.Name?.Length == 0
&& tarHeader.Size == 0
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;
}
catch { }
return false;
catch (Exception)
{
// Catch all exceptions during tar header reading to determine if this is a valid tar file
// Invalid tar files or corrupted streams will throw various exceptions
return false;
}
}
public static async ValueTask<bool> IsTarFileAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
try
{
var tarHeader = new TarHeader(new ArchiveEncoding());
var reader = new AsyncBinaryReader(stream, false);
var readSucceeded = await tarHeader.ReadAsync(reader);
var isEmptyArchive =
tarHeader.Name?.Length == 0
&& tarHeader.Size == 0
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;
}
catch (Exception)
{
// Catch all exceptions during tar header reading to determine if this is a valid tar file
// Invalid tar files or corrupted streams will throw various exceptions
return false;
}
}
public static IWritableArchive CreateArchive() => new TarArchive();

View File

@@ -32,6 +32,10 @@ public partial class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVo
protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
{
var stream = volumes.Single().Stream;
if (stream.CanSeek)
{
stream.Position = 0;
}
TarHeader? previousHeader = null;
foreach (
var header in TarHeaderFactory.ReadHeader(
@@ -87,6 +91,77 @@ public partial class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVo
}
}
/// <summary>
/// Asynchronously enumerates the entries of the (single-volume) tar archive.
/// Handles the GNU long-name extension: a LongName pseudo-entry's payload
/// holds the real name of the entry that follows it.
/// </summary>
protected override async IAsyncEnumerable<TarArchiveEntry> LoadEntriesAsync(
    IAsyncEnumerable<TarVolume> volumes
)
{
    var stream = (await volumes.SingleAsync()).Stream;
    // Rewind so headers are read from the start when the stream allows it.
    if (stream.CanSeek)
    {
        stream.Position = 0;
    }

    TarHeader? previousHeader = null;
    await foreach (
        var header in TarHeaderFactory.ReadHeaderAsync(
            StreamingMode.Seekable,
            stream,
            ReaderOptions.ArchiveEncoding
        )
    )
    {
        if (header is null)
        {
            throw new IncompleteArchiveException("Failed to read TAR header");
        }

        if (header.EntryType == EntryType.LongName)
        {
            // Remember the long-name pseudo-entry; its data names the NEXT entry.
            previousHeader = header;
            continue;
        }

        if (previousHeader != null)
        {
            // Read the long-name payload and apply it to the current header.
            var entry = new TarArchiveEntry(
                this,
                new TarFilePart(previousHeader, stream),
                CompressionType.None
            );
            var oldStreamPos = stream.Position;
            using (var entryStream = entry.OpenEntryStream())
            {
                using var memoryStream = new MemoryStream();
                await entryStream.CopyToAsync(memoryStream);
                // ToArray() ignores Position, so no rewind is needed here.
                header.Name = ReaderOptions
                    .ArchiveEncoding.Decode(memoryStream.ToArray())
                    .TrimNulls();
            }
            // Restore the position so the real entry's data remains readable.
            stream.Position = oldStreamPos;
            previousHeader = null;
        }

        yield return new TarArchiveEntry(
            this,
            new TarFilePart(header, stream),
            CompressionType.None
        );
    }
}
protected override TarArchiveEntry CreateEntryInternal(
string filePath,
Stream source,
@@ -142,13 +217,41 @@ public partial class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVo
protected override async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IEnumerable<TarArchiveEntry> oldEntries,
IAsyncEnumerable<TarArchiveEntry> oldEntries,
IEnumerable<TarArchiveEntry> newEntries,
CancellationToken cancellationToken = default
)
{
using var writer = new TarWriter(stream, new TarWriterOptions(options));
foreach (var entry in oldEntries.Concat(newEntries))
await foreach (
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
)
{
if (entry.IsDirectory)
{
await writer
.WriteDirectoryAsync(
entry.Key.NotNull("Entry Key is null"),
entry.LastModifiedTime,
cancellationToken
)
.ConfigureAwait(false);
}
else
{
using var entryStream = entry.OpenEntryStream();
await writer
.WriteAsync(
entry.Key.NotNull("Entry Key is null"),
entryStream,
entry.LastModifiedTime,
entry.Size,
cancellationToken
)
.ConfigureAwait(false);
}
}
foreach (var entry in newEntries)
{
if (entry.IsDirectory)
{

View File

@@ -195,13 +195,39 @@ public partial class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVo
protected override async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IEnumerable<ZipArchiveEntry> oldEntries,
IAsyncEnumerable<ZipArchiveEntry> oldEntries,
IEnumerable<ZipArchiveEntry> newEntries,
CancellationToken cancellationToken = default
)
{
using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
foreach (var entry in oldEntries.Concat(newEntries))
await foreach (
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
)
{
if (entry.IsDirectory)
{
await writer
.WriteDirectoryAsync(
entry.Key.NotNull("Entry Key is null"),
entry.LastModifiedTime,
cancellationToken
)
.ConfigureAwait(false);
}
else
{
using var entryStream = entry.OpenEntryStream();
await writer
.WriteAsync(
entry.Key.NotNull("Entry Key is null"),
entryStream,
cancellationToken
)
.ConfigureAwait(false);
}
}
foreach (var entry in newEntries)
{
if (entry.IsDirectory)
{

View File

@@ -2,6 +2,8 @@ using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using System.Xml.Linq;
using SharpCompress.Common.Arc;
@@ -147,6 +149,107 @@ namespace SharpCompress.Common.Ace.Headers
return this;
}
/// <summary>
/// Asynchronously reads the next file entry header from the stream.
/// Returns null if no more entries or end of archive.
/// Supports both ACE 1.0 and ACE 2.0 formats.
/// </summary>
public override async ValueTask<AceHeader?> ReadAsync(
    Stream stream,
    CancellationToken cancellationToken = default
)
{
    // NOTE(review): BitConverter uses host endianness; ACE fields are little-endian.
    // Correct on LE hosts — verify if BE platforms are a target.
    var headerData = await ReadHeaderAsync(stream, cancellationToken);
    if (headerData.Length == 0)
    {
        // End of archive (or empty/truncated header).
        return null;
    }
    int offset = 0;
    // Header type (1 byte)
    HeaderType = headerData[offset++];
    // Skip recovery record headers (ACE 2.0 feature)
    if (HeaderType == (byte)SharpCompress.Common.Ace.Headers.AceHeaderType.RECOVERY32)
    {
        // Skip to next header
        return null;
    }
    if (HeaderType != (byte)SharpCompress.Common.Ace.Headers.AceHeaderType.FILE)
    {
        // Unknown header type - skip
        return null;
    }
    // Header flags (2 bytes)
    HeaderFlags = BitConverter.ToUInt16(headerData, offset);
    offset += 2;
    // Packed size (4 bytes)
    PackedSize = BitConverter.ToUInt32(headerData, offset);
    offset += 4;
    // Original size (4 bytes)
    OriginalSize = BitConverter.ToUInt32(headerData, offset);
    offset += 4;
    // File date/time in DOS format (4 bytes)
    var dosDateTime = BitConverter.ToUInt32(headerData, offset);
    DateTime = ConvertDosDateTime(dosDateTime);
    offset += 4;
    // File attributes (4 bytes)
    Attributes = (int)BitConverter.ToUInt32(headerData, offset);
    offset += 4;
    // CRC32 (4 bytes)
    Crc32 = BitConverter.ToUInt32(headerData, offset);
    offset += 4;
    // Compression type (1 byte)
    byte compressionType = headerData[offset++];
    CompressionType = GetCompressionType(compressionType);
    // Compression quality/parameter (1 byte)
    byte compressionQuality = headerData[offset++];
    CompressionQuality = GetCompressionQuality(compressionQuality);
    // Parameters (2 bytes)
    Parameters = BitConverter.ToUInt16(headerData, offset);
    offset += 2;
    // Reserved (2 bytes) - skip
    offset += 2;
    // Filename length (2 bytes)
    var filenameLength = BitConverter.ToUInt16(headerData, offset);
    offset += 2;
    // Filename (bounds-checked; truncated headers leave Filename unset)
    if (offset + filenameLength <= headerData.Length)
    {
        Filename = ArchiveEncoding.Decode(headerData, offset, filenameLength);
        offset += filenameLength;
    }
    // Handle comment if present
    if ((HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.COMMENT) != 0)
    {
        // Comment length (2 bytes)
        if (offset + 2 <= headerData.Length)
        {
            ushort commentLength = BitConverter.ToUInt16(headerData, offset);
            offset += 2 + commentLength; // Skip comment
        }
    }
    // Store the data start position
    // NOTE(review): assumes a seekable stream; Position throws on pure
    // async/network streams — confirm callers guarantee seekability.
    DataStartPosition = stream.Position;
    return this;
}
public CompressionType GetCompressionType(byte value) =>
value switch
{

View File

@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Arj.Headers;
using SharpCompress.Crypto;
@@ -58,6 +60,11 @@ namespace SharpCompress.Common.Ace.Headers
public abstract AceHeader? Read(Stream reader);
public abstract ValueTask<AceHeader?> ReadAsync(
Stream reader,
CancellationToken cancellationToken = default
);
public byte[] ReadHeader(Stream stream)
{
// Read header CRC (2 bytes) and header size (2 bytes)
@@ -90,6 +97,41 @@ namespace SharpCompress.Common.Ace.Headers
return body;
}
/// <summary>
/// Asynchronously reads and CRC-validates the raw header body.
/// Returns an empty array at end of stream or for a zero-length header.
/// </summary>
/// <exception cref="InvalidDataException">Header CRC16 does not match.</exception>
public async ValueTask<byte[]> ReadHeaderAsync(
    Stream stream,
    CancellationToken cancellationToken = default
)
{
    // Stream.ReadAsync may return fewer bytes than requested even when more
    // data is coming, so loop until the buffer is full or EOF is reached.
    static async ValueTask<int> ReadFullyAsync(
        Stream s,
        byte[] buffer,
        int count,
        CancellationToken ct
    )
    {
        var total = 0;
        while (total < count)
        {
            var read = await s.ReadAsync(buffer, total, count - total, ct);
            if (read == 0)
            {
                break; // EOF
            }
            total += read;
        }
        return total;
    }

    // Read header CRC (2 bytes) and header size (2 bytes)
    var headerBytes = new byte[4];
    if (await ReadFullyAsync(stream, headerBytes, 4, cancellationToken) != 4)
    {
        return Array.Empty<byte>();
    }
    HeaderCrc = BitConverter.ToUInt16(headerBytes, 0); // CRC for validation
    HeaderSize = BitConverter.ToUInt16(headerBytes, 2);
    if (HeaderSize == 0)
    {
        return Array.Empty<byte>();
    }
    // Read the header data
    var body = new byte[HeaderSize];
    if (await ReadFullyAsync(stream, body, HeaderSize, cancellationToken) != HeaderSize)
    {
        return Array.Empty<byte>();
    }
    // Verify crc
    var checksum = AceCrc.AceCrc16(body);
    if (checksum != HeaderCrc)
    {
        throw new InvalidDataException("Header checksum is invalid");
    }
    return body;
}
public static bool IsArchive(Stream stream)
{
// ACE files have a specific signature
@@ -105,6 +147,26 @@ namespace SharpCompress.Common.Ace.Headers
return CheckMagicBytes(bytes, 7);
}
/// <summary>
/// Asynchronously checks if the stream is an ACE archive
/// </summary>
/// <param name="stream">The stream to read from</param>
/// <param name="cancellationToken">Cancellation token</param>
/// <returns>True if the stream is an ACE archive, false otherwise</returns>
public static async ValueTask<bool> IsArchiveAsync(
    Stream stream,
    CancellationToken cancellationToken = default
)
{
    // The "**ACE**" signature sits at offset 7 of the first 14 bytes.
    // Loop the read: ReadAsync may legitimately return fewer bytes than
    // requested even when the stream is long enough.
    var bytes = new byte[14];
    var total = 0;
    while (total < bytes.Length)
    {
        var read = await stream.ReadAsync(
            bytes,
            total,
            bytes.Length - total,
            cancellationToken
        );
        if (read == 0)
        {
            // Stream shorter than 14 bytes: cannot be an ACE archive.
            return false;
        }
        total += read;
    }
    return CheckMagicBytes(bytes, 7);
}
protected static bool CheckMagicBytes(byte[] headerBytes, int offset)
{
// Check for "**ACE**" at specified offset

View File

@@ -2,6 +2,8 @@ using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Ace.Headers;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Crypto;
@@ -93,5 +95,77 @@ namespace SharpCompress.Common.Ace.Headers
return this;
}
/// <summary>
/// Asynchronously reads the main archive header from the stream.
/// Returns header if this is a valid ACE archive.
/// Supports both ACE 1.0 and ACE 2.0 formats.
/// </summary>
public override async ValueTask<AceHeader?> ReadAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
var headerData = await ReadHeaderAsync(stream, cancellationToken);
if (headerData.Length == 0)
{
return null;
}
int offset = 0;
// Header type should be 0 for main header
if (headerData[offset++] != HeaderType)
{
return null;
}
// Header flags (2 bytes)
HeaderFlags = BitConverter.ToUInt16(headerData, offset);
offset += 2;
// Skip signature "**ACE**" (7 bytes)
if (!CheckMagicBytes(headerData, offset))
{
throw new InvalidDataException("Invalid ACE archive signature.");
}
offset += 7;
// ACE version (1 byte) - 10 for ACE 1.0, 20 for ACE 2.0
AceVersion = headerData[offset++];
ExtractVersion = headerData[offset++];
// Host OS (1 byte)
if (offset < headerData.Length)
{
var hostOsByte = headerData[offset++];
HostOS = hostOsByte <= 11 ? (HostOS)hostOsByte : HostOS.Unknown;
}
// Volume number (1 byte)
VolumeNumber = headerData[offset++];
// Creation date/time (4 bytes)
var dosDateTime = BitConverter.ToUInt32(headerData, offset);
DateTime = ConvertDosDateTime(dosDateTime);
offset += 4;
// Reserved fields (8 bytes)
if (offset + 8 <= headerData.Length)
{
offset += 8;
}
// Skip additional fields based on flags
// Handle comment if present
if ((HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.COMMENT) != 0)
{
if (offset + 2 <= headerData.Length)
{
ushort commentLength = BitConverter.ToUInt16(headerData, offset);
offset += 2 + commentLength;
}
}
return this;
}
}
}

View File

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Crypto;
@@ -31,6 +32,11 @@ namespace SharpCompress.Common.Arj.Headers
public abstract ArjHeader? Read(Stream reader);
public abstract ValueTask<ArjHeader?> ReadAsync(
Stream reader,
CancellationToken cancellationToken = default
);
public byte[] ReadHeader(Stream stream)
{
// check for magic bytes
@@ -72,6 +78,102 @@ namespace SharpCompress.Common.Arj.Headers
return body;
}
/// <summary>
/// Asynchronously reads and CRC32-validates the raw ARJ header body.
/// Returns an empty array at end of stream, for a zero-length header, or
/// when the stream is truncated mid-header.
/// </summary>
/// <exception cref="InvalidDataException">Bad magic bytes or CRC mismatch.</exception>
public async ValueTask<byte[]> ReadHeaderAsync(
    Stream stream,
    CancellationToken cancellationToken = default
)
{
    // Stream.ReadAsync may return fewer bytes than requested even when more
    // data is coming, so loop until the buffer is full or EOF is reached.
    static async ValueTask<int> ReadFullyAsync(
        Stream s,
        byte[] buffer,
        int count,
        CancellationToken ct
    )
    {
        var total = 0;
        while (total < count)
        {
            var read = await s.ReadAsync(buffer, total, count - total, ct);
            if (read == 0)
            {
                break; // EOF
            }
            total += read;
        }
        return total;
    }

    // check for magic bytes
    var magic = new byte[2];
    if (await ReadFullyAsync(stream, magic, 2, cancellationToken) != 2)
    {
        return Array.Empty<byte>();
    }
    if (!CheckMagicBytes(magic))
    {
        throw new InvalidDataException("Not an ARJ file (wrong magic bytes)");
    }
    // read header_size (the result was previously ignored; a short read here
    // would have parsed garbage)
    byte[] headerBytes = new byte[2];
    if (await ReadFullyAsync(stream, headerBytes, 2, cancellationToken) != 2)
    {
        return Array.Empty<byte>();
    }
    var headerSize = (ushort)(headerBytes[0] | headerBytes[1] << 8);
    if (headerSize < 1)
    {
        return Array.Empty<byte>();
    }
    var body = new byte[headerSize];
    if (await ReadFullyAsync(stream, body, headerSize, cancellationToken) < headerSize)
    {
        return Array.Empty<byte>();
    }
    // Read and verify the stored CRC32. A truncated CRC is treated as EOF
    // (consistent with the other truncation paths) rather than reported as
    // a misleading checksum failure.
    byte[] crc = new byte[4];
    if (await ReadFullyAsync(stream, crc, 4, cancellationToken) != 4)
    {
        return Array.Empty<byte>();
    }
    var checksum = Crc32Stream.Compute(body);
    if (checksum != BitConverter.ToUInt32(crc, 0))
    {
        throw new InvalidDataException("Header checksum is invalid");
    }
    return body;
}
/// <summary>
/// Asynchronously reads the chain of ARJ extended headers, validating each
/// block's CRC32. The chain ends at a zero-length size field.
/// </summary>
/// <exception cref="EndOfStreamException">Stream ends mid-chain.</exception>
/// <exception cref="InvalidDataException">Extended header CRC mismatch.</exception>
protected async ValueTask<List<byte[]>> ReadExtendedHeadersAsync(
    Stream reader,
    CancellationToken cancellationToken = default
)
{
    // Stream.ReadAsync may return fewer bytes than requested without being at
    // EOF; loop so a short-but-successful read is not misreported as truncation.
    static async ValueTask<int> ReadFullyAsync(
        Stream s,
        byte[] buffer,
        int count,
        CancellationToken ct
    )
    {
        var total = 0;
        while (total < count)
        {
            var read = await s.ReadAsync(buffer, total, count - total, ct);
            if (read == 0)
            {
                break; // EOF
            }
            total += read;
        }
        return total;
    }

    List<byte[]> extendedHeader = new List<byte[]>();
    byte[] buffer = new byte[2];
    while (true)
    {
        if (await ReadFullyAsync(reader, buffer, 2, cancellationToken) < 2)
        {
            throw new EndOfStreamException(
                "Unexpected end of stream while reading extended header size."
            );
        }
        var extHeaderSize = (ushort)(buffer[0] | (buffer[1] << 8));
        if (extHeaderSize == 0)
        {
            // Zero size terminates the extended-header chain.
            return extendedHeader;
        }
        byte[] header = new byte[extHeaderSize];
        if (await ReadFullyAsync(reader, header, extHeaderSize, cancellationToken) < extHeaderSize)
        {
            throw new EndOfStreamException(
                "Unexpected end of stream while reading extended header data."
            );
        }
        byte[] crc = new byte[4];
        if (await ReadFullyAsync(reader, crc, 4, cancellationToken) < 4)
        {
            throw new EndOfStreamException(
                "Unexpected end of stream while reading extended header CRC."
            );
        }
        var checksum = Crc32Stream.Compute(header);
        if (checksum != BitConverter.ToUInt32(crc, 0))
        {
            throw new InvalidDataException("Extended header checksum is invalid");
        }
        extendedHeader.Add(header);
    }
}
protected List<byte[]> ReadExtendedHeaders(Stream reader)
{
List<byte[]> extendedHeader = new List<byte[]>();
@@ -149,6 +251,26 @@ namespace SharpCompress.Common.Arj.Headers
return CheckMagicBytes(bytes);
}
/// <summary>
/// Asynchronously checks if the stream is an ARJ archive
/// </summary>
/// <param name="stream">The stream to read from</param>
/// <param name="cancellationToken">Cancellation token</param>
/// <returns>True if the stream is an ARJ archive, false otherwise</returns>
public static async ValueTask<bool> IsArchiveAsync(
    Stream stream,
    CancellationToken cancellationToken = default
)
{
    // ARJ archives start with a 2-byte magic value. Loop the read because
    // ReadAsync may return fewer bytes than requested even mid-stream.
    var bytes = new byte[2];
    var total = 0;
    while (total < bytes.Length)
    {
        var read = await stream.ReadAsync(
            bytes,
            total,
            bytes.Length - total,
            cancellationToken
        );
        if (read == 0)
        {
            // Fewer than 2 bytes available: not an ARJ archive.
            return false;
        }
        total += read;
    }
    return CheckMagicBytes(bytes);
}
protected static bool CheckMagicBytes(byte[] headerBytes)
{
var magicValue = (ushort)(headerBytes[0] | headerBytes[1] << 8);

View File

@@ -4,6 +4,7 @@ using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Arj.Headers
@@ -55,6 +56,22 @@ namespace SharpCompress.Common.Arj.Headers
return null;
}
/// <summary>
/// Asynchronously reads a local ARJ file header; returns null at end of archive.
/// </summary>
public override async ValueTask<ArjHeader?> ReadAsync(
    Stream stream,
    CancellationToken cancellationToken = default
)
{
    var body = await ReadHeaderAsync(stream, cancellationToken);
    // An empty body signals the end-of-archive marker.
    if (body.Length == 0)
    {
        return null;
    }
    // Consume the extended-header chain so the stream lands on the entry data.
    await ReadExtendedHeadersAsync(stream, cancellationToken);
    var header = LoadFrom(body);
    // NOTE(review): assumes a seekable stream for Position — confirm for
    // async-only sources.
    header.DataStartPosition = stream.Position;
    return header;
}
public ArjLocalHeader LoadFrom(byte[] headerBytes)
{
int offset = 0;

View File

@@ -1,6 +1,8 @@
using System;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Crypto;
@@ -45,6 +47,16 @@ namespace SharpCompress.Common.Arj.Headers
return LoadFrom(body);
}
/// <summary>
/// Asynchronously reads the main ARJ archive header; returns null at end of
/// stream or for an empty header.
/// </summary>
public override async ValueTask<ArjHeader?> ReadAsync(
    Stream stream,
    CancellationToken cancellationToken = default
)
{
    var body = await ReadHeaderAsync(stream, cancellationToken);
    // ReadHeaderAsync returns an empty array at EOF / zero-size header.
    // Guard before touching the extended headers (which would throw
    // EndOfStreamException) or LoadFrom (which would fail on an empty buffer).
    // Mirrors ArjLocalHeader.ReadAsync.
    if (body.Length == 0)
    {
        return null;
    }
    await ReadExtendedHeadersAsync(stream, cancellationToken);
    return LoadFrom(body);
}
public ArjMainHeader LoadFrom(byte[] headerBytes)
{
var offset = 1;

View File

@@ -0,0 +1,49 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.Rar;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar;
/// <summary>
/// Async binary reader that folds every byte it returns into a running CRC32,
/// mirroring the synchronous RarCrcBinaryReader.
/// </summary>
internal class AsyncRarCrcBinaryReader : AsyncMarkingBinaryReader
{
    // Running CRC32 state; callers invoke ResetCrc() before a checked region.
    private uint _currentCrc;

    public AsyncRarCrcBinaryReader(Stream stream, CancellationToken ct = default)
        : base(stream, ct) { }

    // The reported CRC32 is the one's complement of the running state.
    public uint GetCrc32() => ~_currentCrc;

    public void ResetCrc() => _currentCrc = 0xffffffff;

    protected void UpdateCrc(byte b) => _currentCrc = RarCRC.CheckCrc(_currentCrc, b);

    protected void UpdateCrc(byte[] bytes, int offset, int count) =>
        _currentCrc = RarCRC.CheckCrc(_currentCrc, bytes, offset, count);

    // Reads raw bytes WITHOUT folding them into the CRC (e.g. salts), while
    // still advancing the logical read count.
    protected async ValueTask<byte[]> ReadBytesNoCrcAsync(int count, CancellationToken ct = default)
    {
        CurrentReadByteCount += count;
        var raw = new byte[count];
        await BaseStream.ReadExactAsync(raw, 0, count, ct).ConfigureAwait(false);
        return raw;
    }

    public override async ValueTask<byte> ReadByteAsync(CancellationToken ct = default)
    {
        var value = await base.ReadByteAsync(ct).ConfigureAwait(false);
        UpdateCrc(value);
        return value;
    }

    public override async ValueTask<byte[]> ReadBytesAsync(
        int count,
        CancellationToken ct = default
    )
    {
        var values = await base.ReadBytesAsync(count, ct).ConfigureAwait(false);
        UpdateCrc(values, 0, values.Length);
        return values;
    }
}

View File

@@ -0,0 +1,98 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Crypto;
namespace SharpCompress.Common.Rar;
/// <summary>
/// Async reader that transparently AES-decrypts the underlying stream in
/// 16-byte blocks, buffering surplus plaintext in a queue, while tracking the
/// logical (plaintext) read count and feeding the CRC from decrypted bytes.
/// </summary>
internal sealed class AsyncRarCryptoBinaryReader : AsyncRarCrcBinaryReader
{
    // Decrypted bytes read ahead of the caller (AES works on 16-byte blocks).
    private readonly Queue<byte> _data = new();
    // Count of plaintext bytes handed out (not raw stream bytes consumed).
    private long _readCount;
    private BlockTransformer? _rijndael;

    public AsyncRarCryptoBinaryReader(
        Stream stream,
        ICryptKey cryptKey,
        CancellationToken ct = default
    )
        : base(stream, ct)
    {
        // NOTE(review): sync-over-async — blocks the calling thread until the
        // salt is read. Consider an async factory method instead.
        var salt = base.ReadBytesNoCrcAsync(EncryptionConstV5.SIZE_SALT30, ct)
            .GetAwaiter()
            .GetResult();
        _readCount += EncryptionConstV5.SIZE_SALT30;
        _rijndael = new BlockTransformer(cryptKey.Transformer(salt));
    }

    // Overload for callers that already hold the salt.
    public AsyncRarCryptoBinaryReader(Stream stream, ICryptKey cryptKey, byte[] salt)
        : base(stream) => _rijndael = new BlockTransformer(cryptKey.Transformer(salt));

    // Reports plaintext bytes consumed; the setter is intentionally a no-op
    // because the base class's raw-byte accounting does not apply here.
    public override long CurrentReadByteCount
    {
        get => _readCount;
        protected set
        {
            // ignore
        }
    }

    public override void Mark() => _readCount = 0;

    public override async ValueTask<byte> ReadByteAsync(CancellationToken ct = default)
    {
        var result = await ReadAndDecryptBytesAsync(1, ct).ConfigureAwait(false);
        return result[0];
    }

    public override async ValueTask<byte[]> ReadBytesAsync(
        int count,
        CancellationToken ct = default
    ) => await ReadAndDecryptBytesAsync(count, ct).ConfigureAwait(false);

    // Reads enough whole cipher blocks to satisfy `count` plaintext bytes,
    // queueing any surplus for the next call.
    private async ValueTask<byte[]> ReadAndDecryptBytesAsync(
        int count,
        CancellationToken ct = default
    )
    {
        var queueSize = _data.Count;
        var sizeToRead = count - queueSize;
        if (sizeToRead > 0)
        {
            // Round the outstanding byte count up to the 16-byte block size.
            var alignedSize = sizeToRead + ((~sizeToRead + 1) & 0xf);
            for (var i = 0; i < alignedSize / 16; i++)
            {
                var cipherText = await ReadBytesNoCrcAsync(16, ct).ConfigureAwait(false);
                var readBytes = _rijndael!.ProcessBlock(cipherText);
                foreach (var readByte in readBytes)
                {
                    _data.Enqueue(readByte);
                }
            }
        }
        // Hand out exactly `count` plaintext bytes, folding them into the CRC.
        var decryptedBytes = new byte[count];
        for (var i = 0; i < count; i++)
        {
            var b = _data.Dequeue();
            decryptedBytes[i] = b;
            UpdateCrc(b);
        }
        _readCount += count;
        return decryptedBytes;
    }

    public void ClearQueue() => _data.Clear();

    // Discards buffered plaintext by advancing the raw stream position.
    // NOTE(review): assumes a seekable stream — confirm for async-only sources.
    public void SkipQueue()
    {
        var position = BaseStream.Position;
        BaseStream.Position = position + _data.Count;
        ClearQueue();
    }
}

View File

@@ -1,5 +1,8 @@
#nullable disable
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers;
@@ -13,4 +16,14 @@ internal class ArchiveCryptHeader : RarHeader
protected override void ReadFinish(MarkingBinaryReader reader) =>
CryptInfo = new Rar5CryptoInfo(reader, false);
// Async counterpart of ReadFinish: the RAR5 crypto block immediately
// follows the common header fields.
protected override async ValueTask ReadFinishAsync(
    AsyncMarkingBinaryReader reader,
    CancellationToken cancellationToken = default
)
{
    var cryptoInfo = await Rar5CryptoInfo
        .CreateAsync(reader, false, cancellationToken)
        .ConfigureAwait(false);
    CryptInfo = cryptoInfo;
}
}

View File

@@ -1,3 +1,6 @@
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers;
@@ -33,6 +36,37 @@ internal sealed class ArchiveHeader : RarHeader
}
}
// Async counterpart of ReadFinish: parses the archive-header tail for the
// detected format version.
protected override async ValueTask ReadFinishAsync(
    AsyncMarkingBinaryReader reader,
    CancellationToken cancellationToken = default
)
{
    if (!IsRar5)
    {
        // RAR4: flags come from the fixed header; AV position fields follow.
        Flags = HeaderFlags;
        HighPosAv = await reader.ReadInt16Async(cancellationToken).ConfigureAwait(false);
        PosAv = await reader.ReadInt32Async(cancellationToken).ConfigureAwait(false);
        if (HasFlag(ArchiveFlagsV4.ENCRYPT_VER))
        {
            EncryptionVersion = await reader
                .ReadByteAsync(cancellationToken)
                .ConfigureAwait(false);
        }
        return;
    }

    // RAR5: flags are a vint; an optional volume number follows.
    Flags = await reader.ReadRarVIntUInt16Async().ConfigureAwait(false);
    if (HasFlag(ArchiveFlagsV5.HAS_VOLUME_NUMBER))
    {
        VolumeNumber = (int)await reader.ReadRarVIntUInt32Async().ConfigureAwait(false);
    }
    // later: we may have a locator record if we need it
    //if (ExtraSize != 0) {
    //    ReadLocator(reader);
    //}
}
private ushort Flags { get; set; }
private bool HasFlag(ushort flag) => (Flags & flag) == flag;

View File

@@ -1,4 +1,6 @@
using SharpCompress.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers;
@@ -27,6 +29,29 @@ internal class EndArchiveHeader : RarHeader
}
}
// Async counterpart of ReadFinish: parses the end-of-archive record tail.
protected override async ValueTask ReadFinishAsync(
    AsyncMarkingBinaryReader reader,
    CancellationToken cancellationToken = default
)
{
    if (!IsRar5)
    {
        // RAR4: flags live in the fixed header; optional data CRC and volume
        // number follow depending on those flags.
        Flags = HeaderFlags;
        if (HasFlag(EndArchiveFlagsV4.DATA_CRC))
        {
            ArchiveCrc = await reader.ReadInt32Async(cancellationToken).ConfigureAwait(false);
        }
        if (HasFlag(EndArchiveFlagsV4.VOLUME_NUMBER))
        {
            VolumeNumber = await reader.ReadInt16Async(cancellationToken).ConfigureAwait(false);
        }
        return;
    }

    // RAR5: the end-of-archive record only carries a flags vint.
    Flags = await reader.ReadRarVIntUInt16Async().ConfigureAwait(false);
}
private ushort Flags { get; set; }
private bool HasFlag(ushort flag) => (Flags & flag) == flag;

View File

@@ -2,6 +2,8 @@ using System;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
#if !Rar2017_64bit
using size_t = System.UInt32;
@@ -32,6 +34,33 @@ internal class FileHeader : RarHeader
}
}
// Builds a FileHeader from the already-parsed common header, then finishes
// reading its variable-length, format-specific tail asynchronously.
public static new async ValueTask<FileHeader> CreateAsync(
    RarHeader header,
    AsyncRarCrcBinaryReader reader,
    HeaderType headerType,
    CancellationToken cancellationToken = default
)
{
    var fileHeader = new FileHeader(header, reader, headerType);
    await fileHeader.ReadFinishAsync(reader, cancellationToken).ConfigureAwait(false);
    return fileHeader;
}
// Dispatches to the format-specific async parser for the header tail.
protected override async ValueTask ReadFinishAsync(
    AsyncMarkingBinaryReader reader,
    CancellationToken cancellationToken = default
)
{
    var parse = IsRar5
        ? ReadFromReaderV5Async(reader, cancellationToken)
        : ReadFromReaderV4Async(reader, cancellationToken);
    await parse.ConfigureAwait(false);
}
private void ReadFromReaderV5(MarkingBinaryReader reader)
{
Flags = reader.ReadRarVIntUInt16();
@@ -205,6 +234,162 @@ internal class FileHeader : RarHeader
}
}
// Parses the RAR5 file-header fields and the extra-area records that follow
// the common header (crypt, hash, high-precision times, redirection).
private async Task ReadFromReaderV5Async(
    AsyncMarkingBinaryReader reader,
    CancellationToken cancellationToken = default
)
{
    Flags = await reader.ReadRarVIntUInt16Async().ConfigureAwait(false);
    var lvalue = checked((long)await reader.ReadRarVIntAsync().ConfigureAwait(false));
    UncompressedSize = HasFlag(FileFlagsV5.UNPACKED_SIZE_UNKNOWN) ? long.MaxValue : lvalue;
    FileAttributes = await reader.ReadRarVIntUInt32Async().ConfigureAwait(false);
    if (HasFlag(FileFlagsV5.HAS_MOD_TIME))
    {
        var value = await reader.ReadUInt32Async(cancellationToken).ConfigureAwait(false);
        FileLastModifiedTime = Utility.UnixTimeToDateTime(value);
    }
    if (HasFlag(FileFlagsV5.HAS_CRC32))
    {
        FileCrc = await reader.ReadBytesAsync(4, cancellationToken).ConfigureAwait(false);
    }
    // Compression info: low 6 bits = version (+50 legacy offset), 0x40 = solid,
    // bits 7-9 = method, bits 10-13 = dictionary size exponent.
    var compressionInfo = await reader.ReadRarVIntUInt16Async().ConfigureAwait(false);
    CompressionAlgorithm = (byte)((compressionInfo & 0x3f) + 50);
    IsSolid = (compressionInfo & 0x40) == 0x40;
    CompressionMethod = (byte)((compressionInfo >> 7) & 0x7);
    WindowSize = IsDirectory ? 0 : ((size_t)0x20000) << ((compressionInfo >> 10) & 0xf);
    HostOs = await reader.ReadRarVIntByteAsync().ConfigureAwait(false);
    var nameSize = await reader.ReadRarVIntUInt16Async().ConfigureAwait(false);
    var b = await reader.ReadBytesAsync(nameSize, cancellationToken).ConfigureAwait(false);
    FileName = ConvertPathV5(Encoding.UTF8.GetString(b, 0, b.Length));
    if (ExtraSize != RemainingHeaderBytes(reader))
    {
        throw new InvalidFormatException("rar5 header size / extra size inconsistency");
    }
    const ushort FHEXTRA_CRYPT = 0x01;
    const ushort FHEXTRA_HASH = 0x02;
    const ushort FHEXTRA_HTIME = 0x03;
    const ushort FHEXTRA_REDIR = 0x05;
    while (RemainingHeaderBytes(reader) > 0)
    {
        var size = await reader.ReadRarVIntUInt16Async().ConfigureAwait(false);
        var n = RemainingHeaderBytes(reader);
        var type = await reader.ReadRarVIntUInt16Async().ConfigureAwait(false);
        switch (type)
        {
            case FHEXTRA_CRYPT:
            {
                // Use the async crypto-info factory (consistent with
                // ArchiveCryptHeader.ReadFinishAsync); the synchronous
                // constructor cannot consume an async reader.
                Rar5CryptoInfo = await Rar5CryptoInfo
                    .CreateAsync(reader, true, cancellationToken)
                    .ConfigureAwait(false);
                if (Rar5CryptoInfo.PswCheck.All(singleByte => singleByte == 0))
                {
                    Rar5CryptoInfo = null;
                }
            }
            break;
            case FHEXTRA_HASH:
            {
                const uint FHEXTRA_HASH_BLAKE2 = 0x0;
                const int BLAKE2_DIGEST_SIZE = 0x20;
                if (
                    (uint)await reader.ReadRarVIntAsync().ConfigureAwait(false)
                    == FHEXTRA_HASH_BLAKE2
                )
                {
                    _hash = await reader
                        .ReadBytesAsync(BLAKE2_DIGEST_SIZE, cancellationToken)
                        .ConfigureAwait(false);
                }
            }
            break;
            case FHEXTRA_HTIME:
            {
                // Flag bit 0 selects Unix vs Windows time encoding; bits
                // 1-3 mark which of mtime/ctime/atime are present.
                var flags = await reader.ReadRarVIntUInt16Async().ConfigureAwait(false);
                var isWindowsTime = (flags & 1) == 0;
                if ((flags & 0x2) == 0x2)
                {
                    FileLastModifiedTime = await ReadExtendedTimeV5Async(
                        reader,
                        isWindowsTime,
                        cancellationToken
                    ).ConfigureAwait(false);
                }
                if ((flags & 0x4) == 0x4)
                {
                    FileCreatedTime = await ReadExtendedTimeV5Async(
                        reader,
                        isWindowsTime,
                        cancellationToken
                    ).ConfigureAwait(false);
                }
                if ((flags & 0x8) == 0x8)
                {
                    FileLastAccessedTime = await ReadExtendedTimeV5Async(
                        reader,
                        isWindowsTime,
                        cancellationToken
                    ).ConfigureAwait(false);
                }
            }
            break;
            case FHEXTRA_REDIR:
            {
                RedirType = await reader.ReadRarVIntByteAsync().ConfigureAwait(false);
                RedirFlags = await reader.ReadRarVIntByteAsync().ConfigureAwait(false);
                var nn = await reader.ReadRarVIntUInt16Async().ConfigureAwait(false);
                var bb = await reader
                    .ReadBytesAsync(nn, cancellationToken)
                    .ConfigureAwait(false);
                RedirTargetName = ConvertPathV5(Encoding.UTF8.GetString(bb, 0, bb.Length));
            }
            break;
            default:
                break;
        }
        // Drain any unparsed remainder of this extra record so the next
        // record starts at the right offset.
        var did = n - RemainingHeaderBytes(reader);
        var drain = size - did;
        if (drain > 0)
        {
            await reader.ReadBytesAsync(drain, cancellationToken).ConfigureAwait(false);
        }
    }
    if (AdditionalDataSize != 0)
    {
        CompressedSize = AdditionalDataSize;
    }
}
// Reads one RAR5 extended time value: a 64-bit Windows FILETIME when
// isWindowsTime is set, otherwise a 32-bit Unix timestamp.
private static async ValueTask<DateTime> ReadExtendedTimeV5Async(
    AsyncMarkingBinaryReader reader,
    bool isWindowsTime,
    CancellationToken cancellationToken = default
)
{
    if (!isWindowsTime)
    {
        var unixSeconds = await reader.ReadUInt32Async(cancellationToken).ConfigureAwait(false);
        return Utility.UnixTimeToDateTime(unixSeconds);
    }
    var fileTime = await reader.ReadInt64Async(cancellationToken).ConfigureAwait(false);
    return DateTime.FromFileTime(fileTime);
}
private static DateTime ReadExtendedTimeV5(MarkingBinaryReader reader, bool isWindowsTime)
{
if (isWindowsTime)
@@ -217,6 +402,169 @@ internal class FileHeader : RarHeader
}
}
// Parses the RAR4 (v2.x/3.x) file-header fields that follow the common header.
private async Task ReadFromReaderV4Async(
    AsyncMarkingBinaryReader reader,
    CancellationToken cancellationToken = default
)
{
    Flags = HeaderFlags;
    IsSolid = HasFlag(FileFlagsV4.SOLID);
    // Dictionary size is encoded in flag bits 5-7; directories use 0.
    WindowSize = IsDirectory
        ? 0U
        : ((size_t)0x10000) << ((Flags & FileFlagsV4.WINDOW_MASK) >> 5);
    var lowUncompressedSize = await reader
        .ReadUInt32Async(cancellationToken)
        .ConfigureAwait(false);
    HostOs = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
    FileCrc = await reader.ReadBytesAsync(4, cancellationToken).ConfigureAwait(false);
    // Modification time is stored in DOS date/time format.
    var value = await reader.ReadUInt32Async(cancellationToken).ConfigureAwait(false);
    FileLastModifiedTime = Utility.DosDateToDateTime(value);
    CompressionAlgorithm = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
    // Method byte is ASCII-offset: stored as 0x30 + method number.
    var value1 = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
    CompressionMethod = (byte)(value1 - 0x30);
    var nameSize = await reader.ReadInt16Async(cancellationToken).ConfigureAwait(false);
    FileAttributes = await reader.ReadUInt32Async(cancellationToken).ConfigureAwait(false);
    uint highCompressedSize = 0;
    uint highUncompressedkSize = 0;
    // LARGE flag adds 32-bit high words for sizes >= 4 GiB.
    if (HasFlag(FileFlagsV4.LARGE))
    {
        highCompressedSize = await reader
            .ReadUInt32Async(cancellationToken)
            .ConfigureAwait(false);
        highUncompressedkSize = await reader
            .ReadUInt32Async(cancellationToken)
            .ConfigureAwait(false);
    }
    else
    {
        // 0xffffffff marks "size unknown" in older archives.
        if (lowUncompressedSize == 0xffffffff)
        {
            lowUncompressedSize = 0xffffffff;
            highUncompressedkSize = int.MaxValue;
        }
    }
    CompressedSize = UInt32To64(highCompressedSize, checked((uint)AdditionalDataSize));
    UncompressedSize = UInt32To64(highUncompressedkSize, lowUncompressedSize);
    // Clamp the name length to 4 KiB as a hardening measure.
    nameSize = nameSize > 4 * 1024 ? (short)(4 * 1024) : nameSize;
    var fileNameBytes = await reader
        .ReadBytesAsync(nameSize, cancellationToken)
        .ConfigureAwait(false);
    const int newLhdSize = 32;
    switch (HeaderCode)
    {
        case HeaderCodeV.RAR4_FILE_HEADER:
        {
            if (HasFlag(FileFlagsV4.UNICODE))
            {
                // Name buffer holds an ANSI name, a NUL, then an encoded
                // Unicode name; find the NUL separator first.
                var length = 0;
                while (length < fileNameBytes.Length && fileNameBytes[length] != 0)
                {
                    length++;
                }
                if (length != nameSize)
                {
                    length++;
                }
                FileName = FileNameDecoder.Decode(fileNameBytes, length);
            }
            else
            {
                FileName = ArchiveEncoding.Decode(fileNameBytes);
            }
            FileName = ConvertPathV4(FileName);
        }
        break;
        case HeaderCodeV.RAR4_NEW_SUB_HEADER:
        {
            // Sub-header payload: whatever remains after the fixed fields,
            // the name, and (if present) the salt.
            var datasize = HeaderSize - newLhdSize - nameSize;
            if (HasFlag(FileFlagsV4.SALT))
            {
                datasize -= EncryptionConstV5.SIZE_SALT30;
            }
            if (datasize > 0)
            {
                SubData = await reader
                    .ReadBytesAsync(datasize, cancellationToken)
                    .ConfigureAwait(false);
            }
            // Recovery-record sub-header: sector count is a little-endian
            // 32-bit value at payload offset 8.
            if (NewSubHeaderType.SUBHEAD_TYPE_RR.Equals(fileNameBytes))
            {
                if (SubData is null)
                {
                    throw new InvalidFormatException();
                }
                RecoverySectors =
                    SubData[8]
                    + (SubData[9] << 8)
                    + (SubData[10] << 16)
                    + (SubData[11] << 24);
            }
        }
        break;
    }
    if (HasFlag(FileFlagsV4.SALT))
    {
        R4Salt = await reader
            .ReadBytesAsync(EncryptionConstV5.SIZE_SALT30, cancellationToken)
            .ConfigureAwait(false);
    }
    // EXT_TIME block: optional high-precision mtime/ctime/atime/archive-time.
    if (HasFlag(FileFlagsV4.EXT_TIME))
    {
        if (RemainingHeaderBytes(reader) >= 2)
        {
            var extendedFlags = await reader
                .ReadUInt16Async(cancellationToken)
                .ConfigureAwait(false);
            if (FileLastModifiedTime is null)
            {
                FileLastModifiedTime = await ProcessExtendedTimeV4Async(
                    extendedFlags,
                    FileLastModifiedTime,
                    reader,
                    0,
                    cancellationToken
                );
            }
            FileCreatedTime = await ProcessExtendedTimeV4Async(
                extendedFlags,
                null,
                reader,
                1,
                cancellationToken
            );
            FileLastAccessedTime = await ProcessExtendedTimeV4Async(
                extendedFlags,
                null,
                reader,
                2,
                cancellationToken
            );
            FileArchivedTime = await ProcessExtendedTimeV4Async(
                extendedFlags,
                null,
                reader,
                3,
                cancellationToken
            );
        }
    }
}
private static string ConvertPathV5(string path)
{
if (Path.DirectorySeparatorChar == '\\')
@@ -365,6 +713,43 @@ internal class FileHeader : RarHeader
return l + y;
}
// Decodes one RAR4 EXT_TIME field (i: 0=mtime, 1=ctime, 2=atime, 3=arctime).
// Each field has a 4-bit mode nibble in extendedFlags (high nibble = i 0):
// bit 8 = field present, low 2 bits = count of extra precision bytes.
// Returns null when the field is absent or no base time is available.
private static async ValueTask<DateTime?> ProcessExtendedTimeV4Async(
    ushort extendedFlags,
    DateTime? time,
    AsyncMarkingBinaryReader reader,
    int i,
    CancellationToken cancellationToken = default
)
{
    var rmode = (uint)extendedFlags >> ((3 - i) * 4);
    if ((rmode & 8) == 0)
    {
        // Field not present in the stream.
        return null;
    }
    if (i != 0)
    {
        // mtime (i == 0) was already read from the fixed header; the other
        // fields carry their own DOS date/time here.
        var dosTime = await reader.ReadUInt32Async(cancellationToken).ConfigureAwait(false);
        time = Utility.DosDateToDateTime(dosTime);
    }
    if ((rmode & 4) == 0 && time is not null)
    {
        // DOS times have 2-second resolution; presumably this bit selects
        // the rounding direction — verify against the RAR4 format notes.
        time = time.Value.AddSeconds(1);
    }
    uint nanosecondHundreds = 0;
    var count = (int)rmode & 3;
    // Up to 3 extra bytes of 100ns-unit precision, most significant first.
    for (var j = 0; j < count; j++)
    {
        var b = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
        nanosecondHundreds |= (((uint)b) << ((j + 3 - count) * 8));
    }
    if (time is not null)
    {
        // 100ns units -> milliseconds (factor 1e-4).
        return time.Value.AddMilliseconds(nanosecondHundreds * Math.Pow(10, -4));
    }
    return null;
}
private static DateTime? ProcessExtendedTimeV4(
ushort extendedFlags,
DateTime? time,

View File

@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Rar.Headers;
@@ -25,6 +27,19 @@ internal class MarkHeader : IRarHeader
throw new EndOfStreamException();
}
private static async Task<byte> GetByteAsync(Stream stream, CancellationToken cancellationToken)
{
var buffer = new byte[1];
var bytesRead = await stream
.ReadAsync(buffer, 0, 1, cancellationToken)
.ConfigureAwait(false);
if (bytesRead == 1)
{
return buffer[0];
}
throw new EndOfStreamException();
}
public static MarkHeader Read(Stream stream, bool leaveStreamOpen, bool lookForHeader)
{
var maxScanIndex = lookForHeader ? MAX_SFX_SIZE : 0;
@@ -129,4 +144,111 @@ internal class MarkHeader : IRarHeader
throw new InvalidFormatException("Rar signature not found");
}
/// <summary>
/// Asynchronously scans the stream for the RAR signature and returns a
/// MarkHeader identifying the format. Signature bytes:
/// 52 61 72 21 1a 07 followed by 01 00 (RAR5) or 00 (RAR4).
/// When lookForHeader is set, up to MAX_SFX_SIZE bytes of self-extractor
/// stub are scanned before giving up.
/// </summary>
public static async ValueTask<MarkHeader> ReadAsync(
    Stream stream,
    bool leaveStreamOpen,
    bool lookForHeader,
    CancellationToken cancellationToken = default
)
{
    var maxScanIndex = lookForHeader ? MAX_SFX_SIZE : 0;
    try
    {
        var start = -1;
        var b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
        start++;
        while (start <= maxScanIndex)
        {
            if (b == 0x52) // 'R' — possible start of "Rar!" signature
            {
                b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
                start++;
                if (b == 0x61) // 'a'
                {
                    b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
                    start++;
                    if (b != 0x72) // 'r'
                    {
                        continue;
                    }
                    b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
                    start++;
                    if (b != 0x21) // '!'
                    {
                        continue;
                    }
                    b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
                    start++;
                    if (b != 0x1a)
                    {
                        continue;
                    }
                    b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
                    start++;
                    if (b != 0x07)
                    {
                        continue;
                    }
                    b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
                    start++;
                    if (b == 1)
                    {
                        // Version byte 01 must be followed by 00 for RAR5.
                        b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
                        start++;
                        if (b != 0)
                        {
                            continue;
                        }
                        return new MarkHeader(true); // Rar5
                    }
                    else if (b == 0)
                    {
                        return new MarkHeader(false); // Rar4
                    }
                }
                else if (b == 0x45) // 'E' — "RE~^" marks the ancient pre-4 format
                {
                    b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
                    start++;
                    if (b != 0x7e)
                    {
                        continue;
                    }
                    b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
                    start++;
                    if (b != 0x5e)
                    {
                        continue;
                    }
                    throw new InvalidFormatException(
                        "Rar format version pre-4 is unsupported."
                    );
                }
            }
            else
            {
                // Not a candidate byte: advance the scan window by one.
                b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
                start++;
            }
        }
    }
    catch (Exception e)
    {
        // Includes EndOfStreamException from GetByteAsync when the stream
        // runs out before a signature is found.
        if (!leaveStreamOpen)
        {
            stream.Dispose();
        }
        throw new InvalidFormatException("Error trying to read rar signature.", e);
    }
    throw new InvalidFormatException("Rar signature not found");
}
}

View File

@@ -1,3 +1,6 @@
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers;
@@ -22,6 +25,17 @@ internal sealed class ProtectHeader : RarHeader
Mark = reader.ReadBytes(8);
}
// Asynchronously reads the RAR4 protect (recovery record) header body:
// version, recovery sector count, total block count and an 8-byte mark.
// The read order mirrors the sync ReadFinish above and must match the
// on-disk field layout exactly.
protected override async ValueTask ReadFinishAsync(
    AsyncMarkingBinaryReader reader,
    CancellationToken cancellationToken = default
)
{
    Version = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
    RecSectors = await reader.ReadUInt16Async(cancellationToken).ConfigureAwait(false);
    TotalBlocks = await reader.ReadUInt32Async(cancellationToken).ConfigureAwait(false);
    Mark = await reader.ReadBytesAsync(8, cancellationToken).ConfigureAwait(false);
}
internal uint DataSize => checked((uint)AdditionalDataSize);
internal byte Version { get; private set; }
internal ushort RecSectors { get; private set; }

View File

@@ -1,4 +1,7 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers;
@@ -18,7 +21,9 @@ internal class RarHeader : IRarHeader
{
try
{
return new RarHeader(reader, isRar5, archiveEncoding);
var header = new RarHeader(isRar5, archiveEncoding);
header.Initialize(reader);
return header;
}
catch (InvalidFormatException)
{
@@ -26,11 +31,91 @@ internal class RarHeader : IRarHeader
}
}
private RarHeader(RarCrcBinaryReader reader, bool isRar5, IArchiveEncoding archiveEncoding)
// Same as CreateBaseAsync, but a malformed header yields null instead of
// letting InvalidFormatException propagate (used to detect end of input).
internal static async ValueTask<RarHeader?> TryReadBaseAsync(
    AsyncRarCrcBinaryReader reader,
    bool isRar5,
    IArchiveEncoding archiveEncoding,
    CancellationToken cancellationToken = default
)
{
    RarHeader? header;
    try
    {
        header = await CreateBaseAsync(reader, isRar5, archiveEncoding, cancellationToken)
            .ConfigureAwait(false);
    }
    catch (InvalidFormatException)
    {
        header = null;
    }
    return header;
}
// Reads the common (base) part of any RAR header: CRC, size, header code,
// flags, and the optional extra/data sizes. The layout differs completely
// between RAR4 (fixed fields) and RAR5 (variable-length integers).
private static async ValueTask<RarHeader> CreateBaseAsync(
    AsyncRarCrcBinaryReader reader,
    bool isRar5,
    IArchiveEncoding archiveEncoding,
    CancellationToken cancellationToken = default
)
{
    var header = new RarHeader(HeaderType.Null, isRar5) { ArchiveEncoding = archiveEncoding };
    if (isRar5)
    {
        // RAR5: CRC32 of the remaining header, then a vint header size.
        header.HeaderCrc = await reader
            .ReadUInt32Async(cancellationToken)
            .ConfigureAwait(false);
        reader.ResetCrc();
        header.HeaderSize = (int)
            await reader.ReadRarVIntUInt32Async(3, cancellationToken).ConfigureAwait(false);
        // HeaderSize counts bytes from this mark onward.
        reader.Mark();
        // NOTE(review): several vint reads below do not pass cancellationToken
        // (unlike ReadRarVIntUInt32Async(3, token) above) — confirm whether
        // those overloads accept one.
        header.HeaderCode = await reader.ReadRarVIntByteAsync(2).ConfigureAwait(false);
        header.HeaderFlags = await reader
            .ReadRarVIntUInt16Async(2, cancellationToken)
            .ConfigureAwait(false);
        if (header.HasHeaderFlag(HeaderFlagsV5.HAS_EXTRA))
        {
            header.ExtraSize = await reader.ReadRarVIntUInt32Async().ConfigureAwait(false);
        }
        if (header.HasHeaderFlag(HeaderFlagsV5.HAS_DATA))
        {
            header.AdditionalDataSize = (long)
                await reader.ReadRarVIntAsync().ConfigureAwait(false);
        }
    }
    else
    {
        // RAR4: fixed-layout prefix; a CRC16 is stored first.
        reader.Mark();
        header.HeaderCrc = await reader
            .ReadUInt16Async(cancellationToken)
            .ConfigureAwait(false);
        reader.ResetCrc();
        header.HeaderCode = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
        header.HeaderFlags = await reader
            .ReadUInt16Async(cancellationToken)
            .ConfigureAwait(false);
        header.HeaderSize = await reader
            .ReadInt16Async(cancellationToken)
            .ConfigureAwait(false);
        if (header.HasHeaderFlag(HeaderFlagsV4.HAS_DATA))
        {
            header.AdditionalDataSize = await reader
                .ReadUInt32Async(cancellationToken)
                .ConfigureAwait(false);
        }
    }
    return header;
}
// Base-reader constructor: the header starts life as HeaderType.Null;
// Initialize() then populates the common fields from the stream.
private RarHeader(bool isRar5, IArchiveEncoding archiveEncoding)
{
    ArchiveEncoding = archiveEncoding;
    _isRar5 = isRar5;
    _headerType = HeaderType.Null;
}
private void Initialize(RarCrcBinaryReader reader)
{
if (IsRar5)
{
HeaderCrc = reader.ReadUInt32();
@@ -64,6 +149,14 @@ internal class RarHeader : IRarHeader
}
}
// Used when materialising a concrete header from an already-parsed base
// header; ArchiveEncoding is assigned afterwards by the caller.
private RarHeader(HeaderType headerType, bool isRar5)
{
    ArchiveEncoding = null!;
    _isRar5 = isRar5;
    _headerType = headerType;
}
protected RarHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType)
{
_headerType = headerType;
@@ -86,12 +179,50 @@ internal class RarHeader : IRarHeader
VerifyHeaderCrc(reader.GetCrc32());
}
// Promotes an already-parsed base header into a header of the given type:
// copies the shared fields, lets ReadFinishAsync read the type-specific body,
// skips any leftover declared header bytes, then verifies the CRC.
// NOTE(review): 'result' is constructed as a base RarHeader, whose
// ReadFinishAsync throws NotImplementedException — confirm this overload is
// only used for header types with no body of their own.
public static async ValueTask<RarHeader> CreateAsync(
    RarHeader header,
    AsyncRarCrcBinaryReader reader,
    HeaderType headerType,
    CancellationToken cancellationToken = default
)
{
    var result = new RarHeader(headerType, header.IsRar5)
    {
        HeaderCrc = header.HeaderCrc,
        HeaderCode = header.HeaderCode,
        HeaderFlags = header.HeaderFlags,
        HeaderSize = header.HeaderSize,
        ExtraSize = header.ExtraSize,
        AdditionalDataSize = header.AdditionalDataSize,
        ArchiveEncoding = header.ArchiveEncoding,
    };
    await result.ReadFinishAsync(reader, cancellationToken).ConfigureAwait(false);
    // Consume any declared header bytes not read above so the stream lands on
    // the next header (and the CRC covers the full declared header size).
    var n = result.RemainingHeaderBytes(reader);
    if (n > 0)
    {
        await reader.ReadBytesAsync(n, cancellationToken).ConfigureAwait(false);
    }
    result.VerifyHeaderCrc(reader.GetCrc32());
    return result;
}
// Number of declared header bytes not yet consumed from the reader
// (async reader overload).
protected int RemainingHeaderBytes(AsyncMarkingBinaryReader reader) =>
    checked(HeaderSize - (int)reader.CurrentReadByteCount);

// Number of declared header bytes not yet consumed from the reader
// (sync reader overload).
protected int RemainingHeaderBytes(MarkingBinaryReader reader) =>
    checked(HeaderSize - (int)reader.CurrentReadByteCount);

// Subclasses override this to parse their type-specific fields (sync path).
protected virtual void ReadFinish(MarkingBinaryReader reader) =>
    throw new NotImplementedException();

// Subclasses override this to parse their type-specific fields (async path).
// NOTE(review): marked async with no await, so the NotImplementedException is
// delivered through a faulted ValueTask rather than thrown synchronously —
// confirm that is intended.
protected virtual async ValueTask ReadFinishAsync(
    AsyncMarkingBinaryReader reader,
    CancellationToken cancellationToken = default
) => throw new NotImplementedException();
private void VerifyHeaderCrc(uint crc32)
{
var b = (IsRar5 ? crc32 : (ushort)crc32) == HeaderCrc;
@@ -105,25 +236,25 @@ internal class RarHeader : IRarHeader
protected bool IsRar5 => _isRar5;
protected uint HeaderCrc { get; }
protected uint HeaderCrc { get; private set; }
internal byte HeaderCode { get; }
internal byte HeaderCode { get; private set; }
protected ushort HeaderFlags { get; }
protected ushort HeaderFlags { get; private set; }
protected bool HasHeaderFlag(ushort flag) => (HeaderFlags & flag) == flag;
protected int HeaderSize { get; }
protected int HeaderSize { get; private set; }
internal IArchiveEncoding ArchiveEncoding { get; }
internal IArchiveEncoding ArchiveEncoding { get; private set; }
/// <summary>
/// Extra header size.
/// </summary>
protected uint ExtraSize { get; }
protected uint ExtraSize { get; private set; }
/// <summary>
/// Size of additional data (eg file contents)
/// </summary>
protected long AdditionalDataSize { get; }
protected long AdditionalDataSize { get; private set; }
}

View File

@@ -1,5 +1,8 @@
using System.Collections.Generic;
using System.IO;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -40,6 +43,35 @@ public class RarHeaderFactory
}
}
// Asynchronously enumerates every header in the archive stream, starting with
// the mark (signature) header and ending after the end-of-archive header.
public async IAsyncEnumerable<IRarHeader> ReadHeadersAsync(
    Stream stream,
    [EnumeratorCancellation] CancellationToken cancellationToken = default
)
{
    cancellationToken.ThrowIfCancellationRequested();
    var markHeader = await MarkHeader
        .ReadAsync(stream, Options.LeaveStreamOpen, Options.LookForHeader, cancellationToken)
        .ConfigureAwait(false);
    // The mark header tells us which format (RAR4 vs RAR5) the rest uses.
    _isRar5 = markHeader.IsRar5;
    yield return markHeader;

    RarHeader? header;
    while (
        (header = await TryReadNextHeaderAsync(stream, cancellationToken).ConfigureAwait(false))
        != null
    )
    {
        cancellationToken.ThrowIfCancellationRequested();
        yield return header;
        if (header.HeaderType == HeaderType.EndArchive)
        {
            // End-of-archive marker. RAR reads nothing after this header, so
            // third-party tools can append extra information (e.g. a digital
            // signature) to the archive.
            yield break;
        }
    }
}
private RarHeader? TryReadNextHeader(Stream stream)
{
RarCrcBinaryReader reader;
@@ -92,7 +124,7 @@ public class RarHeaderFactory
case HeaderCodeV.RAR4_PROTECT_HEADER:
{
var ph = new ProtectHeader(header, reader);
// skip the recovery record data, we do not use it.
// skip the recovery record data, we do not use it.
switch (StreamingMode)
{
case StreamingMode.Seekable:
@@ -198,6 +230,181 @@ public class RarHeaderFactory
}
}
// Asynchronously reads the next header from the stream, choosing a plain or
// decrypting reader based on the current encryption state, and dispatches on
// the header code to build the concrete header type. Returns null when no
// further valid header can be read.
private async ValueTask<RarHeader?> TryReadNextHeaderAsync(
    Stream stream,
    CancellationToken cancellationToken = default
)
{
    AsyncRarCrcBinaryReader reader;
    if (!IsEncrypted)
    {
        reader = new AsyncRarCrcBinaryReader(stream);
    }
    else
    {
        if (Options.Password is null)
        {
            throw new CryptographicException(
                "Encrypted Rar archive has no password specified."
            );
        }
        if (_isRar5 && _cryptInfo != null)
        {
            // RAR5: each encrypted header carries its own IV before the data.
            // NOTE(review): a synchronous MarkingBinaryReader is used for the
            // IV read inside this async path — confirm this is intentional.
            var markingReader = new MarkingBinaryReader(stream);
            _cryptInfo.ReadInitV(markingReader);
            var _headerKey = new CryptKey5(Options.Password!, _cryptInfo);
            reader = new AsyncRarCrcBinaryReader(stream, _headerKey, _cryptInfo.Salt);
        }
        else
        {
            var key = new CryptKey3(Options.Password);
            reader = new AsyncRarCrcBinaryReader(stream, key);
        }
    }
    // Base fields first; null means a malformed header / end of input.
    var header = await RarHeader
        .TryReadBaseAsync(reader, _isRar5, Options.ArchiveEncoding, cancellationToken)
        .ConfigureAwait(false);
    if (header is null)
    {
        return null;
    }
    switch (header.HeaderCode)
    {
        case HeaderCodeV.RAR5_ARCHIVE_HEADER:
        case HeaderCodeV.RAR4_ARCHIVE_HEADER:
        {
            var ah = await ArchiveHeader
                .CreateAsync(header, reader, cancellationToken)
                .ConfigureAwait(false);
            if (ah.IsEncrypted == true)
            {
                //!!! rar5 we don't know yet
                return await CreateRarHeaderAsync(header, ah, reader, cancellationToken)
                    .ConfigureAwait(false);
            }
            return ah;
        }
        case HeaderCodeV.RAR4_PROTECT_HEADER:
        {
            // Recovery record: parsed, but its data payload is skipped.
            var ph = await ProtectHeader
                .CreateAsync(header, reader, cancellationToken)
                .ConfigureAwait(false);
            await SkipProtectHeaderDataAsync(ph, reader, cancellationToken)
                .ConfigureAwait(false);
            return await CreateRarHeaderAsync(header, ph, reader, cancellationToken)
                .ConfigureAwait(false);
        }
        case HeaderCodeV.RAR5_SERVICE_HEADER:
        {
            var fh = await FileHeader
                .CreateAsync(header, reader, HeaderType.Service, cancellationToken)
                .ConfigureAwait(false);
            // The archive comment ("CMT") service stream is kept readable;
            // all other service data is skipped.
            if (fh.FileName == "CMT")
            {
                fh.PackedStream = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
            }
            else
            {
                await SkipDataAsync(fh, reader, cancellationToken).ConfigureAwait(false);
            }
            return fh;
        }
        case HeaderCodeV.RAR4_NEW_SUB_HEADER:
        {
            var fh = await FileHeader
                .CreateAsync(header, reader, HeaderType.NewSub, cancellationToken)
                .ConfigureAwait(false);
            await SkipDataAsync(fh, reader, cancellationToken).ConfigureAwait(false);
            return fh;
        }
        case HeaderCodeV.RAR5_FILE_HEADER:
        case HeaderCodeV.RAR4_FILE_HEADER:
        {
            var fh = await FileHeader
                .CreateAsync(header, reader, HeaderType.File, cancellationToken)
                .ConfigureAwait(false);
            await ProcessFileHeaderAsync(fh, reader, cancellationToken).ConfigureAwait(false);
            return fh;
        }
        case HeaderCodeV.RAR5_END_ARCHIVE_HEADER:
        case HeaderCodeV.RAR4_END_ARCHIVE_HEADER:
        {
            var eh = await EndArchiveHeader
                .CreateAsync(header, reader, cancellationToken)
                .ConfigureAwait(false);
            return await CreateRarHeaderAsync(header, eh, reader, cancellationToken)
                .ConfigureAwait(false);
        }
        case HeaderCodeV.RAR5_ARCHIVE_ENCRYPTION_HEADER:
        {
            // All subsequent headers are encrypted with the info read here.
            var cryptoHeader = await ArchiveCryptHeader
                .CreateAsync(header, reader, cancellationToken)
                .ConfigureAwait(false);
            IsEncrypted = true;
            _cryptInfo = cryptoHeader.CryptInfo;
            return await CreateRarHeaderAsync(header, cryptoHeader, reader, cancellationToken)
                .ConfigureAwait(false);
        }
        default:
        {
            throw new InvalidFormatException("Unknown Rar Header: " + header.HeaderCode);
        }
    }
}
// Finishes reading a concrete header: runs its ReadFinishAsync body, skips
// any leftover declared header bytes, then verifies the header CRC.
// NOTE(review): the 'header' parameter is never used in this body.
// NOTE(review): 'reader' is typed RarCrcBinaryReader while every caller
// passes an AsyncRarCrcBinaryReader and the members awaited here are async —
// confirm the intended parameter type.
private static async ValueTask<RarHeader> CreateRarHeaderAsync(
    RarHeader header,
    RarHeader result,
    RarCrcBinaryReader reader,
    CancellationToken cancellationToken = default
)
{
    await result.ReadFinishAsync(reader, cancellationToken).ConfigureAwait(false);
    var n = result.RemainingHeaderBytes(reader);
    if (n > 0)
    {
        await reader.ReadBytesAsync(n, cancellationToken).ConfigureAwait(false);
    }
    result.VerifyHeaderCrc(reader.GetCrc32());
    return result;
}
// Skips the recovery-record payload that follows a protect header; the data
// is never used. Seekable streams jump over it; forward-only streams read
// and discard it.
private static async ValueTask SkipProtectHeaderDataAsync(
    ProtectHeader ph,
    RarCrcBinaryReader reader,
    CancellationToken cancellationToken = default
)
{
    switch (ph.StreamingMode)
    {
        case StreamingMode.Seekable:
        {
            reader.BaseStream.Position += ph.DataSize;
        }
        break;
        case StreamingMode.Streaming:
        {
            // Skip asynchronously and honour the caller's token, matching
            // SkipDataAsync (the previous synchronous Skip ignored both the
            // async context and cancellationToken).
            await reader
                .BaseStream.SkipAsync((int)ph.DataSize, cancellationToken)
                .ConfigureAwait(false);
        }
        break;
        default:
        {
            throw new InvalidFormatException("Invalid StreamingMode");
        }
    }
}
private void SkipData(FileHeader fh, RarCrcBinaryReader reader)
{
switch (StreamingMode)
@@ -210,7 +417,6 @@ public class RarHeaderFactory
break;
case StreamingMode.Streaming:
{
//skip the data because it's useless?
reader.BaseStream.Skip(fh.CompressedSize);
}
break;
@@ -220,4 +426,108 @@ public class RarHeaderFactory
}
}
}
// Prepares a file header's data stream: seekable archives just record the
// data offset and jump past it; streaming archives wrap the packed bytes
// (optionally in a decryption wrapper) so they can be consumed in order.
// NOTE(review): declared static yet references Options.Password below — if
// Options is an instance member of the factory this will not compile as-is.
private static void ProcessFileHeader(FileHeader fh, RarCrcBinaryReader reader)
{
    switch (fh.StreamingMode)
    {
        case StreamingMode.Seekable:
        {
            fh.DataStartPosition = reader.BaseStream.Position;
            reader.BaseStream.Position += fh.CompressedSize;
        }
        break;
        case StreamingMode.Streaming:
        {
            var ms = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
            if (fh.R4Salt is null && fh.Rar5CryptoInfo is null)
            {
                // Unencrypted entry: expose the raw packed bytes.
                fh.PackedStream = ms;
            }
            else
            {
                // Encrypted entry: choose the RAR5 (Rar5CryptoInfo) or RAR4
                // (R4Salt) key scheme based on which salt is present.
                fh.PackedStream = new RarCryptoWrapper(
                    ms,
                    fh.R4Salt is null ? fh.Rar5CryptoInfo.NotNull().Salt : fh.R4Salt,
                    fh.R4Salt is null
                        ? new CryptKey5(Options.Password, fh.Rar5CryptoInfo.NotNull())
                        : new CryptKey3(Options.Password)
                );
            }
        }
        break;
        default:
        {
            throw new InvalidFormatException("Invalid StreamingMode");
        }
    }
}
// Skips (or records the position of) the packed data that follows a header,
// depending on whether the underlying stream can seek.
private static async ValueTask SkipDataAsync(
    FileHeader fh,
    RarCrcBinaryReader reader,
    CancellationToken cancellationToken = default
)
{
    switch (fh.StreamingMode)
    {
        case StreamingMode.Seekable:
        {
            // Remember where the data starts so it can be opened later,
            // then jump over it.
            fh.DataStartPosition = reader.BaseStream.Position;
            reader.BaseStream.Position += fh.CompressedSize;
        }
        break;
        case StreamingMode.Streaming:
        {
            // Forward-only stream: read-and-discard the packed data.
            // NOTE(review): the int cast truncates CompressedSize for
            // entries larger than 2GB — confirm SkipAsync's parameter type.
            await reader
                .BaseStream.SkipAsync((int)fh.CompressedSize, cancellationToken)
                .ConfigureAwait(false);
        }
        break;
        default:
        {
            throw new InvalidFormatException("Invalid StreamingMode");
        }
    }
}
// Async counterpart of ProcessFileHeader: records the data offset (seekable)
// or wraps the packed bytes, optionally in a decryption wrapper (streaming).
// NOTE(review): declared async but contains no await, and declared static yet
// references Options.Password — if Options is an instance member this will
// not compile as-is; confirm both.
private static async ValueTask ProcessFileHeaderAsync(
    FileHeader fh,
    RarCrcBinaryReader reader,
    CancellationToken cancellationToken = default
)
{
    switch (fh.StreamingMode)
    {
        case StreamingMode.Seekable:
        {
            fh.DataStartPosition = reader.BaseStream.Position;
            reader.BaseStream.Position += fh.CompressedSize;
        }
        break;
        case StreamingMode.Streaming:
        {
            var ms = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
            if (fh.R4Salt is null && fh.Rar5CryptoInfo is null)
            {
                // Unencrypted entry: expose the raw packed bytes.
                fh.PackedStream = ms;
            }
            else
            {
                // Encrypted entry: RAR5 (Rar5CryptoInfo) vs RAR4 (R4Salt)
                // key scheme, chosen by which salt is present.
                fh.PackedStream = new RarCryptoWrapper(
                    ms,
                    fh.R4Salt is null ? fh.Rar5CryptoInfo.NotNull().Salt : fh.R4Salt,
                    fh.R4Salt is null
                        ? new CryptKey5(Options.Password, fh.Rar5CryptoInfo.NotNull())
                        : new CryptKey3(Options.Password)
                );
            }
        }
        break;
        default:
        {
            throw new InvalidFormatException("Invalid StreamingMode");
        }
    }
}
}

View File

@@ -1,5 +1,7 @@
using System;
using System.Security.Cryptography;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
@@ -9,6 +11,58 @@ internal class Rar5CryptoInfo
{
public Rar5CryptoInfo() { }
// Asynchronously parses a RAR5 encryption block: crypto version, flags, KDF
// iteration count (log2), salt, optional IV, and optional password-check
// values whose own checksum is verified before they are trusted.
public static async ValueTask<Rar5CryptoInfo> CreateAsync(
    AsyncMarkingBinaryReader reader,
    bool readInitV,
    CancellationToken cancellationToken = default
)
{
    var info = new Rar5CryptoInfo();
    // NOTE(review): the vint reads below do not pass cancellationToken —
    // confirm whether those overloads accept one.
    var cryptVersion = await reader.ReadRarVIntUInt32Async().ConfigureAwait(false);
    if (cryptVersion > EncryptionConstV5.VERSION)
    {
        throw new CryptographicException($"Unsupported crypto version of {cryptVersion}");
    }
    var encryptionFlags = await reader.ReadRarVIntUInt32Async().ConfigureAwait(false);
    info.UsePswCheck = FlagUtility.HasFlag(
        encryptionFlags,
        EncryptionFlagsV5.CHFL_CRYPT_PSWCHECK
    );
    // log2 of the KDF iteration count, bounded to keep derivation time sane.
    info.LG2Count = await reader.ReadRarVIntByteAsync(1).ConfigureAwait(false);
    if (info.LG2Count > EncryptionConstV5.CRYPT5_KDF_LG2_COUNT_MAX)
    {
        throw new CryptographicException($"Unsupported LG2 count of {info.LG2Count}.");
    }
    info.Salt = await reader
        .ReadBytesAsync(EncryptionConstV5.SIZE_SALT50, cancellationToken)
        .ConfigureAwait(false);
    if (readInitV)
    {
        await info.ReadInitVAsync(reader, cancellationToken).ConfigureAwait(false);
    }
    if (info.UsePswCheck)
    {
        info.PswCheck = await reader
            .ReadBytesAsync(EncryptionConstV5.SIZE_PSWCHECK, cancellationToken)
            .ConfigureAwait(false);
        var pswCheckCsm = await reader
            .ReadBytesAsync(EncryptionConstV5.SIZE_PSWCHECK_CSUM, cancellationToken)
            .ConfigureAwait(false);
        // Only keep the password-check bytes if their stored checksum matches.
        // Dispose the hash instance — the previous version leaked it.
        using var sha = SHA256.Create();
        info.UsePswCheck = sha.ComputeHash(info.PswCheck)
            .AsSpan()
            .StartsWith(pswCheckCsm.AsSpan());
    }
    return info;
}
public Rar5CryptoInfo(MarkingBinaryReader reader, bool readInitV)
{
var cryptVersion = reader.ReadRarVIntUInt32();
@@ -45,6 +99,14 @@ internal class Rar5CryptoInfo
public void ReadInitV(MarkingBinaryReader reader) =>
InitV = reader.ReadBytes(EncryptionConstV5.SIZE_INITV);
// Asynchronously reads the AES initialization vector that follows the salt
// in headers that carry one.
public async ValueTask ReadInitVAsync(
    AsyncMarkingBinaryReader reader,
    CancellationToken cancellationToken = default
) =>
    InitV = await reader
        .ReadBytesAsync(EncryptionConstV5.SIZE_INITV, cancellationToken)
        .ConfigureAwait(false);
public bool UsePswCheck = false;
public int LG2Count = 0;

View File

@@ -5,6 +5,8 @@ using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
@@ -1270,6 +1272,46 @@ internal class ArchiveReader
_stream = stream;
}
// Asynchronously opens a 7z archive: optionally scans forward byte-by-byte
// (SFX support) until the 6-byte 7z signature is found, then leaves the
// stream positioned just past the 0x20-byte start header, kept in _header.
public async Task OpenAsync(
    Stream stream,
    bool lookForHeader,
    CancellationToken cancellationToken = default
)
{
    Close();
    _streamOrigin = stream.Position;
    _streamEnding = stream.Length;
    // When scanning, allow up to 512KB of leading SFX stub.
    var canScan = lookForHeader ? 0x80000 - 20 : 0;
    while (true)
    {
        // TODO: Check Signature! (when lookForHeader is false the signature
        // bytes are currently not validated at all)
        _header = new byte[0x20];
        // ConfigureAwait(false) added to match the library's other awaits.
        await stream
            .ReadExactAsync(_header, 0, 0x20, cancellationToken)
            .ConfigureAwait(false);
        if (
            !lookForHeader
            || _header
                .AsSpan(0, length: 6)
                .SequenceEqual<byte>([0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C])
        )
        {
            break;
        }
        if (canScan == 0)
        {
            throw new InvalidFormatException("Unable to find 7z signature");
        }
        canScan--;
        // No match: retry one byte further into the stream.
        stream.Position = ++_streamOrigin;
    }
    _stream = stream;
}
public void Close()
{
_stream?.Dispose();
@@ -1383,6 +1425,110 @@ internal class ArchiveReader
return db;
}
// Asynchronously reads and validates the 7z archive database (the "next
// header"), decoding it first when it is stored compressed/encrypted.
// Assumes OpenAsync already filled _header with the 0x20-byte start header.
public async Task<ArchiveDatabase> ReadDatabaseAsync(
    IPasswordProvider pass,
    CancellationToken cancellationToken = default
)
{
    var db = new ArchiveDatabase(pass);
    db.Clear();
    db._majorVersion = _header[6];
    db._minorVersion = _header[7];
    if (db._majorVersion != 0)
    {
        throw new InvalidOperationException();
    }
    // Start-header layout: CRC(4) @ 8, next-header offset(8) @ 0xC,
    // next-header size(8) @ 0x14, next-header CRC(4) @ 0x1C.
    var crcFromArchive = DataReader.Get32(_header, 8);
    var nextHeaderOffset = (long)DataReader.Get64(_header, 0xC);
    var nextHeaderSize = (long)DataReader.Get64(_header, 0x14);
    var nextHeaderCrc = DataReader.Get32(_header, 0x1C);
    // The stored CRC covers the three next-header fields just read.
    var crc = Crc.INIT_CRC;
    crc = Crc.Update(crc, nextHeaderOffset);
    crc = Crc.Update(crc, nextHeaderSize);
    crc = Crc.Update(crc, nextHeaderCrc);
    crc = Crc.Finish(crc);
    if (crc != crcFromArchive)
    {
        throw new InvalidOperationException();
    }
    db._startPositionAfterHeader = _streamOrigin + 0x20;
    // empty header is ok
    if (nextHeaderSize == 0)
    {
        db.Fill();
        return db;
    }
    if (nextHeaderOffset < 0 || nextHeaderSize < 0 || nextHeaderSize > int.MaxValue)
    {
        throw new InvalidOperationException();
    }
    if (nextHeaderOffset > _streamEnding - db._startPositionAfterHeader)
    {
        throw new InvalidOperationException("nextHeaderOffset is invalid");
    }
    _stream.Seek(nextHeaderOffset, SeekOrigin.Current);
    var header = new byte[nextHeaderSize];
    // NOTE(review): missing ConfigureAwait(false), unlike the library's
    // other awaits — confirm and align.
    await _stream.ReadExactAsync(header, 0, header.Length, cancellationToken);
    if (Crc.Finish(Crc.Update(Crc.INIT_CRC, header, 0, header.Length)) != nextHeaderCrc)
    {
        throw new InvalidOperationException();
    }
    using (var streamSwitch = new CStreamSwitch())
    {
        // Parse the header structure out of the in-memory byte buffer.
        streamSwitch.Set(this, header);
        var type = ReadId();
        if (type != BlockType.Header)
        {
            if (type != BlockType.EncodedHeader)
            {
                throw new InvalidOperationException();
            }
            // The header itself is packed: unpack it, then parse the result.
            var dataVector = ReadAndDecodePackedStreams(
                db._startPositionAfterHeader,
                db.PasswordProvider
            );
            // compressed header without content is odd but ok
            if (dataVector.Count == 0)
            {
                db.Fill();
                return db;
            }
            if (dataVector.Count != 1)
            {
                throw new InvalidOperationException();
            }
            streamSwitch.Set(this, dataVector[0]);
            if (ReadId() != BlockType.Header)
            {
                throw new InvalidOperationException();
            }
        }
        ReadHeader(db, db.PasswordProvider);
    }
    db.Fill();
    return db;
}
internal class CExtractFolderInfo
{
internal int _fileIndex;

View File

@@ -1,8 +1,10 @@
using System;
using System.Buffers;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Threading.Tasks;
namespace SharpCompress.Common.Tar.Headers;
@@ -495,6 +497,90 @@ internal sealed class TarHeader
return true;
}
// Asynchronously parses one tar header (plus any preceding GNU LongName /
// LongLink blocks) into this instance. Returns false at end of archive or
// when the header fails validation.
internal async Task<bool> ReadAsync(AsyncBinaryReader reader)
{
    string? longName = null;
    string? longLinkName = null;
    var hasLongValue = true;
    byte[] buffer;
    EntryType entryType;
    do
    {
        buffer = await ReadBlockAsync(reader);
        // NOTE(review): ReadBlockAsync never produces a zero-length array as
        // written, so this EOF check may be unreachable — confirm how end of
        // archive is actually signalled.
        if (buffer.Length == 0)
        {
            return false;
        }
        entryType = ReadEntryType(buffer);
        // LongName and LongLink headers can follow each other and need
        // to apply to the header that follows them.
        if (entryType == EntryType.LongName)
        {
            longName = await ReadLongNameAsync(reader, buffer);
            continue;
        }
        else if (entryType == EntryType.LongLink)
        {
            longLinkName = await ReadLongNameAsync(reader, buffer);
            continue;
        }
        hasLongValue = false;
    } while (hasLongValue);

    // Check header checksum
    if (!checkChecksum(buffer))
    {
        return false;
    }
    Name = longName ?? ArchiveEncoding.Decode(buffer, 0, 100).TrimNulls();
    EntryType = entryType;
    Size = ReadSize(buffer);
    // for symlinks, additionally read the linkname
    if (entryType == EntryType.SymLink || entryType == EntryType.HardLink)
    {
        LinkName = longLinkName ?? ArchiveEncoding.Decode(buffer, 157, 100).TrimNulls();
    }
    Mode = ReadAsciiInt64Base8(buffer, 100, 7);
    if (entryType == EntryType.Directory)
    {
        // NOTE(review): sets bit 0o1000 on directory modes — confirm the
        // intended permission flag.
        Mode |= 0b1_000_000_000;
    }
    UserId = ReadAsciiInt64Base8oldGnu(buffer, 108, 7);
    GroupId = ReadAsciiInt64Base8oldGnu(buffer, 116, 7);
    var unixTimeStamp = ReadAsciiInt64Base8(buffer, 136, 11);
    LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime();
    Magic = ArchiveEncoding.Decode(buffer, 257, 6).TrimNulls();
    // ustar: the prefix field extends the 100-character name limit.
    if (!string.IsNullOrEmpty(Magic) && "ustar".Equals(Magic))
    {
        var namePrefix = ArchiveEncoding.Decode(buffer, 345, 157).TrimNulls();
        if (!string.IsNullOrEmpty(namePrefix))
        {
            Name = namePrefix + "/" + Name;
        }
    }
    if (entryType != EntryType.LongName && Name.Length == 0)
    {
        return false;
    }
    return true;
}
internal static int RecalculateChecksum(byte[] buf)
{
// Set default value for checksum. That is 8 spaces.
@@ -530,4 +616,65 @@ internal sealed class TarHeader
public long? DataStartPosition { get; set; }
public string? Magic { get; set; }
// Reads one 512-byte tar block. A pooled buffer is used for the read, but
// the bytes are copied into an exact-size array before the pooled buffer is
// returned: the previous version handed the rented array to the caller AFTER
// returning it to the pool (use-after-return), and its size check compared
// the rented array's length (always >= BLOCK_SIZE) instead of bytes read.
private static async ValueTask<byte[]> ReadBlockAsync(AsyncBinaryReader reader)
{
    var rented = ArrayPool<byte>.Shared.Rent(BLOCK_SIZE);
    try
    {
        await reader.ReadBytesAsync(rented, 0, BLOCK_SIZE).ConfigureAwait(false);
        var block = new byte[BLOCK_SIZE];
        Array.Copy(rented, block, BLOCK_SIZE);
        // NOTE(review): ReadBytesAsync's short-read/EOF behaviour is not
        // visible here; the caller's buffer.Length == 0 EOF check can never
        // fire with a fixed-size block — confirm EOF handling upstream.
        return block;
    }
    finally
    {
        ArrayPool<byte>.Shared.Return(rented);
    }
}
// Reads a GNU LongName/LongLink payload: the preceding header block gives the
// size of the name that follows, padded to a 512-byte block boundary.
private async Task<string> ReadLongNameAsync(AsyncBinaryReader reader, byte[] buffer)
{
    var size = ReadSize(buffer);
    // Validate size to prevent memory exhaustion from malformed headers
    if (size < 0 || size > MAX_LONG_NAME_SIZE)
    {
        throw new InvalidFormatException(
            $"Long name size {size} is invalid or exceeds maximum allowed size of {MAX_LONG_NAME_SIZE} bytes"
        );
    }
    var nameLength = (int)size;
    var nameBytes = ArrayPool<byte>.Shared.Rent(nameLength);
    try
    {
        // Read the name into nameBytes — the previous version mistakenly
        // read into the already-consumed header 'buffer', leaving nameBytes
        // undefined when decoded below.
        await reader.ReadBytesAsync(nameBytes, 0, nameLength).ConfigureAwait(false);
        // Read the rest of the 512-byte block and discard the padding.
        var remainingBytesToRead = BLOCK_SIZE - (nameLength % BLOCK_SIZE);
        if (remainingBytesToRead < BLOCK_SIZE)
        {
            var remainingBytes = ArrayPool<byte>.Shared.Rent(remainingBytesToRead);
            try
            {
                await reader
                    .ReadBytesAsync(remainingBytes, 0, remainingBytesToRead)
                    .ConfigureAwait(false);
            }
            finally
            {
                // Return the padding buffer — the previous version returned
                // nameBytes here, double-returning it to the pool and
                // leaking remainingBytes.
                ArrayPool<byte>.Shared.Return(remainingBytes);
            }
        }
        return ArchiveEncoding.Decode(nameBytes, 0, nameLength).TrimNulls();
    }
    finally
    {
        ArrayPool<byte>.Shared.Return(nameBytes);
    }
}
}

View File

@@ -18,7 +18,7 @@ internal static class TarHeaderFactory
TarHeader? header = null;
try
{
var reader = new BinaryReader(stream);
var reader = new BinaryReader(stream, archiveEncoding.Default, leaveOpen: false);
header = new TarHeader(archiveEncoding);
if (!header.Read(reader))
@@ -54,6 +54,52 @@ internal static class TarHeaderFactory
}
}
// Asynchronously enumerates tar headers from the stream. A header that fails
// to parse is yielded as null (the caller decides whether to stop); the
// sequence ends when ReadAsync reports no further entries.
internal static async IAsyncEnumerable<TarHeader?> ReadHeaderAsync(
    StreamingMode mode,
    Stream stream,
    IArchiveEncoding archiveEncoding
)
{
    while (true)
    {
        TarHeader? header = null;
        try
        {
            var reader = new AsyncBinaryReader(stream, false);
            header = new TarHeader(archiveEncoding);
            // ConfigureAwait(false) added to match the library's other awaits.
            if (!await header.ReadAsync(reader).ConfigureAwait(false))
            {
                yield break;
            }
            switch (mode)
            {
                case StreamingMode.Seekable:
                {
                    header.DataStartPosition = stream.Position;
                    // Entry data is padded to 512-byte blocks; skip to the
                    // next block boundary.
                    stream.Position += PadTo512(header.Size);
                }
                break;
                case StreamingMode.Streaming:
                {
                    header.PackedStream = new TarReadOnlySubStream(stream, header.Size);
                }
                break;
                default:
                {
                    throw new InvalidFormatException("Invalid StreamingMode");
                }
            }
        }
        catch
        {
            // A malformed header is surfaced as a null element rather than
            // an exception.
            header = null;
        }
        yield return header;
    }
}
private static long PadTo512(long size)
{
var zeros = (int)(size % 512);

View File

@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.Arj
{
@@ -60,6 +62,53 @@ namespace SharpCompress.Compressors.Arj
return result;
}
/// <summary>
/// Asynchronously reads a single bit from the stream. Returns 0 or 1.
/// </summary>
/// <summary>
/// Asynchronously reads a single bit from the stream. Returns 0 or 1.
/// </summary>
public async Task<int> ReadBitAsync(CancellationToken cancellationToken)
{
    // Refill the one-byte bit buffer once it has been fully consumed.
    if (_bitCount == 0)
    {
        var one = new byte[1];
        var got = await _input.ReadAsync(one, 0, 1, cancellationToken).ConfigureAwait(false);
        if (got < 1)
        {
            throw new EndOfStreamException("No more data available in BitReader.");
        }
        _bitCount = 8;
        _bitBuffer = one[0];
    }
    // Bits are served most-significant first.
    _bitCount--;
    return (_bitBuffer >> _bitCount) & 1;
}
/// <summary>
/// Asynchronously reads n bits (up to 32) from the stream.
/// </summary>
/// <summary>
/// Asynchronously reads n bits (up to 32) from the stream.
/// </summary>
public async Task<int> ReadBitsAsync(int count, CancellationToken cancellationToken)
{
    if (count is < 0 or > 32)
    {
        throw new ArgumentOutOfRangeException(
            nameof(count),
            "Count must be between 0 and 32."
        );
    }
    // Accumulate MSB-first: each new bit shifts the previous ones left.
    var value = 0;
    for (var remaining = count; remaining > 0; remaining--)
    {
        value = (value << 1) | await ReadBitAsync(cancellationToken).ConfigureAwait(false);
    }
    return value;
}
/// <summary>
/// Resets any buffered bits.
/// </summary>

View File

@@ -1,6 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Arj
@@ -114,6 +116,51 @@ namespace SharpCompress.Compressors.Arj
return true;
}
/// <summary>
/// Asynchronously decodes a single element (literal or back-reference) and appends it to _buffer.
/// Returns true if data was added, or false if all input has already been decoded.
/// </summary>
/// <summary>
/// Asynchronously decodes a single element (literal or back-reference) and appends it to _buffer.
/// Returns true if data was added, or false if all input has already been decoded.
/// </summary>
private async Task<bool> DecodeNextAsync(CancellationToken cancellationToken)
{
    if (_buffer.Count >= _originalSize)
    {
        _finishedDecoding = true;
        return false;
    }
    // Length code: 0 means a literal byte follows, otherwise a match length.
    int len = await DecodeValAsync(0, 7, cancellationToken).ConfigureAwait(false);
    if (len == 0)
    {
        byte nextChar = (byte)
            await _bitReader.ReadBitsAsync(8, cancellationToken).ConfigureAwait(false);
        _buffer.Add(nextChar);
    }
    else
    {
        // Back-reference: copy repCount bytes starting backPtr+1 bytes back
        // from the end of the output produced so far.
        int repCount = len + THRESHOLD - 1;
        int backPtr = await DecodeValAsync(9, 13, cancellationToken).ConfigureAwait(false);
        if (backPtr >= _buffer.Count)
            throw new InvalidDataException("Invalid back_ptr in LH stream");
        int srcIndex = _buffer.Count - 1 - backPtr;
        for (int j = 0; j < repCount && _buffer.Count < _originalSize; j++)
        {
            byte b = _buffer[srcIndex];
            _buffer.Add(b);
            srcIndex++;
            // srcIndex may grow; it's allowed (source region can overlap destination)
        }
    }
    if (_buffer.Count >= _originalSize)
    {
        _finishedDecoding = true;
    }
    return true;
}
private int DecodeVal(int from, int to)
{
int add = 0;
@@ -129,6 +176,31 @@ namespace SharpCompress.Compressors.Arj
return res + add;
}
// Async counterpart of DecodeVal: decodes a variable-length value between
// bit positions 'from' and 'to'. Each leading 1-bit raises the value's range
// (setting bit 'bit' in 'add'); the final bit count then selects how many
// payload bits to read.
private async Task<int> DecodeValAsync(
    int from,
    int to,
    CancellationToken cancellationToken
)
{
    int add = 0;
    int bit = from;
    while (
        bit < to
        && await _bitReader.ReadBitsAsync(1, cancellationToken).ConfigureAwait(false) == 1
    )
    {
        add |= 1 << bit;
        bit++;
    }
    // Read 'bit' payload bits (none when bit == 0) and offset by 'add'.
    int res =
        bit > 0
            ? await _bitReader.ReadBitsAsync(bit, cancellationToken).ConfigureAwait(false)
            : 0;
    return res + add;
}
/// <summary>
/// Reads decompressed bytes into buffer[offset..offset+count].
/// The method decodes additional data on demand when needed.
@@ -178,6 +250,109 @@ namespace SharpCompress.Compressors.Arj
return totalRead;
}
// Asynchronously reads decompressed bytes into buffer[offset..offset+count],
// decoding more of the LH stream on demand.
public override async Task<int> ReadAsync(
    byte[] buffer,
    int offset,
    int count,
    CancellationToken cancellationToken
)
{
    if (_disposed)
        throw new ObjectDisposedException(nameof(LHDecoderStream));
    if (buffer is null)
        throw new ArgumentNullException(nameof(buffer));
    // NOTE(review): offset + count can overflow int for hostile arguments;
    // the canonical check is count > buffer.Length - offset.
    if (offset < 0 || count < 0 || offset + count > buffer.Length)
        throw new ArgumentOutOfRangeException("offset/count");
    if (_readPosition >= _originalSize)
        return 0; // EOF
    int totalRead = 0;
    while (totalRead < count && _readPosition < _originalSize)
    {
        cancellationToken.ThrowIfCancellationRequested();
        // Decode at least up to the current read position.
        if (_readPosition >= _buffer.Count)
        {
            bool had = await DecodeNextAsync(cancellationToken).ConfigureAwait(false);
            if (!had)
            {
                break;
            }
        }
        int available = _buffer.Count - (int)_readPosition;
        if (available <= 0)
        {
            if (!_finishedDecoding)
            {
                continue;
            }
            break;
        }
        // Copy what is already decoded, then loop for more if needed.
        int toCopy = Math.Min(available, count - totalRead);
        _buffer.CopyTo((int)_readPosition, buffer, offset + totalRead, toCopy);
        _readPosition += toCopy;
        totalRead += toCopy;
    }
    return totalRead;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
// Memory<byte> overload: same decode-on-demand loop as the array overload,
// copying element-by-element into the destination span.
public override async ValueTask<int> ReadAsync(
    Memory<byte> buffer,
    CancellationToken cancellationToken = default
)
{
    if (_disposed)
        throw new ObjectDisposedException(nameof(LHDecoderStream));
    if (_readPosition >= _originalSize)
        return 0; // EOF
    int totalRead = 0;
    while (totalRead < buffer.Length && _readPosition < _originalSize)
    {
        cancellationToken.ThrowIfCancellationRequested();
        // Decode at least up to the current read position.
        if (_readPosition >= _buffer.Count)
        {
            bool had = await DecodeNextAsync(cancellationToken).ConfigureAwait(false);
            if (!had)
            {
                break;
            }
        }
        int available = _buffer.Count - (int)_readPosition;
        if (available <= 0)
        {
            if (!_finishedDecoding)
            {
                continue;
            }
            break;
        }
        int toCopy = Math.Min(available, buffer.Length - totalRead);
        for (int i = 0; i < toCopy; i++)
        {
            buffer.Span[totalRead + i] = _buffer[(int)_readPosition + i];
        }
        _readPosition += toCopy;
        totalRead += toCopy;
    }
    return totalRead;
}
#endif
public override void Flush() => throw new NotSupportedException();
public override long Seek(long offset, SeekOrigin origin) =>

View File

@@ -149,4 +149,25 @@ public sealed class BZip2Stream : Stream, IStreamStack
}
return true;
}
/// <summary>
/// Asynchronously consumes two bytes to test if there is a BZip2 header.
/// </summary>
/// <param name="stream">The stream to probe; up to two bytes are consumed.</param>
/// <param name="cancellationToken">Token used to cancel the read.</param>
/// <returns><c>true</c> if the bytes read are the BZip2 magic "BZ"; otherwise <c>false</c>.</returns>
public static async ValueTask<bool> IsBZip2Async(
Stream stream,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
var buffer = new byte[2];
// A stream may legally return fewer bytes than requested per call;
// loop until both magic bytes are read or the stream ends.
var total = 0;
while (total < buffer.Length)
{
var read = await stream
.ReadAsync(buffer, total, buffer.Length - total, cancellationToken)
.ConfigureAwait(false);
if (read <= 0)
{
return false; // EOF before a complete header could be read
}
total += read;
}
return buffer[0] == 'B' && buffer[1] == 'Z';
}
}

View File

@@ -222,6 +222,19 @@ public sealed class LZipStream : Stream, IStreamStack
/// <returns><c>true</c> if the given stream is an LZip file, <c>false</c> otherwise.</returns>
public static bool IsLZipFile(Stream stream) => ValidateAndReadSize(stream) != 0;
/// <summary>
/// Asynchronously determines if the given stream is positioned at the start of a v1 LZip
/// file, as indicated by the ASCII characters "LZIP" and a version byte
/// of 1, followed by at least one byte.
/// </summary>
/// <param name="stream">The stream to read from. Must not be null.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns><c>true</c> if the given stream is an LZip file, <c>false</c> otherwise.</returns>
public static async ValueTask<bool> IsLZipFileAsync(
Stream stream,
CancellationToken cancellationToken = default
) =>
// ConfigureAwait(false): library code must not capture the caller's context.
await ValidateAndReadSizeAsync(stream, cancellationToken).ConfigureAwait(false) != 0;
/// <summary>
/// Reads the 6-byte header of the stream, and returns 0 if either the header
/// couldn't be read or it isn't a valid LZIP header, or the dictionary
@@ -255,6 +268,44 @@ public sealed class LZipStream : Stream, IStreamStack
return (1 << basePower) - (subtractionNumerator * (1 << (basePower - 4)));
}
/// <summary>
/// Asynchronously reads the 6-byte header of the stream, and returns 0 if either the header
/// couldn't be read or it isn't a valid LZIP header, or the dictionary
/// size if it *is* a valid LZIP file.
/// </summary>
/// <param name="stream">The stream to read the header from; up to six bytes are consumed.</param>
/// <param name="cancellationToken">Token used to cancel the read.</param>
/// <returns>The dictionary size, or 0 when the header is missing or invalid.</returns>
public static async ValueTask<int> ValidateAndReadSizeAsync(
Stream stream,
CancellationToken cancellationToken
)
{
// Read the 6-byte header, looping because a stream may legally return
// fewer bytes than requested per call (previously a single short read
// made this return 0 for a valid file).
byte[] header = new byte[6];
var total = 0;
while (total < header.Length)
{
var n = await stream
.ReadAsync(header, total, header.Length - total, cancellationToken)
.ConfigureAwait(false);
if (n <= 0)
{
return 0; // EOF before a complete header
}
total += n;
}
// Magic bytes "LZIP" followed by the version byte.
if (
header[0] != 'L'
|| header[1] != 'Z'
|| header[2] != 'I'
|| header[3] != 'P'
|| header[4] != 1 /* version 1 */
)
{
return 0;
}
// Byte 5 encodes the dictionary size: the low 5 bits give a base power
// of two, the high 3 bits subtract that many sixteenths of the base.
var basePower = header[5] & 0x1F;
var subtractionNumerator = (header[5] & 0xE0) >> 5;
return (1 << basePower) - (subtractionNumerator * (1 << (basePower - 4)));
}
private static readonly byte[] headerBytes =
[
(byte)'L',

View File

@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
@@ -95,6 +97,45 @@ namespace SharpCompress.Compressors.Lzw
return true;
}
/// <summary>
/// Asynchronously checks if the stream is an LZW stream
/// </summary>
/// <param name="stream">The stream to read from; up to HDR_SIZE bytes are consumed.</param>
/// <param name="cancellationToken">Cancellation token</param>
/// <returns>True if the stream is an LZW stream, false otherwise</returns>
public static async ValueTask<bool> IsLzwStreamAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
try
{
byte[] hdr = new byte[LzwConstants.HDR_SIZE];
// Read the complete header, looping because ReadAsync may return
// fewer bytes than requested per call. (The previous `result < 0`
// check was dead code: ReadAsync never returns a negative count.)
int total = 0;
while (total < hdr.Length)
{
int read = await stream
.ReadAsync(hdr, total, hdr.Length - total, cancellationToken)
.ConfigureAwait(false);
if (read <= 0)
{
return false; // EOF before a complete header
}
total += read;
}
// Check the magic marker; a mismatch simply means "not LZW" — no
// need to throw an exception only to swallow it below.
return hdr[0] == (LzwConstants.MAGIC >> 8) && hdr[1] == (LzwConstants.MAGIC & 0xff);
}
catch (Exception)
{
return false;
}
}
/// <summary>
/// Gets or sets a flag indicating ownership of underlying stream.
/// When the flag is true <see cref="Stream.Dispose()" /> will close the underlying stream also.

View File

@@ -0,0 +1,234 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Rar;
/// <summary>
/// Read-only, forward-only stream that concatenates the compressed data of a
/// RAR entry split across multiple volumes, advancing through the parts via an
/// <see cref="IAsyncEnumerator{T}"/> as each part is exhausted. Synchronous
/// reads are not supported; use the async read overloads.
/// </summary>
internal sealed class MultiVolumeReadOnlyAsyncStream : MultiVolumeReadOnlyStreamBase, IStreamStack
{
#if DEBUG_STREAMS
long IStreamStack.InstanceId { get; set; }
#endif
int IStreamStack.DefaultBufferSize { get; set; }
Stream IStreamStack.BaseStream() => currentStream.NotNull();
// This stream does no buffering of its own, so buffer size/position are inert.
int IStreamStack.BufferSize
{
get => 0;
set { }
}
int IStreamStack.BufferPosition
{
get => 0;
set { }
}
void IStreamStack.SetPosition(long position) { }
// Bytes consumed from the current part.
private long currentPosition;
// Compressed size of the current part (read stops or advances here).
private long maxPosition;
// Enumerates the parts of the entry across volumes.
private IAsyncEnumerator<RarFilePart> filePartEnumerator;
// Compressed stream of the current part; null after disposal.
private Stream? currentStream;
// Private: construction requires an async step, so use Create() instead.
private MultiVolumeReadOnlyAsyncStream(IAsyncEnumerable<RarFilePart> parts)
{
filePartEnumerator = parts.GetAsyncEnumerator();
}
/// <summary>
/// Asynchronously creates the stream and positions it at the first part.
/// </summary>
internal static async ValueTask<MultiVolumeReadOnlyAsyncStream> Create(
IAsyncEnumerable<RarFilePart> parts
)
{
var stream = new MultiVolumeReadOnlyAsyncStream(parts);
// NOTE(review): the result of this first MoveNextAsync is ignored — an
// empty sequence would fault inside InitializeNextFilePart; confirm that
// callers always supply at least one part.
await stream.filePartEnumerator.MoveNextAsync();
stream.InitializeNextFilePart();
return stream;
}
#if NET8_0_OR_GREATER
public override async ValueTask DisposeAsync()
{
await base.DisposeAsync();
if (filePartEnumerator != null)
{
await filePartEnumerator.DisposeAsync();
}
currentStream = null;
}
#else
protected override void Dispose(bool disposing)
{
base.Dispose(disposing);
// Sync-over-async fallback for targets without IAsyncDisposable support.
filePartEnumerator.DisposeAsync().AsTask().GetAwaiter().GetResult();
currentStream = null;
}
#endif
// Resets the per-part state from the enumerator's current part.
private void InitializeNextFilePart()
{
maxPosition = filePartEnumerator.Current.FileHeader.CompressedSize;
currentPosition = 0;
currentStream = filePartEnumerator.Current.GetCompressedStream();
CurrentCrc = filePartEnumerator.Current.FileHeader.FileCrc;
}
public override int Read(byte[] buffer, int offset, int count) =>
throw new NotSupportedException(
"Synchronous read is not supported in MultiVolumeReadOnlyAsyncStream."
);
/// <summary>
/// Asynchronously reads up to <paramref name="count"/> bytes, transparently
/// advancing to the next volume when the current part is fully consumed and
/// the header marks the entry as split.
/// </summary>
public override async System.Threading.Tasks.Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
System.Threading.CancellationToken cancellationToken
)
{
var totalRead = 0;
var currentOffset = offset;
var currentCount = count;
while (currentCount > 0)
{
// Never read past the end of the current part.
var readSize = currentCount;
if (currentCount > maxPosition - currentPosition)
{
readSize = (int)(maxPosition - currentPosition);
}
var read = await currentStream
.NotNull()
.ReadAsync(buffer, currentOffset, readSize, cancellationToken)
.ConfigureAwait(false);
// NOTE(review): ReadAsync never returns a negative count, so this guard
// is unreachable; EOF is signalled by 0 — consider checking for that.
if (read < 0)
{
throw new EndOfStreamException();
}
currentPosition += read;
currentOffset += read;
currentCount -= read;
totalRead += read;
if (
((maxPosition - currentPosition) == 0)
&& filePartEnumerator.Current.FileHeader.IsSplitAfter
)
{
if (filePartEnumerator.Current.FileHeader.R4Salt != null)
{
throw new InvalidFormatException(
"Sharpcompress currently does not support multi-volume decryption."
);
}
// Capture the name before advancing, for the error message below.
var fileName = filePartEnumerator.Current.FileHeader.FileName;
if (!await filePartEnumerator.MoveNextAsync())
{
throw new InvalidFormatException(
"Multi-part rar file is incomplete. Entry expects a new volume: "
+ fileName
);
}
InitializeNextFilePart();
}
else
{
// Not at a volume boundary: return whatever was read this pass.
break;
}
}
return totalRead;
}
#if NETCOREAPP2_1_OR_GREATER || NETSTANDARD2_1_OR_GREATER
/// <summary>
/// Memory-based counterpart of the byte[] overload; same volume-advance logic.
/// </summary>
public override async System.Threading.Tasks.ValueTask<int> ReadAsync(
Memory<byte> buffer,
System.Threading.CancellationToken cancellationToken = default
)
{
var totalRead = 0;
var currentOffset = 0;
var currentCount = buffer.Length;
while (currentCount > 0)
{
// Never read past the end of the current part.
var readSize = currentCount;
if (currentCount > maxPosition - currentPosition)
{
readSize = (int)(maxPosition - currentPosition);
}
var read = await currentStream
.NotNull()
.ReadAsync(buffer.Slice(currentOffset, readSize), cancellationToken)
.ConfigureAwait(false);
// NOTE(review): as above, a negative count cannot occur; EOF is 0.
if (read < 0)
{
throw new EndOfStreamException();
}
currentPosition += read;
currentOffset += read;
currentCount -= read;
totalRead += read;
if (
((maxPosition - currentPosition) == 0)
&& filePartEnumerator.Current.FileHeader.IsSplitAfter
)
{
if (filePartEnumerator.Current.FileHeader.R4Salt != null)
{
throw new InvalidFormatException(
"Sharpcompress currently does not support multi-volume decryption."
);
}
// Capture the name before advancing, for the error message below.
var fileName = filePartEnumerator.Current.FileHeader.FileName;
if (!await filePartEnumerator.MoveNextAsync())
{
throw new InvalidFormatException(
"Multi-part rar file is incomplete. Entry expects a new volume: "
+ fileName
);
}
InitializeNextFilePart();
}
else
{
// Not at a volume boundary: return whatever was read this pass.
break;
}
}
return totalRead;
}
#endif
public override bool CanRead => true;
public override bool CanSeek => false;
public override bool CanWrite => false;
public override void Flush() { }
public override long Length => throw new NotSupportedException();
public override long Position
{
get => throw new NotSupportedException();
set => throw new NotSupportedException();
}
public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
public override void SetLength(long value) => throw new NotSupportedException();
public override void Write(byte[] buffer, int offset, int count) =>
throw new NotSupportedException();
}

View File

@@ -1,5 +1,3 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
@@ -9,20 +7,21 @@ using SharpCompress.IO;
namespace SharpCompress.Compressors.Rar;
internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
internal sealed class MultiVolumeReadOnlyStream : MultiVolumeReadOnlyStreamBase, IStreamStack
{
#if DEBUG_STREAMS
long IStreamStack.InstanceId { get; set; }
#endif
int IStreamStack.DefaultBufferSize { get; set; }
Stream IStreamStack.BaseStream() => currentStream;
Stream IStreamStack.BaseStream() => currentStream.NotNull();
int IStreamStack.BufferSize
{
get => 0;
set { }
}
int IStreamStack.BufferPosition
{
get => 0;
@@ -35,7 +34,7 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
private long maxPosition;
private IEnumerator<RarFilePart> filePartEnumerator;
private Stream currentStream;
private Stream? currentStream;
internal MultiVolumeReadOnlyStream(IEnumerable<RarFilePart> parts)
{
@@ -56,11 +55,8 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
this.DebugDispose(typeof(MultiVolumeReadOnlyStream));
#endif
if (filePartEnumerator != null)
{
filePartEnumerator.Dispose();
filePartEnumerator = null;
}
filePartEnumerator.Dispose();
currentStream = null;
}
}
@@ -87,7 +83,7 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
readSize = (int)(maxPosition - currentPosition);
}
var read = currentStream.Read(buffer, currentOffset, readSize);
var read = currentStream.NotNull().Read(buffer, currentOffset, readSize);
if (read < 0)
{
throw new EndOfStreamException();
@@ -108,6 +104,7 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
"Sharpcompress currently does not support multi-volume decryption."
);
}
var fileName = filePartEnumerator.Current.FileHeader.FileName;
if (!filePartEnumerator.MoveNext())
{
@@ -116,6 +113,7 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
+ fileName
);
}
InitializeNextFilePart();
}
else
@@ -123,6 +121,7 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
break;
}
}
return totalRead;
}
@@ -145,6 +144,7 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
}
var read = await currentStream
.NotNull()
.ReadAsync(buffer, currentOffset, readSize, cancellationToken)
.ConfigureAwait(false);
if (read < 0)
@@ -167,6 +167,7 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
"Sharpcompress currently does not support multi-volume decryption."
);
}
var fileName = filePartEnumerator.Current.FileHeader.FileName;
if (!filePartEnumerator.MoveNext())
{
@@ -175,6 +176,7 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
+ fileName
);
}
InitializeNextFilePart();
}
else
@@ -182,6 +184,7 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
break;
}
}
return totalRead;
}
@@ -203,6 +206,7 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
}
var read = await currentStream
.NotNull()
.ReadAsync(buffer.Slice(currentOffset, readSize), cancellationToken)
.ConfigureAwait(false);
if (read < 0)
@@ -250,8 +254,6 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
public override bool CanWrite => false;
public byte[] CurrentCrc { get; private set; }
public override void Flush() { }
public override long Length => throw new NotSupportedException();

View File

@@ -0,0 +1,8 @@
using System.IO;
namespace SharpCompress.Compressors.Rar;
/// <summary>
/// Base type shared by the synchronous and asynchronous multi-volume
/// read-only RAR streams.
/// </summary>
internal abstract class MultiVolumeReadOnlyStreamBase : Stream
{
// CRC bytes of the file part currently being read; assigned by derived
// classes when a part is initialized, null until the first part is opened.
public byte[]? CurrentCrc { get; protected set; }
}

View File

@@ -30,7 +30,7 @@ internal class RarBLAKE2spStream : RarStream, IStreamStack
void IStreamStack.SetPosition(long position) { }
private readonly MultiVolumeReadOnlyStream readStream;
private readonly MultiVolumeReadOnlyStreamBase readStream;
private readonly bool disableCRCCheck;
const uint BLAKE2S_NUM_ROUNDS = 10;
@@ -108,7 +108,7 @@ internal class RarBLAKE2spStream : RarStream, IStreamStack
private RarBLAKE2spStream(
IRarUnpack unpack,
FileHeader fileHeader,
MultiVolumeReadOnlyStream readStream
MultiVolumeReadOnlyStreamBase readStream
)
: base(unpack, fileHeader, readStream)
{
@@ -137,7 +137,7 @@ internal class RarBLAKE2spStream : RarStream, IStreamStack
public static async Task<RarBLAKE2spStream> CreateAsync(
IRarUnpack unpack,
FileHeader fileHeader,
MultiVolumeReadOnlyStream readStream,
MultiVolumeReadOnlyAsyncStream readStream,
CancellationToken cancellationToken = default
)
{

View File

@@ -29,14 +29,14 @@ internal class RarCrcStream : RarStream, IStreamStack
void IStreamStack.SetPosition(long position) { }
private readonly MultiVolumeReadOnlyStream readStream;
private readonly MultiVolumeReadOnlyStreamBase readStream;
private uint currentCrc;
private readonly bool disableCRC;
private RarCrcStream(
IRarUnpack unpack,
FileHeader fileHeader,
MultiVolumeReadOnlyStream readStream
MultiVolumeReadOnlyStreamBase readStream
)
: base(unpack, fileHeader, readStream)
{
@@ -62,7 +62,7 @@ internal class RarCrcStream : RarStream, IStreamStack
public static async Task<RarCrcStream> CreateAsync(
IRarUnpack unpack,
FileHeader fileHeader,
MultiVolumeReadOnlyStream readStream,
MultiVolumeReadOnlyStreamBase readStream,
CancellationToken cancellationToken = default
)
{
@@ -92,7 +92,7 @@ internal class RarCrcStream : RarStream, IStreamStack
}
else if (
!disableCRC
&& GetCrc() != BitConverter.ToUInt32(readStream.CurrentCrc, 0)
&& GetCrc() != BitConverter.ToUInt32(readStream.NotNull().CurrentCrc.NotNull(), 0)
&& count != 0
)
{
@@ -118,7 +118,7 @@ internal class RarCrcStream : RarStream, IStreamStack
}
else if (
!disableCRC
&& GetCrc() != BitConverter.ToUInt32(readStream.CurrentCrc, 0)
&& GetCrc() != BitConverter.ToUInt32(readStream.NotNull().CurrentCrc.NotNull(), 0)
&& count != 0
)
{
@@ -143,7 +143,7 @@ internal class RarCrcStream : RarStream, IStreamStack
}
else if (
!disableCRC
&& GetCrc() != BitConverter.ToUInt32(readStream.CurrentCrc, 0)
&& GetCrc() != BitConverter.ToUInt32(readStream.NotNull().CurrentCrc.NotNull(), 0)
&& buffer.Length != 0
)
{

View File

@@ -60,6 +60,22 @@ public sealed class XZStream : XZReadOnlyStream, IStreamStack
}
}
public static async ValueTask<bool> IsXZStreamAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
return null != await XZHeader.FromStreamAsync(stream, cancellationToken);
}
catch (Exception)
{
return false;
}
}
private void AssertBlockCheckTypeIsSupported()
{
switch (Header.BlockCheckType)

View File

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
@@ -43,6 +44,27 @@ internal class ZStandardStream : DecompressionStream, IStreamStack
return true;
}
internal static async ValueTask<bool> IsZStandardAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
var buffer = new byte[4];
var bytesRead = await stream.ReadAsync(buffer, 0, 4, cancellationToken);
if (bytesRead < 4)
{
return false;
}
var magic = BitConverter.ToUInt32(buffer, 0);
if (ZstandardConstants.MAGIC != magic)
{
return false;
}
return true;
}
public ZStandardStream(Stream baseInputStream)
: base(baseInputStream)
{

View File

@@ -29,23 +29,24 @@ namespace SharpCompress.Factories
int bufferSize = ReaderOptions.DefaultBufferSize
) => AceHeader.IsArchive(stream);
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
) => AceHeader.IsArchiveAsync(stream, cancellationToken);
public IReader OpenReader(Stream stream, ReaderOptions? options) =>
AceReader.OpenReader(stream, options);
public IAsyncReader OpenAsyncReader(
public ValueTask<IAsyncReader> OpenAsyncReader(
Stream stream,
ReaderOptions? options,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncReader)AceReader.OpenReader(stream, options);
return new((IAsyncReader)AceReader.OpenReader(stream, options));
}
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
}
}

View File

@@ -1,4 +1,5 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -36,28 +37,54 @@ namespace SharpCompress.Factories
//Hyper - archive, check the next two bytes for "HP" or "ST"(or look below for
//"HYP").Also the ZOO archiver also does put a 01Ah at the start of the file,
//see the ZOO entry below.
var bytes = new byte[2];
stream.Read(bytes, 0, 2);
return bytes[0] == 0x1A && bytes[1] < 10; //rather thin, but this is all we have
var buffer = ArrayPool<byte>.Shared.Rent(2);
try
{
stream.ReadExact(buffer, 0, 2);
return buffer[0] == 0x1A && buffer[1] < 10; //rather thin, but this is all we have
}
finally
{
ArrayPool<byte>.Shared.Return(buffer);
}
}
public IReader OpenReader(Stream stream, ReaderOptions? options) =>
ArcReader.OpenReader(stream, options);
public IAsyncReader OpenAsyncReader(
public ValueTask<IAsyncReader> OpenAsyncReader(
Stream stream,
ReaderOptions? options,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncReader)ArcReader.OpenReader(stream, options);
return new((IAsyncReader)ArcReader.OpenReader(stream, options));
}
public override ValueTask<bool> IsArchiveAsync(
public override async ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
)
{
//You may have to use some(paranoid) checks to ensure that you actually are
//processing an ARC file, since other archivers also adopted the idea of putting
//a 01Ah byte at offset 0, namely the Hyper archiver. To check if you have a
//Hyper - archive, check the next two bytes for "HP" or "ST"(or look below for
//"HYP").Also the ZOO archiver also does put a 01Ah at the start of the file,
//see the ZOO entry below.
var buffer = ArrayPool<byte>.Shared.Rent(2);
try
{
await stream.ReadExactAsync(buffer, 0, 2, cancellationToken);
return buffer[0] == 0x1A && buffer[1] < 10; //rather thin, but this is all we have
}
finally
{
ArrayPool<byte>.Shared.Return(buffer);
}
}
}
}

View File

@@ -27,28 +27,26 @@ namespace SharpCompress.Factories
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
return ArjHeader.IsArchive(stream);
}
) => ArjHeader.IsArchive(stream);
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
) => ArjHeader.IsArchiveAsync(stream, cancellationToken);
public IReader OpenReader(Stream stream, ReaderOptions? options) =>
ArjReader.OpenReader(stream, options);
public IAsyncReader OpenAsyncReader(
public ValueTask<IAsyncReader> OpenAsyncReader(
Stream stream,
ReaderOptions? options,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncReader)ArjReader.OpenReader(stream, options);
return new((IAsyncReader)ArjReader.OpenReader(stream, options));
}
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
}
}

View File

@@ -60,22 +60,11 @@ public abstract class Factory : IFactory
);
public abstract ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
);
/// <inheritdoc/>
public virtual ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(IsArchive(stream, password, bufferSize));
}
);
/// <inheritdoc/>
public virtual FileInfo? GetFilePart(int index, FileInfo part1) => null;
@@ -112,31 +101,4 @@ public abstract class Factory : IFactory
return false;
}
internal virtual async ValueTask<(bool, IAsyncReader?)> TryOpenReaderAsync(
SharpCompressStream stream,
ReaderOptions options,
CancellationToken cancellationToken
)
{
if (this is IReaderFactory readerFactory)
{
long pos = ((IStreamStack)stream).GetPosition();
if (
await IsArchiveAsync(
stream,
options.Password,
options.BufferSize,
cancellationToken
)
)
{
((IStreamStack)stream).StackSeek(pos);
return (true, readerFactory.OpenAsyncReader(stream, options, cancellationToken));
}
}
return (false, null);
}
}

View File

@@ -68,22 +68,9 @@ public class GZipFactory
public IAsyncArchive OpenAsyncArchive(Stream stream, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)OpenArchive(stream, readerOptions);
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
/// <inheritdoc/>
public IAsyncArchive OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)OpenArchive(fileInfo, readerOptions);
}
public IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)OpenArchive(fileInfo, readerOptions);
#endregion
@@ -157,14 +144,14 @@ public class GZipFactory
GZipReader.OpenReader(stream, options);
/// <inheritdoc/>
public IAsyncReader OpenAsyncReader(
public ValueTask<IAsyncReader> OpenAsyncReader(
Stream stream,
ReaderOptions? options,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncReader)GZipReader.OpenReader(stream, options);
return new((IAsyncReader)GZipReader.OpenReader(stream, options));
}
/// <inheritdoc/>

View File

@@ -37,6 +37,14 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
int bufferSize = ReaderOptions.DefaultBufferSize
) => RarArchive.IsRarFile(stream);
/// <inheritdoc/>
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
) => RarArchive.IsRarFileAsync(stream, cancellationToken: cancellationToken);
/// <inheritdoc/>
public override FileInfo? GetFilePart(int index, FileInfo part1) =>
RarArchiveVolumeFactory.GetFilePart(index, part1);
@@ -58,21 +66,8 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
RarArchive.OpenArchive(fileInfo, readerOptions);
/// <inheritdoc/>
public IAsyncArchive OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)OpenArchive(fileInfo, readerOptions);
}
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
public IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)OpenArchive(fileInfo, readerOptions);
#endregion
@@ -116,14 +111,14 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
RarReader.OpenReader(stream, options);
/// <inheritdoc/>
public IAsyncReader OpenAsyncReader(
public ValueTask<IAsyncReader> OpenAsyncReader(
Stream stream,
ReaderOptions? options,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncReader)RarReader.OpenReader(stream, options);
return new((IAsyncReader)RarReader.OpenReader(stream, options));
}
#endregion

View File

@@ -36,6 +36,14 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
int bufferSize = ReaderOptions.DefaultBufferSize
) => SevenZipArchive.IsSevenZipFile(stream);
/// <inheritdoc/>
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
) => SevenZipArchive.IsSevenZipFileAsync(stream, cancellationToken);
#endregion
#region IArchiveFactory
@@ -46,28 +54,15 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
/// <inheritdoc/>
public IAsyncArchive OpenAsyncArchive(Stream stream, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)OpenArchive(stream, readerOptions);
SevenZipArchive.OpenAsyncArchive(stream, readerOptions, CancellationToken.None);
/// <inheritdoc/>
public IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
SevenZipArchive.OpenArchive(fileInfo, readerOptions);
/// <inheritdoc/>
public IAsyncArchive OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)OpenArchive(fileInfo, readerOptions);
}
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
public IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
SevenZipArchive.OpenAsyncArchive(fileInfo, readerOptions, CancellationToken.None);
#endregion
@@ -83,7 +78,7 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
public IAsyncArchive OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null
) => (IAsyncArchive)OpenArchive(streams, readerOptions);
) => SevenZipArchive.OpenAsyncArchive(streams, readerOptions, CancellationToken.None);
/// <inheritdoc/>
public IArchive OpenArchive(
@@ -96,11 +91,7 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)OpenArchive(fileInfos, readerOptions);
}
) => SevenZipArchive.OpenAsyncArchive(fileInfos, readerOptions, cancellationToken);
#endregion

View File

@@ -1,25 +1,15 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.Lzw;
using SharpCompress.Compressors.Xz;
using SharpCompress.Compressors.ZStandard;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Tar;
using SharpCompress.Writers;
using SharpCompress.Writers.Tar;
using GZipArchive = SharpCompress.Archives.GZip.GZipArchive;
namespace SharpCompress.Factories;
@@ -45,7 +35,7 @@ public class TarFactory
/// <inheritdoc/>
public override IEnumerable<string> GetSupportedExtensions()
{
foreach (var testOption in compressionOptions)
foreach (var testOption in TarWrapper.Wrappers)
{
foreach (var ext in testOption.KnownExtensions)
{
@@ -59,13 +49,55 @@ public class TarFactory
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => TarArchive.IsTarFile(stream);
)
{
var rewindableStream = new SharpCompressStream(stream);
long pos = rewindableStream.GetPosition();
foreach (var wrapper in TarWrapper.Wrappers)
{
rewindableStream.StackSeek(pos);
if (wrapper.IsMatch(rewindableStream))
{
rewindableStream.StackSeek(pos);
var decompressedStream = wrapper.CreateStream(rewindableStream);
if (TarArchive.IsTarFile(decompressedStream))
{
rewindableStream.StackSeek(pos);
return true;
}
}
}
public override ValueTask<bool> IsArchiveAsync(
return false;
}
/// <inheritdoc/>
public override async ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
)
{
var rewindableStream = new SharpCompressStream(stream);
long pos = rewindableStream.GetPosition();
foreach (var wrapper in TarWrapper.Wrappers)
{
rewindableStream.StackSeek(pos);
if (await wrapper.IsMatchAsync(rewindableStream, cancellationToken))
{
rewindableStream.StackSeek(pos);
var decompressedStream = wrapper.CreateStream(rewindableStream);
if (await TarArchive.IsTarFileAsync(decompressedStream, cancellationToken))
{
rewindableStream.StackSeek(pos);
return true;
}
}
}
return false;
}
#endregion
@@ -84,15 +116,8 @@ public class TarFactory
TarArchive.OpenArchive(fileInfo, readerOptions);
/// <inheritdoc/>
public IAsyncArchive OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)OpenArchive(fileInfo, readerOptions);
}
public IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)OpenArchive(fileInfo, readerOptions);
#endregion
@@ -131,161 +156,54 @@ public class TarFactory
#region IReaderFactory
protected class TestOption
{
public readonly CompressionType Type;
public readonly Func<Stream, bool> CanHandle;
public readonly bool WrapInSharpCompressStream;
public readonly Func<Stream, Stream> CreateStream;
public readonly IEnumerable<string> KnownExtensions;
public TestOption(
CompressionType Type,
Func<Stream, bool> CanHandle,
Func<Stream, Stream> CreateStream,
IEnumerable<string> KnownExtensions,
bool WrapInSharpCompressStream = true
)
{
this.Type = Type;
this.CanHandle = CanHandle;
this.WrapInSharpCompressStream = WrapInSharpCompressStream;
this.CreateStream = CreateStream;
this.KnownExtensions = KnownExtensions;
}
}
// https://en.wikipedia.org/wiki/Tar_(computing)#Suffixes_for_compressed_files
protected TestOption[] compressionOptions =
[
new(CompressionType.None, (stream) => true, (stream) => stream, ["tar"], false), // We always do a test for IsTarFile later
new(
CompressionType.BZip2,
BZip2Stream.IsBZip2,
(stream) => new BZip2Stream(stream, CompressionMode.Decompress, false),
["tar.bz2", "tb2", "tbz", "tbz2", "tz2"]
),
new(
CompressionType.GZip,
GZipArchive.IsGZipFile,
(stream) => new GZipStream(stream, CompressionMode.Decompress),
["tar.gz", "taz", "tgz"]
),
new(
CompressionType.ZStandard,
ZStandardStream.IsZStandard,
(stream) => new ZStandardStream(stream),
["tar.zst", "tar.zstd", "tzst", "tzstd"]
),
new(
CompressionType.LZip,
LZipStream.IsLZipFile,
(stream) => new LZipStream(stream, CompressionMode.Decompress),
["tar.lz"]
),
new(
CompressionType.Xz,
XZStream.IsXZStream,
(stream) => new XZStream(stream),
["tar.xz", "txz"],
false
),
new(
CompressionType.Lzw,
LzwStream.IsLzwStream,
(stream) => new LzwStream(stream),
["tar.Z", "tZ", "taZ"],
false
),
];
/// <inheritdoc/>
internal override bool TryOpenReader(
SharpCompressStream rewindableStream,
ReaderOptions options,
out IReader? reader
)
public IReader OpenReader(Stream stream, ReaderOptions? options)
{
reader = null;
long pos = ((IStreamStack)rewindableStream).GetPosition();
TestOption? testedOption = null;
if (!string.IsNullOrWhiteSpace(options.ExtensionHint))
options ??= new ReaderOptions();
var rewindableStream = new SharpCompressStream(stream);
long pos = rewindableStream.GetPosition();
foreach (var wrapper in TarWrapper.Wrappers)
{
testedOption = compressionOptions.FirstOrDefault(a =>
a.KnownExtensions.Contains(
options.ExtensionHint,
StringComparer.CurrentCultureIgnoreCase
)
);
if (testedOption != null)
rewindableStream.StackSeek(pos);
if (wrapper.IsMatch(rewindableStream))
{
reader = TryOption(rewindableStream, options, pos, testedOption);
if (reader != null)
rewindableStream.StackSeek(pos);
var decompressedStream = wrapper.CreateStream(rewindableStream);
if (TarArchive.IsTarFile(decompressedStream))
{
return true;
rewindableStream.StackSeek(pos);
return new TarReader(rewindableStream, options, wrapper.CompressionType);
}
}
}
foreach (var testOption in compressionOptions)
{
if (testedOption == testOption)
{
continue; // Already tested above
}
((IStreamStack)rewindableStream).StackSeek(pos);
reader = TryOption(rewindableStream, options, pos, testOption);
if (reader != null)
{
return true;
}
}
return false;
}
private static IReader? TryOption(
SharpCompressStream rewindableStream,
ReaderOptions options,
long pos,
TestOption testOption
)
{
if (testOption.CanHandle(rewindableStream))
{
((IStreamStack)rewindableStream).StackSeek(pos);
var inStream = rewindableStream;
if (testOption.WrapInSharpCompressStream)
{
inStream = SharpCompressStream.Create(rewindableStream, leaveOpen: true);
}
var testStream = testOption.CreateStream(rewindableStream);
if (TarArchive.IsTarFile(testStream))
{
((IStreamStack)rewindableStream).StackSeek(pos);
return new TarReader(rewindableStream, options, testOption.Type);
}
}
return null;
throw new InvalidFormatException("Not a tar file.");
}
/// <inheritdoc/>
public IReader OpenReader(Stream stream, ReaderOptions? options) =>
TarReader.OpenReader(stream, options);
/// <inheritdoc/>
public IAsyncReader OpenAsyncReader(
public async ValueTask<IAsyncReader> OpenAsyncReader(
Stream stream,
ReaderOptions? options,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
options ??= new ReaderOptions();
var rewindableStream = new SharpCompressStream(stream);
long pos = rewindableStream.GetPosition();
foreach (var wrapper in TarWrapper.Wrappers)
{
rewindableStream.StackSeek(pos);
if (await wrapper.IsMatchAsync(rewindableStream, cancellationToken))
{
rewindableStream.StackSeek(pos);
var decompressedStream = wrapper.CreateStream(rewindableStream);
if (await TarArchive.IsTarFileAsync(decompressedStream, cancellationToken))
{
rewindableStream.StackSeek(pos);
return new TarReader(rewindableStream, options, wrapper.CompressionType);
}
}
}
return (IAsyncReader)TarReader.OpenReader(stream, options);
}

View File

@@ -0,0 +1,92 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives.GZip;
using SharpCompress.Common;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.Lzw;
using SharpCompress.Compressors.Xz;
using SharpCompress.Compressors.ZStandard;
namespace SharpCompress.Factories;
/// <summary>
/// Pairs a compression format with its detection predicates (sync and async) and a
/// decompressor factory. Used when probing streams for tar archives that may be
/// wrapped in an outer compression layer (tar.gz, tar.bz2, tar.xz, ...): callers
/// iterate <see cref="Wrappers"/>, test <see cref="IsMatch"/>/<see cref="IsMatchAsync"/>,
/// then decompress via <see cref="CreateStream"/> and check for a tar signature.
/// </summary>
public class TarWrapper(
    CompressionType type,
    Func<Stream, bool> canHandle,
    Func<Stream, CancellationToken, ValueTask<bool>> canHandleAsync,
    Func<Stream, Stream> createStream,
    IEnumerable<string> knownExtensions,
    bool wrapInSharpCompressStream = true
)
{
    // Compression type this wrapper detects.
    public CompressionType CompressionType { get; } = type;

    // Synchronous signature probe for the compression format.
    public Func<Stream, bool> IsMatch { get; } = canHandle;

    // Asynchronous signature probe for the compression format.
    public Func<Stream, CancellationToken, ValueTask<bool>> IsMatchAsync { get; } = canHandleAsync;

    // When true, the input should be wrapped in a SharpCompressStream before
    // decompression — presumably for rewind/buffer support; TODO confirm at call sites.
    public bool WrapInSharpCompressStream { get; } = wrapInSharpCompressStream;

    // Creates the decompressing stream over the raw input.
    public Func<Stream, Stream> CreateStream { get; } = createStream;

    // Known file-name extensions for this compression.
    public IEnumerable<string> KnownExtensions { get; } = knownExtensions;

    // Ordered probe list; the "None" entry matches everything, so every stream
    // falls through to a direct IsTarFile test.
    // https://en.wikipedia.org/wiki/Tar_(computing)#Suffixes_for_compressed_files
    public static TarWrapper[] Wrappers { get; } =
    [
        new(
            CompressionType.None,
            (_) => true,
            (_, _) => new ValueTask<bool>(true),
            (stream) => stream,
            ["tar"],
            false
        ), // We always do a test for IsTarFile later
        new(
            CompressionType.BZip2,
            BZip2Stream.IsBZip2,
            BZip2Stream.IsBZip2Async,
            (stream) => new BZip2Stream(stream, CompressionMode.Decompress, false),
            ["tar.bz2", "tb2", "tbz", "tbz2", "tz2"]
        ),
        new(
            CompressionType.GZip,
            GZipArchive.IsGZipFile,
            GZipArchive.IsGZipFileAsync,
            (stream) => new GZipStream(stream, CompressionMode.Decompress),
            ["tar.gz", "taz", "tgz"]
        ),
        new(
            CompressionType.ZStandard,
            ZStandardStream.IsZStandard,
            ZStandardStream.IsZStandardAsync,
            (stream) => new ZStandardStream(stream),
            ["tar.zst", "tar.zstd", "tzst", "tzstd"]
        ),
        new(
            CompressionType.LZip,
            LZipStream.IsLZipFile,
            LZipStream.IsLZipFileAsync,
            (stream) => new LZipStream(stream, CompressionMode.Decompress),
            ["tar.lz"]
        ),
        new(
            CompressionType.Xz,
            XZStream.IsXZStream,
            XZStream.IsXZStreamAsync,
            (stream) => new XZStream(stream),
            ["tar.xz", "txz"],
            false
        ),
        new(
            CompressionType.Lzw,
            LzwStream.IsLzwStream,
            LzwStream.IsLzwStreamAsync,
            (stream) => new LzwStream(stream),
            ["tar.Z", "tZ", "taZ"],
            false
        ),
    ];
}

View File

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Compressors.ZStandard;
@@ -29,6 +30,7 @@ internal class ZStandardFactory : Factory
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
) => ZStandardStream.IsZStandardAsync(stream, cancellationToken);
}

View File

@@ -81,12 +81,6 @@ public class ZipFactory
return false;
}
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
/// <inheritdoc/>
public override async ValueTask<bool> IsArchiveAsync(
Stream stream,
@@ -151,15 +145,8 @@ public class ZipFactory
ZipArchive.OpenArchive(fileInfo, readerOptions);
/// <inheritdoc/>
public IAsyncArchive OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)OpenArchive(fileInfo, readerOptions);
}
public IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)OpenArchive(fileInfo, readerOptions);
#endregion
@@ -203,14 +190,14 @@ public class ZipFactory
ZipReader.OpenReader(stream, options);
/// <inheritdoc/>
public IAsyncReader OpenAsyncReader(
public ValueTask<IAsyncReader> OpenAsyncReader(
Stream stream,
ReaderOptions? options,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncReader)ZipReader.OpenReader(stream, options);
return new((IAsyncReader)ZipReader.OpenReader(stream, options));
}
#endregion

View File

@@ -0,0 +1,186 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.IO;
/// <summary>
/// Async binary reader that counts the bytes consumed since the last <see cref="Mark"/>
/// call. All multi-byte integers are read little-endian via BinaryPrimitives.
/// Also provides RAR 5.0 variable-length integer ("vint") decoding.
/// </summary>
internal class AsyncMarkingBinaryReader : IDisposable
{
    private readonly AsyncBinaryReader _asyncReader;
    private bool _disposed;

    /// <summary>
    /// Creates a reader over <paramref name="stream"/>.
    /// </summary>
    /// <param name="stream">Source stream; read sequentially.</param>
    /// <param name="cancellationToken">Checked once at construction time.</param>
    /// <param name="leaveOpen">
    /// When true, <paramref name="stream"/> is left open after this reader is disposed.
    /// Defaults to false, matching the previous hard-coded behavior.
    /// </param>
    public AsyncMarkingBinaryReader(
        Stream stream,
        CancellationToken cancellationToken = default,
        bool leaveOpen = false
    )
    {
        cancellationToken.ThrowIfCancellationRequested();
        _asyncReader = new AsyncBinaryReader(stream, leaveOpen: leaveOpen);
    }

    /// <summary>Underlying stream being read.</summary>
    public Stream BaseStream => _asyncReader.BaseStream;

    /// <summary>Bytes consumed since the last <see cref="Mark"/> (or construction).</summary>
    public virtual long CurrentReadByteCount { get; protected set; }

    /// <summary>Resets the byte counter to zero.</summary>
    public virtual void Mark() => CurrentReadByteCount = 0;

    /// <summary>Reads one byte; any non-zero value is true.</summary>
    public virtual async ValueTask<bool> ReadBooleanAsync(CancellationToken ct = default)
    {
        var b = await ReadByteAsync(ct).ConfigureAwait(false);
        return b != 0;
    }

    public virtual async ValueTask<byte> ReadByteAsync(CancellationToken ct = default)
    {
        CurrentReadByteCount++;
        return await _asyncReader.ReadByteAsync(ct).ConfigureAwait(false);
    }

    /// <summary>Reads exactly <paramref name="count"/> bytes into a new array.</summary>
    public virtual async ValueTask<byte[]> ReadBytesAsync(int count, CancellationToken ct = default)
    {
        // NOTE(review): the counter is advanced before the read; if the read throws,
        // CurrentReadByteCount overstates consumption. Preserved as-is — callers are
        // presumed to abandon the reader on failure; TODO confirm.
        CurrentReadByteCount += count;
        var buffer = new byte[count];
        await _asyncReader.ReadBytesAsync(buffer, 0, count, ct).ConfigureAwait(false);
        return buffer;
    }

    public virtual async ValueTask<short> ReadInt16Async(CancellationToken ct = default)
    {
        var bytes = await ReadBytesAsync(2, ct).ConfigureAwait(false);
        return BinaryPrimitives.ReadInt16LittleEndian(bytes);
    }

    public virtual async ValueTask<int> ReadInt32Async(CancellationToken ct = default)
    {
        var bytes = await ReadBytesAsync(4, ct).ConfigureAwait(false);
        return BinaryPrimitives.ReadInt32LittleEndian(bytes);
    }

    public virtual async ValueTask<long> ReadInt64Async(CancellationToken ct = default)
    {
        var bytes = await ReadBytesAsync(8, ct).ConfigureAwait(false);
        return BinaryPrimitives.ReadInt64LittleEndian(bytes);
    }

    public virtual async ValueTask<sbyte> ReadSByteAsync(CancellationToken ct = default)
    {
        var b = await ReadByteAsync(ct).ConfigureAwait(false);
        return (sbyte)b;
    }

    public virtual async ValueTask<ushort> ReadUInt16Async(CancellationToken ct = default)
    {
        var bytes = await ReadBytesAsync(2, ct).ConfigureAwait(false);
        return BinaryPrimitives.ReadUInt16LittleEndian(bytes);
    }

    public virtual async ValueTask<uint> ReadUInt32Async(CancellationToken ct = default)
    {
        var bytes = await ReadBytesAsync(4, ct).ConfigureAwait(false);
        return BinaryPrimitives.ReadUInt32LittleEndian(bytes);
    }

    public virtual async ValueTask<ulong> ReadUInt64Async(CancellationToken ct = default)
    {
        var bytes = await ReadBytesAsync(8, ct).ConfigureAwait(false);
        return BinaryPrimitives.ReadUInt64LittleEndian(bytes);
    }

    /// <summary>
    /// Reads a RAR vint: 7 data bits per byte, LSB-first; the high bit set means
    /// "more bytes follow". Throws <see cref="FormatException"/> on overflow or
    /// when more than <paramref name="maxBytes"/> continuation bytes appear.
    /// </summary>
    public virtual async ValueTask<ulong> ReadRarVIntAsync(
        int maxBytes = 10,
        CancellationToken ct = default
    ) => await DoReadRarVIntAsync((maxBytes - 1) * 7, ct).ConfigureAwait(false);

    private async ValueTask<ulong> DoReadRarVIntAsync(int maxShift, CancellationToken ct)
    {
        var shift = 0;
        ulong result = 0;
        do
        {
            var b0 = await ReadByteAsync(ct).ConfigureAwait(false);
            var b1 = ((uint)b0) & 0x7f;
            ulong n = b1;
            var shifted = n << shift;
            // Round-trip check detects bits shifted off the top (overflow).
            if (n != shifted >> shift)
            {
                break;
            }
            result |= shifted;
            // High bit clear (b0 == b1) marks the final byte of the vint.
            if (b0 == b1)
            {
                return result;
            }
            shift += 7;
        } while (shift <= maxShift);
        throw new FormatException("malformed vint");
    }

    /// <summary>32-bit variant of <see cref="ReadRarVIntAsync"/>.</summary>
    public virtual async ValueTask<uint> ReadRarVIntUInt32Async(
        int maxBytes = 5,
        CancellationToken ct = default
    ) => await DoReadRarVIntUInt32Async((maxBytes - 1) * 7, ct).ConfigureAwait(false);

    private async ValueTask<uint> DoReadRarVIntUInt32Async(int maxShift, CancellationToken ct)
    {
        var shift = 0;
        uint result = 0;
        do
        {
            var b0 = await ReadByteAsync(ct).ConfigureAwait(false);
            var b1 = ((uint)b0) & 0x7f;
            var n = b1;
            var shifted = n << shift;
            // Round-trip check detects bits shifted off the top (overflow).
            if (n != shifted >> shift)
            {
                break;
            }
            result |= shifted;
            // High bit clear (b0 == b1) marks the final byte of the vint.
            if (b0 == b1)
            {
                return result;
            }
            shift += 7;
        } while (shift <= maxShift);
        throw new FormatException("malformed vint");
    }

    /// <summary>16-bit vint; throws <see cref="OverflowException"/> if the value exceeds ushort.</summary>
    public virtual async ValueTask<ushort> ReadRarVIntUInt16Async(
        int maxBytes = 3,
        CancellationToken ct = default
    ) =>
        checked(
            (ushort)await DoReadRarVIntUInt32Async((maxBytes - 1) * 7, ct).ConfigureAwait(false)
        );

    /// <summary>8-bit vint; throws <see cref="OverflowException"/> if the value exceeds byte.</summary>
    public virtual async ValueTask<byte> ReadRarVIntByteAsync(
        int maxBytes = 2,
        CancellationToken ct = default
    ) =>
        checked((byte)await DoReadRarVIntUInt32Async((maxBytes - 1) * 7, ct).ConfigureAwait(false));

    public void Dispose()
    {
        if (_disposed)
        {
            return;
        }
        _disposed = true;
        _asyncReader.Dispose();
    }

#if NET6_0_OR_GREATER
    public async ValueTask DisposeAsync()
    {
        if (_disposed)
        {
            return;
        }
        _disposed = true;
        await _asyncReader.DisposeAsync().ConfigureAwait(false);
    }
#endif
}

View File

@@ -31,6 +31,8 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IAsyncReader
public ArchiveType ArchiveType { get; }
protected bool IsAsync => _entriesForCurrentReadStreamAsync is not null;
/// <summary>
/// Current volume that the current entry resides in
/// </summary>

View File

@@ -1,12 +1,39 @@
#if NET8_0_OR_GREATER
using System.Collections.Generic;
using System.IO;
using System.Threading;
using SharpCompress.Common;
namespace SharpCompress.Readers.Ace;
public partial class AceReader : IReaderOpenable
public partial class AceReader
#if NET8_0_OR_GREATER
: IReaderOpenable
#endif
{
/// <summary>
/// Opens an AceReader for non-seeking usage with a single volume.
/// </summary>
/// <param name="stream">The stream containing the ACE archive.</param>
/// <param name="options">Reader options.</param>
/// <returns>An AceReader instance.</returns>
public static IReader OpenReader(Stream stream, ReaderOptions? options = null)
{
stream.NotNull(nameof(stream));
return new SingleVolumeAceReader(stream, options ?? new ReaderOptions());
}
/// <summary>
/// Opens an AceReader for Non-seeking usage with multiple volumes
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
/// <returns></returns>
public static IReader OpenReader(IEnumerable<Stream> streams, ReaderOptions? options = null)
{
streams.NotNull(nameof(streams));
return new MultiVolumeAceReader(streams, options ?? new ReaderOptions());
}
public static IAsyncReader OpenAsyncReader(
string path,
ReaderOptions? readerOptions = null,
@@ -28,6 +55,15 @@ public partial class AceReader : IReaderOpenable
return (IAsyncReader)OpenReader(stream, readerOptions);
}
public static IAsyncReader OpenAsyncReader(
IEnumerable<Stream> streams,
ReaderOptions? options = null
)
{
streams.NotNull(nameof(streams));
return new MultiVolumeAceReader(streams, options ?? new ReaderOptions());
}
public static IAsyncReader OpenAsyncReader(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
@@ -50,4 +86,3 @@ public partial class AceReader : IReaderOpenable
return OpenReader(fileInfo.OpenRead(), readerOptions);
}
}
#endif

View File

@@ -1,115 +1,120 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Ace;
using SharpCompress.Common.Ace.Headers;
using SharpCompress.Common.Arj;
namespace SharpCompress.Readers.Ace
namespace SharpCompress.Readers.Ace;
/// <summary>
/// Reader for ACE archives.
/// ACE is a proprietary archive format. This implementation supports both ACE 1.0 and ACE 2.0 formats
/// and can read archive metadata and extract uncompressed (stored) entries.
/// Compressed entries require proprietary decompression algorithms that are not publicly documented.
/// </summary>
/// <remarks>
/// ACE 2.0 additions over ACE 1.0:
/// - Improved LZ77 compression (compression type 2)
/// - Recovery record support
/// - Additional header flags
/// </remarks>
public abstract partial class AceReader : AbstractReader<AceEntry, AceVolume>
{
/// <summary>
/// Reader for ACE archives.
/// ACE is a proprietary archive format. This implementation supports both ACE 1.0 and ACE 2.0 formats
/// and can read archive metadata and extract uncompressed (stored) entries.
/// Compressed entries require proprietary decompression algorithms that are not publicly documented.
/// </summary>
/// <remarks>
/// ACE 2.0 additions over ACE 1.0:
/// - Improved LZ77 compression (compression type 2)
/// - Recovery record support
/// - Additional header flags
/// </remarks>
public abstract partial class AceReader : AbstractReader<AceEntry, AceVolume>
private readonly IArchiveEncoding _archiveEncoding;
internal AceReader(ReaderOptions options)
: base(options, ArchiveType.Ace)
{
private readonly IArchiveEncoding _archiveEncoding;
_archiveEncoding = Options.ArchiveEncoding;
}
internal AceReader(ReaderOptions options)
: base(options, ArchiveType.Ace)
private AceReader(Stream stream, ReaderOptions options)
: this(options) { }
/// <summary>
/// Derived class must create or manage the Volume itself.
/// AbstractReader.Volume is get-only, so it cannot be set here.
/// </summary>
public override AceVolume? Volume => _volume;
private AceVolume? _volume;
protected abstract void ValidateArchive(AceVolume archive);
protected override IEnumerable<AceEntry> GetEntries(Stream stream)
{
var mainHeaderReader = new AceMainHeader(_archiveEncoding);
var mainHeader = mainHeaderReader.Read(stream);
if (mainHeader == null)
{
_archiveEncoding = Options.ArchiveEncoding;
yield break;
}
private AceReader(Stream stream, ReaderOptions options)
: this(options) { }
/// <summary>
/// Derived class must create or manage the Volume itself.
/// AbstractReader.Volume is get-only, so it cannot be set here.
/// </summary>
public override AceVolume? Volume => _volume;
private AceVolume? _volume;
/// <summary>
/// Opens an AceReader for non-seeking usage with a single volume.
/// </summary>
/// <param name="stream">The stream containing the ACE archive.</param>
/// <param name="options">Reader options.</param>
/// <returns>An AceReader instance.</returns>
public static IReader OpenReader(Stream stream, ReaderOptions? options = null)
if (mainHeader?.IsMultiVolume == true)
{
stream.NotNull(nameof(stream));
return new SingleVolumeAceReader(stream, options ?? new ReaderOptions());
throw new MultiVolumeExtractionException("Multi volumes are currently not supported");
}
/// <summary>
/// Opens an AceReader for Non-seeking usage with multiple volumes
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
/// <returns></returns>
public static IReader OpenReader(IEnumerable<Stream> streams, ReaderOptions? options = null)
if (_volume == null)
{
streams.NotNull(nameof(streams));
return new MultiVolumeAceReader(streams, options ?? new ReaderOptions());
_volume = new AceVolume(stream, Options, 0);
ValidateArchive(_volume);
}
protected abstract void ValidateArchive(AceVolume archive);
protected override IEnumerable<AceEntry> GetEntries(Stream stream)
var localHeaderReader = new AceFileHeader(_archiveEncoding);
while (true)
{
var mainHeaderReader = new AceMainHeader(_archiveEncoding);
var mainHeader = mainHeaderReader.Read(stream);
if (mainHeader == null)
var localHeader = localHeaderReader.Read(stream);
if (localHeader?.IsFileEncrypted == true)
{
yield break;
}
if (mainHeader?.IsMultiVolume == true)
{
throw new MultiVolumeExtractionException(
"Multi volumes are currently not supported"
throw new CryptographicException(
"Password protected archives are currently not supported"
);
}
if (localHeader == null)
break;
if (_volume == null)
{
_volume = new AceVolume(stream, Options, 0);
ValidateArchive(_volume);
}
yield return new AceEntry(new AceFilePart((AceFileHeader)localHeader, stream));
}
}
var localHeaderReader = new AceFileHeader(_archiveEncoding);
while (true)
{
var localHeader = localHeaderReader.Read(stream);
if (localHeader?.IsFileEncrypted == true)
{
throw new CryptographicException(
"Password protected archives are currently not supported"
);
}
if (localHeader == null)
break;
yield return new AceEntry(new AceFilePart((AceFileHeader)localHeader, stream));
}
protected override async IAsyncEnumerable<AceEntry> GetEntriesAsync(Stream stream)
{
var mainHeaderReader = new AceMainHeader(_archiveEncoding);
var mainHeader = await mainHeaderReader.ReadAsync(stream);
if (mainHeader == null)
{
yield break;
}
protected virtual IEnumerable<FilePart> CreateFilePartEnumerableForCurrentEntry() =>
Entry.Parts;
if (mainHeader?.IsMultiVolume == true)
{
throw new MultiVolumeExtractionException("Multi volumes are currently not supported");
}
if (_volume == null)
{
_volume = new AceVolume(stream, Options, 0);
ValidateArchive(_volume);
}
var localHeaderReader = new AceFileHeader(_archiveEncoding);
while (true)
{
var localHeader = await localHeaderReader.ReadAsync(stream);
if (localHeader?.IsFileEncrypted == true)
{
throw new CryptographicException(
"Password protected archives are currently not supported"
);
}
if (localHeader == null)
break;
yield return new AceEntry(new AceFilePart((AceFileHeader)localHeader, stream));
}
}
protected virtual IEnumerable<FilePart> CreateFilePartEnumerableForCurrentEntry() =>
Entry.Parts;
}

View File

@@ -1,6 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Arj;
using SharpCompress.Common.Arj.Headers;
@@ -83,6 +85,42 @@ namespace SharpCompress.Readers.Arj
}
}
/// <summary>
/// Asynchronously enumerates the entries of an ARJ archive: reads the main header,
/// rejects unsupported archives (multi-volume, password-protected), then yields one
/// <see cref="ArjEntry"/> per local file header until the header reader returns null.
/// </summary>
/// <param name="stream">Sequential stream positioned at the start of the archive.</param>
protected override async IAsyncEnumerable<ArjEntry> GetEntriesAsync(Stream stream)
{
    var encoding = new ArchiveEncoding();
    var mainHeaderReader = new ArjMainHeader(encoding);
    var localHeaderReader = new ArjLocalHeader(encoding);
    var mainHeader = await mainHeaderReader.ReadAsync(stream).ConfigureAwait(false);
    if (mainHeader?.IsVolume == true)
    {
        throw new MultiVolumeExtractionException(
            "Multi volumes are currently not supported"
        );
    }
    // "IsGabled" [sic] — presumably ARJ's "garbled" flag marking encrypted archives;
    // TODO confirm against the ArjMainHeader declaration.
    if (mainHeader?.IsGabled == true)
    {
        throw new CryptographicException(
            "Password protected archives are currently not supported"
        );
    }
    // Lazily create and validate the single volume on first enumeration.
    if (_volume == null)
    {
        _volume = new ArjVolume(stream, Options, 0);
        ValidateArchive(_volume);
    }
    while (true)
    {
        var localHeader = await localHeaderReader.ReadAsync(stream).ConfigureAwait(false);
        if (localHeader == null)
        {
            break; // no more local headers: end of archive
        }
        yield return new ArjEntry(new ArjFilePart((ArjLocalHeader)localHeader, stream));
    }
}
protected virtual IEnumerable<FilePart> CreateFilePartEnumerableForCurrentEntry() =>
Entry.Parts;
}

View File

@@ -1,5 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Readers;
@@ -20,7 +21,7 @@ public interface IReaderFactory : Factories.IFactory
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
/// <returns></returns>
IAsyncReader OpenAsyncReader(
ValueTask<IAsyncReader> OpenAsyncReader(
Stream stream,
ReaderOptions? options,
CancellationToken cancellationToken

View File

@@ -4,6 +4,8 @@ using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
@@ -47,6 +49,13 @@ internal class MultiVolumeRarReader : RarReader
return enumerator;
}
protected override IAsyncEnumerable<FilePart> CreateFilePartEnumerableForCurrentEntryAsync()
{
var enumerator = new MultiVolumeStreamAsyncEnumerator(this, streams, tempStream);
tempStream = null;
return enumerator;
}
private class MultiVolumeStreamEnumerator : IEnumerable<FilePart>, IEnumerator<FilePart>
{
private readonly MultiVolumeRarReader reader;
@@ -110,4 +119,66 @@ internal class MultiVolumeRarReader : RarReader
public void Reset() { }
}
/// <summary>
/// Streams the file parts of a multi-volume RAR entry across volume boundaries,
/// asking the owning reader to load the next volume stream on demand.
/// Acts as both the async enumerable and its single-use enumerator.
/// </summary>
private class MultiVolumeStreamAsyncEnumerator
    : IAsyncEnumerable<FilePart>,
        IAsyncEnumerator<FilePart>
{
    private readonly MultiVolumeRarReader reader;
    // Caller-provided source of follow-up volume streams; not disposed here —
    // presumably owned by the caller (NOTE(review): confirm ownership).
    private readonly IEnumerator<Stream> nextReadableStreams;
    // Stream handed over by the reader up front; consumed once, then nulled.
    private Stream tempStream;
    private bool isFirst = true;

    internal MultiVolumeStreamAsyncEnumerator(
        MultiVolumeRarReader r,
        IEnumerator<Stream> nextReadableStreams,
        Stream tempStream
    )
    {
        reader = r;
        this.nextReadableStreams = nextReadableStreams;
        this.tempStream = tempStream;
    }

    // Null until the first successful MoveNextAsync call.
    public FilePart Current { get; private set; }

    public async ValueTask<bool> MoveNextAsync()
    {
        if (isFirst)
        {
            // First part comes from the volume the reader already has loaded.
            Current = reader.Entry.Parts.First();
            isFirst = false; // first stream is already loaded and ready to go
            return true;
        }
        if (!reader.Entry.IsSplitAfter)
        {
            // Entry does not continue into another volume: enumeration done.
            return false;
        }
        if (tempStream != null)
        {
            // Prefer the pre-supplied stream, exactly once.
            await reader.LoadStreamForReadingAsync(tempStream);
            tempStream = null;
        }
        else if (!nextReadableStreams.MoveNext())
        {
            throw new MultiVolumeExtractionException(
                "No stream provided when requested by MultiVolumeRarReader"
            );
        }
        else
        {
            await reader.LoadStreamForReadingAsync(nextReadableStreams.Current);
        }
        Current = reader.Entry.Parts.First();
        return true;
    }

    public IAsyncEnumerator<FilePart> GetAsyncEnumerator(
        CancellationToken cancellationToken = new()
    ) => this;

    // Nothing owned to release here; the volume streams live elsewhere.
    public ValueTask DisposeAsync() => new();
}
}

View File

@@ -100,6 +100,9 @@ public abstract partial class RarReader : AbstractReader<RarReaderEntry, RarVolu
protected virtual IEnumerable<FilePart> CreateFilePartEnumerableForCurrentEntry() =>
Entry.Parts;
protected virtual IAsyncEnumerable<FilePart> CreateFilePartEnumerableForCurrentEntryAsync() =>
Entry.Parts.ToAsyncEnumerable();
protected override EntryStream GetEntryStream()
{
if (Entry.IsRedir)
@@ -134,8 +137,8 @@ public abstract partial class RarReader : AbstractReader<RarReaderEntry, RarVolu
throw new InvalidOperationException("no stream for redirect entry");
}
var stream = new MultiVolumeReadOnlyStream(
CreateFilePartEnumerableForCurrentEntry().Cast<RarFilePart>()
var stream = await MultiVolumeReadOnlyAsyncStream.Create(
CreateFilePartEnumerableForCurrentEntryAsync().CastAsync<RarFilePart>()
);
if (Entry.IsRarV3)
{

View File

@@ -24,7 +24,7 @@ public static class ReaderFactory
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
/// <returns></returns>
public static IAsyncReader OpenAsyncReader(
public static ValueTask<IAsyncReader> OpenAsyncReader(
string filePath,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
@@ -47,7 +47,7 @@ public static class ReaderFactory
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
/// <returns></returns>
public static IAsyncReader OpenAsyncReader(
public static ValueTask<IAsyncReader> OpenAsyncReader(
FileInfo fileInfo,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
@@ -70,7 +70,7 @@ public static class ReaderFactory
var bStream = new SharpCompressStream(stream, bufferSize: options.BufferSize);
long pos = ((IStreamStack)bStream).GetPosition();
long pos = bStream.GetPosition();
var factories = Factories.Factory.Factories.OfType<Factories.Factory>();
@@ -89,7 +89,7 @@ public static class ReaderFactory
{
return reader;
}
((IStreamStack)bStream).StackSeek(pos);
bStream.StackSeek(pos);
}
foreach (var factory in factories)
@@ -98,7 +98,7 @@ public static class ReaderFactory
{
continue; // Already tested above
}
((IStreamStack)bStream).StackSeek(pos);
bStream.StackSeek(pos);
if (factory.TryOpenReader(bStream, options, out var reader) && reader != null)
{
return reader;
@@ -110,7 +110,7 @@ public static class ReaderFactory
);
}
public static IAsyncReader OpenAsyncReader(
public static async ValueTask<IAsyncReader> OpenAsyncReader(
Stream stream,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
@@ -120,13 +120,11 @@ public static class ReaderFactory
options ??= new ReaderOptions() { LeaveStreamOpen = false };
var bStream = new SharpCompressStream(stream, bufferSize: options.BufferSize);
long pos = bStream.GetPosition();
long pos = ((IStreamStack)bStream).GetPosition();
var factories = Factories.Factory.Factories.OfType<Factories.Factory>();
var factories = Factory.Factories.OfType<Factory>();
Factory? testedFactory = null;
if (!string.IsNullOrWhiteSpace(options.ExtensionHint))
{
testedFactory = factories.FirstOrDefault(a =>
@@ -135,14 +133,19 @@ public static class ReaderFactory
);
if (testedFactory is IReaderFactory readerFactory)
{
((IStreamStack)bStream).StackSeek(pos);
if (testedFactory.IsArchive(bStream))
bStream.StackSeek(pos);
if (
await testedFactory.IsArchiveAsync(
bStream,
cancellationToken: cancellationToken
)
)
{
((IStreamStack)bStream).StackSeek(pos);
return readerFactory.OpenAsyncReader(bStream, options, cancellationToken);
bStream.StackSeek(pos);
return await readerFactory.OpenAsyncReader(bStream, options, cancellationToken);
}
}
((IStreamStack)bStream).StackSeek(pos);
bStream.StackSeek(pos);
}
foreach (var factory in factories)
@@ -151,11 +154,14 @@ public static class ReaderFactory
{
continue; // Already tested above
}
((IStreamStack)bStream).StackSeek(pos);
if (factory is IReaderFactory readerFactory && factory.IsArchive(bStream))
bStream.StackSeek(pos);
if (
factory is IReaderFactory readerFactory
&& await factory.IsArchiveAsync(bStream, cancellationToken: cancellationToken)
)
{
((IStreamStack)bStream).StackSeek(pos);
return readerFactory.OpenAsyncReader(bStream, options, cancellationToken);
bStream.StackSeek(pos);
return await readerFactory.OpenAsyncReader(bStream, options, cancellationToken);
}
}

View File

@@ -1,11 +1,13 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using SharpCompress.Common;
namespace SharpCompress.Readers.Tar;
public partial class TarReader : IReaderOpenable
public partial class TarReader
#if NET8_0_OR_GREATER
: IReaderOpenable
#endif
{
public static IAsyncReader OpenAsyncReader(
string path,
@@ -50,4 +52,3 @@ public partial class TarReader : IReaderOpenable
return OpenReader(fileInfo.OpenRead(), readerOptions);
}
}
#endif

View File

@@ -58,9 +58,7 @@ public partial class TarReader : AbstractReader<TarEntry, TarVolume>
stream.NotNull(nameof(stream));
options = options ?? new ReaderOptions();
var rewindableStream = new SharpCompressStream(stream);
long pos = ((IStreamStack)rewindableStream).GetPosition();
if (GZipArchive.IsGZipFile(rewindableStream))
{
((IStreamStack)rewindableStream).StackSeek(pos);
@@ -72,7 +70,6 @@ public partial class TarReader : AbstractReader<TarEntry, TarVolume>
}
throw new InvalidFormatException("Not a tar file.");
}
((IStreamStack)rewindableStream).StackSeek(pos);
if (BZip2Stream.IsBZip2(rewindableStream))
{
@@ -85,7 +82,6 @@ public partial class TarReader : AbstractReader<TarEntry, TarVolume>
}
throw new InvalidFormatException("Not a tar file.");
}
((IStreamStack)rewindableStream).StackSeek(pos);
if (ZStandardStream.IsZStandard(rewindableStream))
{
@@ -110,7 +106,6 @@ public partial class TarReader : AbstractReader<TarEntry, TarVolume>
}
throw new InvalidFormatException("Not a tar file.");
}
((IStreamStack)rewindableStream).StackSeek(pos);
return new TarReader(rewindableStream, options, CompressionType.None);
}

View File

@@ -216,9 +216,9 @@
"net10.0": {
"Microsoft.NET.ILLink.Tasks": {
"type": "Direct",
"requested": "[10.0.0, )",
"resolved": "10.0.0",
"contentHash": "kICGrGYEzCNI3wPzfEXcwNHgTvlvVn9yJDhSdRK+oZQy4jvYH529u7O0xf5ocQKzOMjfS07+3z9PKRIjrFMJDA=="
"requested": "[10.0.1, )",
"resolved": "10.0.1",
"contentHash": "ISahzLHsHY7vrwqr2p1YWZ+gsxoBRtH7gWRDK8fDUst9pp2He0GiesaqEfeX0V8QMCJM3eNEHGGpnIcPjFo2NQ=="
},
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",

View File

@@ -60,7 +60,10 @@ namespace SharpCompress.Test.Ace
public async ValueTask Ace_Multi_Reader_Async()
{
var exception = await Assert.ThrowsAsync<MultiVolumeExtractionException>(() =>
DoMultiReaderAsync(new[] { "Ace.store.split.ace", "Ace.store.split.c01" })
DoMultiReaderAsync(
new[] { "Ace.store.split.ace", "Ace.store.split.c01" },
streams => AceReader.OpenAsyncReader(streams, null)
)
);
}
@@ -68,7 +71,7 @@ namespace SharpCompress.Test.Ace
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using Stream stream = File.OpenRead(testArchive);
await using var reader = ReaderFactory.OpenAsyncReader(
await using var reader = await ReaderFactory.OpenAsyncReader(
new AsyncOnlyStream(stream),
new ReaderOptions()
);
@@ -93,9 +96,9 @@ namespace SharpCompress.Test.Ace
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using Stream stream = File.OpenRead(testArchive);
await using var reader = ReaderFactory.OpenAsyncReader(
await using var reader = await ReaderFactory.OpenAsyncReader(
new AsyncOnlyStream(stream),
new ReaderOptions() { LookForHeader = false }
new ReaderOptions { LookForHeader = true }
);
while (await reader.MoveToNextEntryAsync())
{
@@ -108,36 +111,29 @@ namespace SharpCompress.Test.Ace
);
}
}
VerifyFiles();
CompareFilesByPath(
Path.Combine(SCRATCH_FILES_PATH, "alice29.txt"),
Path.Combine(MISC_TEST_FILES_PATH, "alice29.txt")
);
}
private async Task DoMultiReaderAsync(string[] archiveNames)
private async Task DoMultiReaderAsync(
string[] archives,
Func<IEnumerable<Stream>, IAsyncReader> readerFactory
)
{
var testArchives = archiveNames
.Select(s => Path.Combine(TEST_ARCHIVES_PATH, s))
.ToList();
var streams = testArchives.Select(File.OpenRead).ToList();
try
await using var reader = readerFactory(
archives.Select(s => Path.Combine(TEST_ARCHIVES_PATH, s)).Select(File.OpenRead)
);
while (await reader.MoveToNextEntryAsync())
{
await using var reader = ReaderFactory.OpenAsyncReader(
new AsyncOnlyStream(streams.First())
);
while (await reader.MoveToNextEntryAsync())
if (!reader.Entry.IsDirectory)
{
if (!reader.Entry.IsDirectory)
{
await reader.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
}
finally
{
foreach (var stream in streams)
{
stream.Dispose();
await reader.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
}

View File

@@ -85,8 +85,15 @@ public class ArchiveTests : ReaderTests
}
}
protected void ArchiveStreamRead(string testArchive, ReaderOptions? readerOptions = null) =>
ArchiveStreamRead(ArchiveFactory.AutoFactory, testArchive, readerOptions);
protected void ArchiveStreamRead(string testArchive, ReaderOptions? readerOptions = null)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
ArchiveStreamRead(
ArchiveFactory.FindFactory<IArchiveFactory>(testArchive),
readerOptions,
testArchive
);
}
protected void ArchiveStreamRead(
IArchiveFactory archiveFactory,
@@ -101,7 +108,12 @@ public class ArchiveTests : ReaderTests
protected void ArchiveStreamRead(
ReaderOptions? readerOptions = null,
params string[] testArchives
) => ArchiveStreamRead(ArchiveFactory.AutoFactory, readerOptions, testArchives);
) =>
ArchiveStreamRead(
ArchiveFactory.FindFactory<IArchiveFactory>(testArchives[0]),
readerOptions,
testArchives
);
protected void ArchiveStreamRead(
IArchiveFactory archiveFactory,
@@ -271,12 +283,13 @@ public class ArchiveTests : ReaderTests
}
protected void ArchiveFileRead(
IArchiveFactory archiveFactory,
string testArchive,
ReaderOptions? readerOptions = null
ReaderOptions? readerOptions = null,
IArchiveFactory? archiveFactory = null
)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
archiveFactory ??= ArchiveFactory.FindFactory<IArchiveFactory>(testArchive);
using (var archive = archiveFactory.OpenArchive(new FileInfo(testArchive), readerOptions))
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
@@ -290,9 +303,6 @@ public class ArchiveTests : ReaderTests
VerifyFiles();
}
protected void ArchiveFileRead(string testArchive, ReaderOptions? readerOptions = null) =>
ArchiveFileRead(ArchiveFactory.AutoFactory, testArchive, readerOptions);
protected void ArchiveFileSkip(
string testArchive,
string fileOrder,
@@ -600,7 +610,7 @@ public class ArchiveTests : ReaderTests
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
await ArchiveStreamReadAsync(
ArchiveFactory.AutoFactory,
ArchiveFactory.FindFactory<IArchiveFactory>(testArchive),
readerOptions,
new[] { testArchive }
);
@@ -651,40 +661,4 @@ public class ArchiveTests : ReaderTests
VerifyFiles();
}
}
[Fact]
public async Task ArchiveFactory_Open_WithPreWrappedStream()
{
// Test that ArchiveFactory.Open works correctly with a stream that's already wrapped
// This addresses the issue where ZIP files fail to open on Linux
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Zip.bzip2.noEmptyDirs.zip");
// Open with a pre-wrapped stream
using (var fileStream = File.OpenRead(testArchive))
using (var wrappedStream = SharpCompressStream.Create(fileStream, bufferSize: 32768))
await using (
var archive = await ArchiveFactory.OpenAsyncArchive(new AsyncOnlyStream(wrappedStream))
)
{
Assert.Equal(ArchiveType.Zip, archive.Type);
Assert.Equal(3, await archive.EntriesAsync.CountAsync());
}
}
[Fact]
public async Task ArchiveFactory_Open_WithRawFileStream()
{
// Test that ArchiveFactory.Open works correctly with a raw FileStream
// This is the common use case reported in the issue
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Zip.bzip2.noEmptyDirs.zip");
using (var stream = File.OpenRead(testArchive))
await using (
var archive = await ArchiveFactory.OpenAsyncArchive(new AsyncOnlyStream(stream))
)
{
Assert.Equal(ArchiveType.Zip, archive.Type);
Assert.Equal(3, await archive.EntriesAsync.CountAsync());
}
}
}

View File

@@ -95,7 +95,7 @@ namespace SharpCompress.Test.Arj
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using Stream stream = File.OpenRead(testArchive);
await using var reader = ReaderFactory.OpenAsyncReader(
await using var reader = await ReaderFactory.OpenAsyncReader(
new AsyncOnlyStream(stream),
new ReaderOptions()
);
@@ -123,9 +123,9 @@ namespace SharpCompress.Test.Arj
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using Stream stream = File.OpenRead(testArchive);
await using var reader = ReaderFactory.OpenAsyncReader(
await using var reader = await ReaderFactory.OpenAsyncReader(
new AsyncOnlyStream(stream),
new ReaderOptions() { LookForHeader = false }
new ReaderOptions() { LookForHeader = true }
);
while (await reader.MoveToNextEntryAsync())
{
@@ -138,12 +138,15 @@ namespace SharpCompress.Test.Arj
);
}
}
VerifyFiles();
CompareFilesByPath(
Path.Combine(SCRATCH_FILES_PATH, "alice29.txt"),
Path.Combine(MISC_TEST_FILES_PATH, "alice29.txt")
);
}
private async Task DoMultiReaderAsync(
string[] archiveNames,
Func<IEnumerable<Stream>, IAsyncReader> openReader
Func<IEnumerable<Stream>, ValueTask<IAsyncReader>> openReader
)
{
var testArchives = archiveNames
@@ -152,7 +155,7 @@ namespace SharpCompress.Test.Arj
var streams = testArchives.Select(File.OpenRead).ToList();
try
{
await using var reader = openReader(streams);
await using var reader = await openReader(streams);
while (await reader.MoveToNextEntryAsync())
{
if (!reader.Entry.IsDirectory)

View File

@@ -26,7 +26,7 @@ public class AsyncTests : TestBase
#else
await using var stream = File.OpenRead(testArchive);
#endif
await using var reader = ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(stream));
await using var reader = await ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(stream));
await reader.WriteAllToDirectoryAsync(
SCRATCH_FILES_PATH,
@@ -51,7 +51,7 @@ public class AsyncTests : TestBase
#else
await using var stream = File.OpenRead(testArchive);
#endif
await using var reader = ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(stream));
await using var reader = await ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(stream));
while (await reader.MoveToNextEntryAsync())
{
@@ -141,7 +141,7 @@ public class AsyncTests : TestBase
#else
await using var stream = File.OpenRead(testArchive);
#endif
await using var reader = ReaderFactory.OpenAsyncReader(
await using var reader = await ReaderFactory.OpenAsyncReader(
new AsyncOnlyStream(stream),
cancellationToken: cts.Token
);
@@ -195,7 +195,7 @@ public class AsyncTests : TestBase
#else
await using var stream = File.OpenRead(testArchive);
#endif
await using var reader = ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(stream));
await using var reader = await ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(stream));
while (await reader.MoveToNextEntryAsync())
{

View File

@@ -1,3 +1,4 @@
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
@@ -22,9 +23,11 @@ public class GZipArchiveAsyncTests : ArchiveTests
#else
await using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")))
#endif
using (var archive = ArchiveFactory.OpenArchive(new AsyncOnlyStream(stream)))
await using (
var archive = await ArchiveFactory.OpenAsyncArchive(new AsyncOnlyStream(stream))
)
{
var entry = archive.Entries.First();
var entry = await archive.EntriesAsync.FirstAsync();
await entry.WriteToFileAsync(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
var size = entry.Size;
@@ -79,7 +82,9 @@ public class GZipArchiveAsyncTests : ArchiveTests
#endif
await using (var archive = GZipArchive.OpenAsyncArchive(new AsyncOnlyStream(stream)))
{
Assert.Throws<InvalidFormatException>(() => archive.AddEntry("jpg\\test.jpg", jpg));
await Assert.ThrowsAsync<NotSupportedException>(async () =>
await archive.AddEntryAsync("jpg\\test.jpg", File.OpenRead(jpg), closeStream: true)
);
await archive.SaveToAsync(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar.gz"));
}
}
@@ -98,8 +103,8 @@ public class GZipArchiveAsyncTests : ArchiveTests
inputStream.Position = 0;
}
using var archive = GZipArchive.OpenArchive(new AsyncOnlyStream(inputStream));
var archiveEntry = archive.Entries.First();
await using var archive = GZipArchive.OpenAsyncArchive(new AsyncOnlyStream(inputStream));
var archiveEntry = await archive.EntriesAsync.FirstAsync();
MemoryStream tarStream;
#if NETFRAMEWORK
@@ -147,7 +152,7 @@ public class GZipArchiveAsyncTests : ArchiveTests
[Fact]
public async Task TestGzCrcWithMostSignificantBitNotNegative_Async()
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"));
await using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"));
await using var archive = GZipArchive.OpenAsyncArchive(new AsyncOnlyStream(stream));
await foreach (var entry in archive.EntriesAsync.Where(entry => !entry.IsDirectory))
{
@@ -158,7 +163,7 @@ public class GZipArchiveAsyncTests : ArchiveTests
[Fact]
public async Task TestGzArchiveTypeGzip_Async()
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"));
await using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"));
await using var archive = GZipArchive.OpenAsyncArchive(new AsyncOnlyStream(stream));
Assert.Equal(archive.Type, ArchiveType.GZip);
}

View File

@@ -22,7 +22,7 @@ public class GZipReaderAsyncTests : ReaderTests
{
//read only as GZip item
using Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"));
await using var reader = ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(stream));
await using var reader = await ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(stream));
while (await reader.MoveToNextEntryAsync())
{
Assert.NotEqual(0, reader.Entry.Size);
@@ -71,7 +71,7 @@ public class GZipReaderAsyncTests : ReaderTests
);
using var testStream = new TestStream(protectedStream);
await using (
var reader = ReaderFactory.OpenAsyncReader(
var reader = await ReaderFactory.OpenAsyncReader(
new AsyncOnlyStream(testStream),
options,
default

View File

@@ -2,30 +2,29 @@ using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Test.Mocks;
public class AsyncOnlyStream : Stream
public class AsyncOnlyStream : SharpCompressStream
{
private readonly Stream _stream;
public AsyncOnlyStream(Stream stream)
: base(stream)
{
_stream = stream;
// Console.WriteLine("AsyncOnlyStream created");
}
public override bool CanRead => _stream.CanRead;
public override bool CanSeek => _stream.CanSeek;
public override bool CanWrite => _stream.CanWrite;
public override long Length => _stream.Length;
public override bool CanRead => Stream.CanRead;
public override bool CanSeek => Stream.CanSeek;
public override bool CanWrite => Stream.CanWrite;
public override long Length => Stream.Length;
public override long Position
{
get => _stream.Position;
set => _stream.Position = value;
get => Stream.Position;
set => Stream.Position = value;
}
public override void Flush() => _stream.Flush();
public override void Flush() => Stream.Flush();
public override int Read(byte[] buffer, int offset, int count) =>
throw new NotSupportedException("Synchronous Read is not supported");
@@ -35,18 +34,18 @@ public class AsyncOnlyStream : Stream
int offset,
int count,
CancellationToken cancellationToken
) => _stream.ReadAsync(buffer, offset, count, cancellationToken);
) => Stream.ReadAsync(buffer, offset, count, cancellationToken);
#if NET8_0_OR_GREATER
public override ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
) => _stream.ReadAsync(buffer, cancellationToken);
) => Stream.ReadAsync(buffer, cancellationToken);
#endif
public override long Seek(long offset, SeekOrigin origin) => _stream.Seek(offset, origin);
public override long Seek(long offset, SeekOrigin origin) => Stream.Seek(offset, origin);
public override void SetLength(long value) => _stream.SetLength(value);
public override void SetLength(long value) => Stream.SetLength(value);
public override Task WriteAsync(
byte[] buffer,
@@ -59,18 +58,14 @@ public class AsyncOnlyStream : Stream
public override ValueTask WriteAsync(
ReadOnlyMemory<byte> buffer,
CancellationToken cancellationToken = default
) => _stream.WriteAsync(buffer, cancellationToken);
) => Stream.WriteAsync(buffer, cancellationToken);
#endif
public override void Write(byte[] buffer, int offset, int count) =>
_stream.Write(buffer, offset, count);
Stream.Write(buffer, offset, count);
protected override void Dispose(bool disposing)
{
if (disposing)
{
_stream.Dispose();
}
base.Dispose(disposing);
}
}

View File

@@ -541,7 +541,7 @@ public class ProgressReportTests : TestBase
var readerOptions = new ReaderOptions { Progress = progress };
await using (
var reader = ReaderFactory.OpenAsyncReader(
var reader = await ReaderFactory.OpenAsyncReader(
new AsyncOnlyStream(archiveStream),
readerOptions
)

View File

@@ -208,7 +208,7 @@ public class RarReaderAsyncTests : ReaderTests
private async ValueTask DoRar_Entry_Stream_Async(string filename)
{
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename)))
await using (var reader = ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(stream)))
await using (var reader = await ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(stream)))
{
while (await reader.MoveToNextEntryAsync())
{
@@ -253,7 +253,7 @@ public class RarReaderAsyncTests : ReaderTests
var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Rar.Audio_program.rar"))
)
await using (
var reader = ReaderFactory.OpenAsyncReader(
var reader = await ReaderFactory.OpenAsyncReader(
new AsyncOnlyStream(stream),
new ReaderOptions { LookForHeader = true }
)
@@ -325,7 +325,7 @@ public class RarReaderAsyncTests : ReaderTests
private async ValueTask DoRar_Solid_Skip_Reader_Async(string filename)
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename));
await using var reader = ReaderFactory.OpenAsyncReader(
await using var reader = await ReaderFactory.OpenAsyncReader(
new AsyncOnlyStream(stream),
new ReaderOptions { LookForHeader = true }
);
@@ -351,7 +351,7 @@ public class RarReaderAsyncTests : ReaderTests
private async ValueTask DoRar_Reader_Skip_Async(string filename)
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename));
await using var reader = ReaderFactory.OpenAsyncReader(
await using var reader = await ReaderFactory.OpenAsyncReader(
new AsyncOnlyStream(stream),
new ReaderOptions { LookForHeader = true }
);
@@ -376,7 +376,7 @@ public class RarReaderAsyncTests : ReaderTests
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using Stream stream = File.OpenRead(testArchive);
await using var reader = ReaderFactory.OpenAsyncReader(
await using var reader = await ReaderFactory.OpenAsyncReader(
new AsyncOnlyStream(stream),
readerOptions ?? new ReaderOptions()
);

View File

@@ -4,7 +4,10 @@ using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using AwesomeAssertions;
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.Factories;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Test.Mocks;
@@ -111,6 +114,25 @@ public abstract class ReaderTests : TestBase
}
}
protected async Task AssertArchiveAsync<T>(
string testArchive,
CancellationToken cancellationToken = default
)
where T : IFactory
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
var factory = new TarFactory();
factory.IsArchive(new FileInfo(testArchive).OpenRead()).Should().BeTrue();
(
await factory.IsArchiveAsync(
new FileInfo(testArchive).OpenRead(),
cancellationToken: cancellationToken
)
)
.Should()
.BeTrue();
}
protected async Task ReadAsync(
string testArchive,
CompressionType? expectedCompression = null,
@@ -146,7 +168,7 @@ public abstract class ReaderTests : TestBase
);
using var testStream = new TestStream(protectedStream);
await using (
var reader = ReaderFactory.OpenAsyncReader(
var reader = await ReaderFactory.OpenAsyncReader(
new AsyncOnlyStream(testStream),
options,
cancellationToken
@@ -237,18 +259,16 @@ public abstract class ReaderTests : TestBase
protected void DoMultiReader(
string[] archives,
Func<IEnumerable<Stream>, IDisposable> readerFactory
Func<IEnumerable<Stream>, IReader> readerFactory
)
{
using var reader = readerFactory(
archives.Select(s => Path.Combine(TEST_ARCHIVES_PATH, s)).Select(File.OpenRead)
);
dynamic dynReader = reader;
while (dynReader.MoveToNextEntry())
while (reader.MoveToNextEntry())
{
dynReader.WriteEntryToDirectory(
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);

View File

@@ -3,20 +3,22 @@ using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.Test.Mocks;
using Xunit;
namespace SharpCompress.Test.SevenZip;
#if !NETFRAMEWORK
public class SevenZipArchiveAsyncTests : ArchiveTests
{
[Fact]
public async ValueTask SevenZipArchive_LZMA_AsyncStreamExtraction()
public async Task SevenZipArchive_LZMA_AsyncStreamExtraction()
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.LZMA.7z");
#if NETFRAMEWORK
using var stream = File.OpenRead(testArchive);
#else
await using var stream = File.OpenRead(testArchive);
#endif
await using var archive = await ArchiveFactory.OpenAsyncArchive(
new AsyncOnlyStream(stream)
);
@@ -31,19 +33,35 @@ public class SevenZipArchiveAsyncTests : ArchiveTests
Directory.CreateDirectory(targetDir);
}
#if NETFRAMEWORK
using var sourceStream = await entry.OpenEntryStreamAsync(CancellationToken.None);
#else
await using var sourceStream = await entry.OpenEntryStreamAsync(CancellationToken.None);
#endif
#if NETFRAMEWORK
using var targetStream = File.Create(targetPath);
#else
await using var targetStream = File.Create(targetPath);
#endif
#if NETFRAMEWORK
await sourceStream.CopyToAsync(targetStream, 81920, CancellationToken.None);
#else
await sourceStream.CopyToAsync(targetStream, CancellationToken.None);
#endif
}
VerifyFiles();
}
[Fact]
public async ValueTask SevenZipArchive_LZMA2_AsyncStreamExtraction()
//[Fact]
public async Task SevenZipArchive_LZMA2_AsyncStreamExtraction()
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.LZMA2.7z");
#if NETFRAMEWORK
using var stream = File.OpenRead(testArchive);
#else
await using var stream = File.OpenRead(testArchive);
#endif
await using var archive = await ArchiveFactory.OpenAsyncArchive(
new AsyncOnlyStream(stream)
);
@@ -58,19 +76,35 @@ public class SevenZipArchiveAsyncTests : ArchiveTests
Directory.CreateDirectory(targetDir);
}
#if NETFRAMEWORK
using var sourceStream = await entry.OpenEntryStreamAsync(CancellationToken.None);
#else
await using var sourceStream = await entry.OpenEntryStreamAsync(CancellationToken.None);
#endif
#if NETFRAMEWORK
using var targetStream = File.Create(targetPath);
#else
await using var targetStream = File.Create(targetPath);
#endif
#if NETFRAMEWORK
await sourceStream.CopyToAsync(targetStream, 81920, CancellationToken.None);
#else
await sourceStream.CopyToAsync(targetStream, CancellationToken.None);
#endif
}
VerifyFiles();
}
[Fact]
public async ValueTask SevenZipArchive_Solid_AsyncStreamExtraction()
public async Task SevenZipArchive_Solid_AsyncStreamExtraction()
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.solid.7z");
#if NETFRAMEWORK
using var stream = File.OpenRead(testArchive);
#else
await using var stream = File.OpenRead(testArchive);
#endif
await using var archive = await ArchiveFactory.OpenAsyncArchive(
new AsyncOnlyStream(stream)
);
@@ -85,19 +119,35 @@ public class SevenZipArchiveAsyncTests : ArchiveTests
Directory.CreateDirectory(targetDir);
}
#if NETFRAMEWORK
using var sourceStream = await entry.OpenEntryStreamAsync(CancellationToken.None);
#else
await using var sourceStream = await entry.OpenEntryStreamAsync(CancellationToken.None);
#endif
#if NETFRAMEWORK
using var targetStream = File.Create(targetPath);
#else
await using var targetStream = File.Create(targetPath);
#endif
#if NETFRAMEWORK
await sourceStream.CopyToAsync(targetStream, 81920, CancellationToken.None);
#else
await sourceStream.CopyToAsync(targetStream, CancellationToken.None);
#endif
}
VerifyFiles();
}
[Fact]
public async ValueTask SevenZipArchive_BZip2_AsyncStreamExtraction()
public async Task SevenZipArchive_BZip2_AsyncStreamExtraction()
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.BZip2.7z");
#if NETFRAMEWORK
using var stream = File.OpenRead(testArchive);
#else
await using var stream = File.OpenRead(testArchive);
#endif
await using var archive = await ArchiveFactory.OpenAsyncArchive(
new AsyncOnlyStream(stream)
);
@@ -112,19 +162,35 @@ public class SevenZipArchiveAsyncTests : ArchiveTests
Directory.CreateDirectory(targetDir);
}
#if NETFRAMEWORK
using var sourceStream = await entry.OpenEntryStreamAsync(CancellationToken.None);
#else
await using var sourceStream = await entry.OpenEntryStreamAsync(CancellationToken.None);
#endif
#if NETFRAMEWORK
using var targetStream = File.Create(targetPath);
#else
await using var targetStream = File.Create(targetPath);
#endif
#if NETFRAMEWORK
await sourceStream.CopyToAsync(targetStream, 81920, CancellationToken.None);
#else
await sourceStream.CopyToAsync(targetStream, CancellationToken.None);
#endif
}
VerifyFiles();
}
[Fact]
public async ValueTask SevenZipArchive_PPMd_AsyncStreamExtraction()
public async Task SevenZipArchive_PPMd_AsyncStreamExtraction()
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.PPMd.7z");
#if NETFRAMEWORK
using var stream = File.OpenRead(testArchive);
#else
await using var stream = File.OpenRead(testArchive);
#endif
await using var archive = await ArchiveFactory.OpenAsyncArchive(
new AsyncOnlyStream(stream)
);
@@ -139,12 +205,23 @@ public class SevenZipArchiveAsyncTests : ArchiveTests
Directory.CreateDirectory(targetDir);
}
#if NETFRAMEWORK
using var sourceStream = await entry.OpenEntryStreamAsync(CancellationToken.None);
#else
await using var sourceStream = await entry.OpenEntryStreamAsync(CancellationToken.None);
#endif
#if NETFRAMEWORK
using var targetStream = File.Create(targetPath);
#else
await using var targetStream = File.Create(targetPath);
#endif
#if NETFRAMEWORK
await sourceStream.CopyToAsync(targetStream, 81920, CancellationToken.None);
#else
await sourceStream.CopyToAsync(targetStream, CancellationToken.None);
#endif
}
VerifyFiles();
}
}
#endif

View File

@@ -61,7 +61,7 @@ public class SevenZipArchiveTests : ArchiveTests
[Fact]
public void SevenZipArchive_LZMA2_EXE_PathRead() =>
ArchiveFileRead(new SevenZipFactory(), "7Zip.LZMA2.exe", new() { LookForHeader = true });
ArchiveFileRead("7Zip.LZMA2.exe", new() { LookForHeader = true }, new SevenZipFactory());
[Fact]
public void SevenZipArchive_LZMA2AES_StreamRead() =>

View File

@@ -1,6 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net10.0;net48</TargetFrameworks>
<OutputType>Exe</OutputType>
<AssemblyName>SharpCompress.Test</AssemblyName>
<PackageId>SharpCompress.Test</PackageId>
<AssemblyOriginatorKeyFile>SharpCompress.Test.snk</AssemblyOriginatorKeyFile>
@@ -22,7 +23,7 @@
<PackageReference Include="AwesomeAssertions" />
<PackageReference Include="Microsoft.NET.Test.Sdk" />
<PackageReference Include="xunit.runner.visualstudio" />
<PackageReference Include="xunit" />
<PackageReference Include="xunit.v3" />
</ItemGroup>
<ItemGroup Condition=" '$(VersionlessImplicitFrameworkDefine)' != 'NETFRAMEWORK' ">
<PackageReference Include="Mono.Posix.NETStandard" />

View File

@@ -139,7 +139,7 @@ public class TarArchiveAsyncTests : ArchiveTests
await using (var archive = TarArchive.CreateAsyncArchive())
{
archive.AddAllFromDirectory(ORIGINAL_FILES_PATH);
await archive.AddAllFromDirectoryAsync(ORIGINAL_FILES_PATH);
var twopt = new TarWriterOptions(CompressionType.None, true);
twopt.ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(866) };
await archive.SaveToAsync(scratchPath, twopt);
@@ -157,7 +157,7 @@ public class TarArchiveAsyncTests : ArchiveTests
await using (var archive = TarArchive.OpenAsyncArchive(unmodified))
{
archive.AddEntry("jpg\\test.jpg", jpg);
await archive.AddEntryAsync("jpg\\test.jpg", jpg);
await archive.SaveToAsync(scratchPath, new WriterOptions(CompressionType.None));
}
CompareArchivesByPath(modified, scratchPath);
@@ -175,7 +175,7 @@ public class TarArchiveAsyncTests : ArchiveTests
var entry = await archive.EntriesAsync.SingleAsync(x =>
x.Key.NotNull().EndsWith("jpg", StringComparison.OrdinalIgnoreCase)
);
archive.RemoveEntry(entry);
await archive.RemoveEntryAsync(entry);
await archive.SaveToAsync(scratchPath, new WriterOptions(CompressionType.None));
}
CompareArchivesByPath(modified, scratchPath);
@@ -200,7 +200,10 @@ public class TarArchiveAsyncTests : ArchiveTests
{
var tropt = new ReaderOptions { ArchiveEncoding = enc };
await using (
var tr = ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(inputMemory), tropt)
var tr = await ReaderFactory.OpenAsyncReader(
new AsyncOnlyStream(inputMemory),
tropt
)
)
{
while (await tr.MoveToNextEntryAsync())

View File

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Factories;
using SharpCompress.Readers;
using SharpCompress.Readers.Tar;
using SharpCompress.Test.Mocks;
@@ -23,7 +24,7 @@ public class TarReaderAsyncTests : ReaderTests
using Stream stream = new ForwardOnlyStream(
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar"))
);
await using var reader = ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(stream));
await using var reader = await ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(stream));
var x = 0;
while (await reader.MoveToNextEntryAsync())
{
@@ -45,6 +46,9 @@ public class TarReaderAsyncTests : ReaderTests
public async ValueTask Tar_Z_Reader_Async() =>
await ReadAsync("Tar.tar.Z", CompressionType.Lzw);
[Fact]
public async ValueTask Tar_Async_Assert() => await AssertArchiveAsync<TarFactory>("Tar.tar");
[Fact]
public async ValueTask Tar_BZip2_Reader_Async() =>
await ReadAsync("Tar.tar.bz2", CompressionType.BZip2);
@@ -72,29 +76,26 @@ public class TarReaderAsyncTests : ReaderTests
[Fact]
public async ValueTask Tar_BZip2_Entry_Stream_Async()
{
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2")))
await using (var reader = ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(stream)))
using Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2"));
await using var reader = TarReader.OpenAsyncReader(stream);
while (await reader.MoveToNextEntryAsync())
{
while (await reader.MoveToNextEntryAsync())
if (!reader.Entry.IsDirectory)
{
if (!reader.Entry.IsDirectory)
Assert.Equal(CompressionType.BZip2, reader.Entry.CompressionType);
using var entryStream = await reader.OpenEntryStreamAsync();
var file = Path.GetFileName(reader.Entry.Key);
var folder =
Path.GetDirectoryName(reader.Entry.Key) ?? throw new ArgumentNullException();
var destdir = Path.Combine(SCRATCH_FILES_PATH, folder);
if (!Directory.Exists(destdir))
{
Assert.Equal(CompressionType.BZip2, reader.Entry.CompressionType);
using var entryStream = await reader.OpenEntryStreamAsync();
var file = Path.GetFileName(reader.Entry.Key);
var folder =
Path.GetDirectoryName(reader.Entry.Key)
?? throw new ArgumentNullException();
var destdir = Path.Combine(SCRATCH_FILES_PATH, folder);
if (!Directory.Exists(destdir))
{
Directory.CreateDirectory(destdir);
}
var destinationFileName = Path.Combine(destdir, file.NotNull());
using var fs = File.OpenWrite(destinationFileName);
await entryStream.CopyToAsync(fs);
Directory.CreateDirectory(destdir);
}
var destinationFileName = Path.Combine(destdir, file.NotNull());
using var fs = File.OpenWrite(destinationFileName);
await entryStream.CopyToAsync(fs);
}
}
VerifyFiles();
@@ -134,18 +135,18 @@ public class TarReaderAsyncTests : ReaderTests
}
[Fact]
public void Tar_BZip2_Skip_Entry_Stream_Async()
public async ValueTask Tar_BZip2_Skip_Entry_Stream_Async()
{
using Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2"));
using var reader = TarReader.OpenReader(stream);
await using var reader = TarReader.OpenAsyncReader(stream);
var names = new List<string>();
while (reader.MoveToNextEntry())
while (await reader.MoveToNextEntryAsync())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(CompressionType.BZip2, reader.Entry.CompressionType);
using var entryStream = reader.OpenEntryStream();
entryStream.SkipEntry();
using var entryStream = await reader.OpenEntryStreamAsync();
await entryStream.SkipEntryAsync();
names.Add(reader.Entry.Key.NotNull());
}
}
@@ -184,7 +185,7 @@ public class TarReaderAsyncTests : ReaderTests
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar");
using Stream stream = File.OpenRead(archiveFullPath);
await using var reader = ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(stream));
await using var reader = await ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(stream));
var memoryStream = new MemoryStream();
Assert.True(await reader.MoveToNextEntryAsync());
@@ -201,7 +202,7 @@ public class TarReaderAsyncTests : ReaderTests
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "TarCorrupted.tar");
using Stream stream = File.OpenRead(archiveFullPath);
await using var reader = ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(stream));
await using var reader = await ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(stream));
var memoryStream = new MemoryStream();
Assert.True(await reader.MoveToNextEntryAsync());
@@ -220,7 +221,7 @@ public class TarReaderAsyncTests : ReaderTests
using Stream stream = File.OpenRead(
Path.Combine(TEST_ARCHIVES_PATH, "TarWithSymlink.tar.gz")
);
await using var reader = ReaderFactory.OpenAsyncReader(
await using var reader = await ReaderFactory.OpenAsyncReader(
new AsyncOnlyStream(stream),
new ReaderOptions { LookForHeader = true }
);

View File

@@ -92,7 +92,7 @@ public class WriterTests : TestBase
readerOptions.ArchiveEncoding.Default = encoding ?? Encoding.Default;
await using var reader = ReaderFactory.OpenAsyncReader(
await using var reader = await ReaderFactory.OpenAsyncReader(
new AsyncOnlyStream(SharpCompressStream.Create(stream, leaveOpen: true)),
readerOptions,
cancellationToken

View File

@@ -199,7 +199,7 @@ public class Zip64AsyncTests : WriterTests
ZipEntry? prev = null;
using (var fs = File.OpenRead(filename))
{
var rd = ReaderFactory.OpenAsyncReader(
var rd = await ReaderFactory.OpenAsyncReader(
new AsyncOnlyStream(fs),
new ReaderOptions { LookForHeader = false }
);

View File

@@ -131,7 +131,7 @@ public class ZipArchiveAsyncTests : ArchiveTests
var entry = await archive.EntriesAsync.SingleAsync(x =>
x.Key.NotNull().EndsWith("jpg", StringComparison.OrdinalIgnoreCase)
);
archive.RemoveEntry(entry);
await archive.RemoveEntryAsync(entry);
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.Deflate);
writerOptions.ArchiveEncoding.Default = Encoding.GetEncoding(866);
@@ -151,7 +151,7 @@ public class ZipArchiveAsyncTests : ArchiveTests
await using (var archive = ZipArchive.OpenAsyncArchive(unmodified))
{
archive.AddEntry("jpg\\test.jpg", jpg);
await archive.AddEntryAsync("jpg\\test.jpg", jpg);
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.Deflate);
writerOptions.ArchiveEncoding.Default = Encoding.GetEncoding(866);
@@ -231,21 +231,22 @@ public class ZipArchiveAsyncTests : ArchiveTests
var progressReports = new System.Collections.Generic.List<ProgressReport>();
var progress = new Progress<ProgressReport>(report => progressReports.Add(report));
#if NETFRAMEWORK
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.zip")))
#else
await using (
Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.zip"))
)
#endif
{
IAsyncArchive archive = ZipArchive.OpenAsyncArchive(new AsyncOnlyStream(stream));
try
{
await archive.WriteToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
progress
);
}
finally
{
await archive.DisposeAsync();
}
await using IAsyncArchive archive = ZipArchive.OpenAsyncArchive(
new AsyncOnlyStream(stream)
);
await archive.WriteToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
progress
);
}
VerifyFiles();

View File

@@ -20,7 +20,7 @@ public class ZipReaderAsyncTests : ReaderTests
{
var path = Path.Combine(TEST_ARCHIVES_PATH, "PrePostHeaders.zip");
using Stream stream = new ForwardOnlyStream(File.OpenRead(path));
await using var reader = ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(stream));
await using var reader = await ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(stream));
var count = 0;
while (await reader.MoveToNextEntryAsync())
{
@@ -65,7 +65,7 @@ public class ZipReaderAsyncTests : ReaderTests
using Stream stream = new ForwardOnlyStream(
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip"))
);
await using var reader = ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(stream));
await using var reader = await ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(stream));
var x = 0;
while (await reader.MoveToNextEntryAsync())
{
@@ -150,7 +150,7 @@ public class ZipReaderAsyncTests : ReaderTests
using var stream = new TestStream(
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip"))
);
await using (var reader = ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(stream)))
await using (var reader = await ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(stream)))
{
while (await reader.MoveToNextEntryAsync())
{
@@ -174,7 +174,7 @@ public class ZipReaderAsyncTests : ReaderTests
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip"))
)
);
await using var reader = ReaderFactory.OpenAsyncReader(stream);
await using var reader = await ReaderFactory.OpenAsyncReader(stream);
while (await reader.MoveToNextEntryAsync())
{
if (!reader.Entry.IsDirectory)

View File

@@ -51,17 +51,6 @@
"resolved": "1.0.0",
"contentHash": "vSN/L1uaVwKsiLa95bYu2SGkF0iY3xMblTfxc8alSziPuVfJpj3geVqHGAA75J7cZkMuKpFVikz82Lo6y6LLdA=="
},
"xunit": {
"type": "Direct",
"requested": "[2.9.3, )",
"resolved": "2.9.3",
"contentHash": "TlXQBinK35LpOPKHAqbLY4xlEen9TBafjs0V5KnA4wZsoQLQJiirCR4CbIXvOH8NzkW4YeJKP5P/Bnrodm0h9Q==",
"dependencies": {
"xunit.analyzers": "1.18.0",
"xunit.assert": "2.9.3",
"xunit.core": "[2.9.3]"
}
},
"xunit.runner.visualstudio": {
"type": "Direct",
"requested": "[3.1.5, )",
@@ -71,6 +60,23 @@
"Microsoft.TestPlatform.ObjectModel": "17.13.0"
}
},
"xunit.v3": {
"type": "Direct",
"requested": "[3.2.1, )",
"resolved": "3.2.1",
"contentHash": "oefMPnMEQv9JXlc1mmj4XnNmylLWJA6XHncTcyM3LBvbepO+rsWfmIZ2gb2tO6WU29De4RxvEFHT5xxmsrjn8Q==",
"dependencies": {
"xunit.v3.mtp-v1": "[3.2.1]"
}
},
"Microsoft.ApplicationInsights": {
"type": "Transitive",
"resolved": "2.23.0",
"contentHash": "nWArUZTdU7iqZLycLKWe0TDms48KKGE6pONH2terYNa8REXiqixrMOkf1sk5DHGMaUTqONU2YkS4SAXBhLStgw==",
"dependencies": {
"System.Diagnostics.DiagnosticSource": "5.0.0"
}
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
@@ -91,6 +97,37 @@
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
},
"Microsoft.Testing.Extensions.Telemetry": {
"type": "Transitive",
"resolved": "1.9.1",
"contentHash": "No5AudZMmSb+uNXjlgL2y3/stHD2IT4uxqc5yHwkE+/nNux9jbKcaJMvcp9SwgP4DVD8L9/P3OUz8mmmcvEIdQ==",
"dependencies": {
"Microsoft.ApplicationInsights": "2.23.0",
"Microsoft.Testing.Platform": "1.9.1",
"System.Diagnostics.DiagnosticSource": "6.0.0"
}
},
"Microsoft.Testing.Extensions.TrxReport.Abstractions": {
"type": "Transitive",
"resolved": "1.9.1",
"contentHash": "AL46Xe1WBi85Ntd4mNPvat5ZSsZ2uejiVqoKCypr8J3wK0elA5xJ3AN4G/Q4GIwzUFnggZoH/DBjnr9J18IO/g==",
"dependencies": {
"Microsoft.Testing.Platform": "1.9.1"
}
},
"Microsoft.Testing.Platform": {
"type": "Transitive",
"resolved": "1.9.1",
"contentHash": "QafNtNSmEI0zazdebnsIkDKmFtTSpmx/5PLOjURWwozcPb3tvRxzosQSL8xwYNM1iPhhKiBksXZyRSE2COisrA=="
},
"Microsoft.Testing.Platform.MSBuild": {
"type": "Transitive",
"resolved": "1.9.1",
"contentHash": "oTUtyR4X/s9ytuiNA29FGsNCCH0rNmY5Wdm14NCKLjTM1cT9edVSlA+rGS/mVmusPqcP0l/x9qOnMXg16v87RQ==",
"dependencies": {
"Microsoft.Testing.Platform": "1.9.1"
}
},
"Microsoft.TestPlatform.ObjectModel": {
"type": "Transitive",
"resolved": "17.13.0",
@@ -99,10 +136,32 @@
"System.Reflection.Metadata": "1.6.0"
}
},
"Microsoft.Win32.Registry": {
"type": "Transitive",
"resolved": "5.0.0",
"contentHash": "dDoKi0PnDz31yAyETfRntsLArTlVAVzUzCIvvEDsDsucrl33Dl8pIJG06ePTJTI3tGpeyHS9Cq7Foc/s4EeKcg==",
"dependencies": {
"System.Security.AccessControl": "5.0.0",
"System.Security.Principal.Windows": "5.0.0"
}
},
"System.Collections.Immutable": {
"type": "Transitive",
"resolved": "1.5.0",
"contentHash": "EXKiDFsChZW0RjrZ4FYHu9aW6+P4MCgEDCklsVseRfhoO0F+dXeMSsMRAlVXIo06kGJ/zv+2w1a2uc2+kxxSaQ=="
"resolved": "6.0.0",
"contentHash": "l4zZJ1WU2hqpQQHXz1rvC3etVZN+2DLmQMO79FhOTZHMn8tDRr+WU287sbomD0BETlmKDn0ygUgVy9k5xkkJdA==",
"dependencies": {
"System.Memory": "4.5.4",
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
}
},
"System.Diagnostics.DiagnosticSource": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "frQDfv0rl209cKm1lnwTgFPzNigy2EKk1BS3uAvHvlBVKe5cymGyHO+Sj+NLv5VF/AhHsqPIUUwya5oV4CHMUw==",
"dependencies": {
"System.Memory": "4.5.4",
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
}
},
"System.Numerics.Vectors": {
"type": "Transitive",
@@ -122,6 +181,19 @@
"resolved": "6.1.2",
"contentHash": "2hBr6zdbIBTDE3EhK7NSVNdX58uTK6iHW/P/Axmm9sl1xoGSLqDvMtpecn226TNwHByFokYwJmt/aQQNlO5CRw=="
},
"System.Security.AccessControl": {
"type": "Transitive",
"resolved": "5.0.0",
"contentHash": "dagJ1mHZO3Ani8GH0PHpPEe/oYO+rVdbQjvjJkBRNQkX4t0r1iaeGn8+/ybkSLEan3/slM0t59SVdHzuHf2jmw==",
"dependencies": {
"System.Security.Principal.Windows": "5.0.0"
}
},
"System.Security.Principal.Windows": {
"type": "Transitive",
"resolved": "5.0.0",
"contentHash": "t0MGLukB5WAVU9bO3MGzvlGnyJPgUlcwerXn1kzBRjwLKixT96XV0Uza41W49gVd8zEMFu9vQEFlv0IOrytICA=="
},
"System.Threading.Tasks.Extensions": {
"type": "Transitive",
"resolved": "4.6.3",
@@ -135,44 +207,75 @@
"resolved": "4.6.1",
"contentHash": "+RJT4qaekpZ7DDLhf+LTjq+E48jieKiY9ulJ+BoxKmZblIJfIJT8Ufcaa/clQqnYvWs8jugfGSMu8ylS0caG0w=="
},
"xunit.abstractions": {
"type": "Transitive",
"resolved": "2.0.3",
"contentHash": "pot1I4YOxlWjIb5jmwvvQNbTrZ3lJQ+jUGkGjWE3hEFM0l5gOnBWS+H3qsex68s5cO52g+44vpGzhAt+42vwKg=="
},
"xunit.analyzers": {
"type": "Transitive",
"resolved": "1.18.0",
"contentHash": "OtFMHN8yqIcYP9wcVIgJrq01AfTxijjAqVDy/WeQVSyrDC1RzBWeQPztL49DN2syXRah8TYnfvk035s7L95EZQ=="
"resolved": "1.26.0",
"contentHash": "YrWZOfuU1Scg4iGizAlMNALOxVS+HPSVilfscNDEJAyrTIVdF4c+8o+Aerw2RYnrJxafj/F56YkJOKCURUWQmA=="
},
"xunit.assert": {
"xunit.v3.assert": {
"type": "Transitive",
"resolved": "2.9.3",
"contentHash": "/Kq28fCE7MjOV42YLVRAJzRF0WmEqsmflm0cfpMjGtzQ2lR5mYVj1/i0Y8uDAOLczkL3/jArrwehfMD0YogMAA=="
},
"xunit.core": {
"type": "Transitive",
"resolved": "2.9.3",
"contentHash": "BiAEvqGvyme19wE0wTKdADH+NloYqikiU0mcnmiNyXaF9HyHmE6sr/3DC5vnBkgsWaE6yPyWszKSPSApWdRVeQ==",
"resolved": "3.2.1",
"contentHash": "7hGxs+sfgPCiHg7CbWL8Vsmg8WS4vBfipZ7rfE+FEyS7ksU4+0vcV08TQvLIXLPAfinT06zVoK83YjRcMXcXLw==",
"dependencies": {
"xunit.extensibility.core": "[2.9.3]",
"xunit.extensibility.execution": "[2.9.3]"
"System.Collections.Immutable": "6.0.0",
"System.Memory": "4.5.5"
}
},
"xunit.extensibility.core": {
"xunit.v3.common": {
"type": "Transitive",
"resolved": "2.9.3",
"contentHash": "kf3si0YTn2a8J8eZNb+zFpwfoyvIrQ7ivNk5ZYA5yuYk1bEtMe4DxJ2CF/qsRgmEnDr7MnW1mxylBaHTZ4qErA==",
"resolved": "3.2.1",
"contentHash": "NUh3pPTC3Py4XTnjoCCCIEzvdKTQ9apu0ikDNCrUETBtfHHXcoUmIl5bOfJLQQu7awhu8eaZHjJnG7rx9lUZpg==",
"dependencies": {
"xunit.abstractions": "2.0.3"
"Microsoft.Bcl.AsyncInterfaces": "6.0.0"
}
},
"xunit.extensibility.execution": {
"xunit.v3.core.mtp-v1": {
"type": "Transitive",
"resolved": "2.9.3",
"contentHash": "yMb6vMESlSrE3Wfj7V6cjQ3S4TXdXpRqYeNEI3zsX31uTsGMJjEw6oD5F5u1cHnMptjhEECnmZSsPxB6ChZHDQ==",
"resolved": "3.2.1",
"contentHash": "PeClKsdYS8TN7q8UxcIKgMVEf1xjqa5XWaizzt+WfLp8+85ZKT+LAQ2/ct+eYqazFzaGSJCAj96+1Z2USkWV6A==",
"dependencies": {
"xunit.extensibility.core": "[2.9.3]"
"Microsoft.Testing.Extensions.Telemetry": "1.9.1",
"Microsoft.Testing.Extensions.TrxReport.Abstractions": "1.9.1",
"Microsoft.Testing.Platform": "1.9.1",
"Microsoft.Testing.Platform.MSBuild": "1.9.1",
"xunit.v3.extensibility.core": "[3.2.1]",
"xunit.v3.runner.inproc.console": "[3.2.1]"
}
},
"xunit.v3.extensibility.core": {
"type": "Transitive",
"resolved": "3.2.1",
"contentHash": "soZuThF5CwB/ZZ2HY/ivdinyM/6MvmjsHTG0vNw3fRd1ZKcmLzfxVb3fB6R3G5yoaN4Bh+aWzFGjOvYO05OzkA==",
"dependencies": {
"xunit.v3.common": "[3.2.1]"
}
},
"xunit.v3.mtp-v1": {
"type": "Transitive",
"resolved": "3.2.1",
"contentHash": "lREcN7+kZmHqLmivhfzN+BHBYf3nQzMEojX5390qDplnXjaHYUxH49XmrWEbCx+va3ZTiIR2vVWPJWCs2UFBFQ==",
"dependencies": {
"xunit.analyzers": "1.26.0",
"xunit.v3.assert": "[3.2.1]",
"xunit.v3.core.mtp-v1": "[3.2.1]"
}
},
"xunit.v3.runner.common": {
"type": "Transitive",
"resolved": "3.2.1",
"contentHash": "oF0jwl0xH45/RWjDcaCPOeeI6HCoyiEXIT8yvByd37rhJorjL/Ri8S9A/Vql8DBPjCfQWd6Url5JRmeiQ55isA==",
"dependencies": {
"Microsoft.Win32.Registry": "[5.0.0]",
"xunit.v3.common": "[3.2.1]"
}
},
"xunit.v3.runner.inproc.console": {
"type": "Transitive",
"resolved": "3.2.1",
"contentHash": "EC/VLj1E9BPWfmzdEMQEqouxh0rWAdX6SXuiiDRf0yXXsQo3E2PNLKCyJ9V8hmkGH/nBvM7pHLFbuCf00vCynw==",
"dependencies": {
"xunit.v3.extensibility.core": "[3.2.1]",
"xunit.v3.runner.common": "[3.2.1]"
}
},
"sharpcompress": {
@@ -270,23 +373,26 @@
"resolved": "1.0.0",
"contentHash": "vSN/L1uaVwKsiLa95bYu2SGkF0iY3xMblTfxc8alSziPuVfJpj3geVqHGAA75J7cZkMuKpFVikz82Lo6y6LLdA=="
},
"xunit": {
"type": "Direct",
"requested": "[2.9.3, )",
"resolved": "2.9.3",
"contentHash": "TlXQBinK35LpOPKHAqbLY4xlEen9TBafjs0V5KnA4wZsoQLQJiirCR4CbIXvOH8NzkW4YeJKP5P/Bnrodm0h9Q==",
"dependencies": {
"xunit.analyzers": "1.18.0",
"xunit.assert": "2.9.3",
"xunit.core": "[2.9.3]"
}
},
"xunit.runner.visualstudio": {
"type": "Direct",
"requested": "[3.1.5, )",
"resolved": "3.1.5",
"contentHash": "tKi7dSTwP4m5m9eXPM2Ime4Kn7xNf4x4zT9sdLO/G4hZVnQCRiMTWoSZqI/pYTVeI27oPPqHBKYI/DjJ9GsYgA=="
},
"xunit.v3": {
"type": "Direct",
"requested": "[3.2.1, )",
"resolved": "3.2.1",
"contentHash": "oefMPnMEQv9JXlc1mmj4XnNmylLWJA6XHncTcyM3LBvbepO+rsWfmIZ2gb2tO6WU29De4RxvEFHT5xxmsrjn8Q==",
"dependencies": {
"xunit.v3.mtp-v1": "[3.2.1]"
}
},
"Microsoft.ApplicationInsights": {
"type": "Transitive",
"resolved": "2.23.0",
"contentHash": "nWArUZTdU7iqZLycLKWe0TDms48KKGE6pONH2terYNa8REXiqixrMOkf1sk5DHGMaUTqONU2YkS4SAXBhLStgw=="
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
@@ -307,6 +413,36 @@
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
},
"Microsoft.Testing.Extensions.Telemetry": {
"type": "Transitive",
"resolved": "1.9.1",
"contentHash": "No5AudZMmSb+uNXjlgL2y3/stHD2IT4uxqc5yHwkE+/nNux9jbKcaJMvcp9SwgP4DVD8L9/P3OUz8mmmcvEIdQ==",
"dependencies": {
"Microsoft.ApplicationInsights": "2.23.0",
"Microsoft.Testing.Platform": "1.9.1"
}
},
"Microsoft.Testing.Extensions.TrxReport.Abstractions": {
"type": "Transitive",
"resolved": "1.9.1",
"contentHash": "AL46Xe1WBi85Ntd4mNPvat5ZSsZ2uejiVqoKCypr8J3wK0elA5xJ3AN4G/Q4GIwzUFnggZoH/DBjnr9J18IO/g==",
"dependencies": {
"Microsoft.Testing.Platform": "1.9.1"
}
},
"Microsoft.Testing.Platform": {
"type": "Transitive",
"resolved": "1.9.1",
"contentHash": "QafNtNSmEI0zazdebnsIkDKmFtTSpmx/5PLOjURWwozcPb3tvRxzosQSL8xwYNM1iPhhKiBksXZyRSE2COisrA=="
},
"Microsoft.Testing.Platform.MSBuild": {
"type": "Transitive",
"resolved": "1.9.1",
"contentHash": "oTUtyR4X/s9ytuiNA29FGsNCCH0rNmY5Wdm14NCKLjTM1cT9edVSlA+rGS/mVmusPqcP0l/x9qOnMXg16v87RQ==",
"dependencies": {
"Microsoft.Testing.Platform": "1.9.1"
}
},
"Microsoft.TestPlatform.ObjectModel": {
"type": "Transitive",
"resolved": "18.0.1",
@@ -321,53 +457,91 @@
"Newtonsoft.Json": "13.0.3"
}
},
"Microsoft.Win32.Registry": {
"type": "Transitive",
"resolved": "5.0.0",
"contentHash": "dDoKi0PnDz31yAyETfRntsLArTlVAVzUzCIvvEDsDsucrl33Dl8pIJG06ePTJTI3tGpeyHS9Cq7Foc/s4EeKcg=="
},
"Newtonsoft.Json": {
"type": "Transitive",
"resolved": "13.0.3",
"contentHash": "HrC5BXdl00IP9zeV+0Z848QWPAoCr9P3bDEZguI+gkLcBKAOxix/tLEAAHC+UvDNPv4a2d18lOReHMOagPa+zQ=="
},
"xunit.abstractions": {
"type": "Transitive",
"resolved": "2.0.3",
"contentHash": "pot1I4YOxlWjIb5jmwvvQNbTrZ3lJQ+jUGkGjWE3hEFM0l5gOnBWS+H3qsex68s5cO52g+44vpGzhAt+42vwKg=="
},
"xunit.analyzers": {
"type": "Transitive",
"resolved": "1.18.0",
"contentHash": "OtFMHN8yqIcYP9wcVIgJrq01AfTxijjAqVDy/WeQVSyrDC1RzBWeQPztL49DN2syXRah8TYnfvk035s7L95EZQ=="
"resolved": "1.26.0",
"contentHash": "YrWZOfuU1Scg4iGizAlMNALOxVS+HPSVilfscNDEJAyrTIVdF4c+8o+Aerw2RYnrJxafj/F56YkJOKCURUWQmA=="
},
"xunit.assert": {
"xunit.v3.assert": {
"type": "Transitive",
"resolved": "2.9.3",
"contentHash": "/Kq28fCE7MjOV42YLVRAJzRF0WmEqsmflm0cfpMjGtzQ2lR5mYVj1/i0Y8uDAOLczkL3/jArrwehfMD0YogMAA=="
"resolved": "3.2.1",
"contentHash": "7hGxs+sfgPCiHg7CbWL8Vsmg8WS4vBfipZ7rfE+FEyS7ksU4+0vcV08TQvLIXLPAfinT06zVoK83YjRcMXcXLw=="
},
"xunit.core": {
"xunit.v3.common": {
"type": "Transitive",
"resolved": "2.9.3",
"contentHash": "BiAEvqGvyme19wE0wTKdADH+NloYqikiU0mcnmiNyXaF9HyHmE6sr/3DC5vnBkgsWaE6yPyWszKSPSApWdRVeQ==",
"resolved": "3.2.1",
"contentHash": "NUh3pPTC3Py4XTnjoCCCIEzvdKTQ9apu0ikDNCrUETBtfHHXcoUmIl5bOfJLQQu7awhu8eaZHjJnG7rx9lUZpg==",
"dependencies": {
"xunit.extensibility.core": "[2.9.3]",
"xunit.extensibility.execution": "[2.9.3]"
"Microsoft.Bcl.AsyncInterfaces": "6.0.0"
}
},
"xunit.extensibility.core": {
"xunit.v3.core.mtp-v1": {
"type": "Transitive",
"resolved": "2.9.3",
"contentHash": "kf3si0YTn2a8J8eZNb+zFpwfoyvIrQ7ivNk5ZYA5yuYk1bEtMe4DxJ2CF/qsRgmEnDr7MnW1mxylBaHTZ4qErA==",
"resolved": "3.2.1",
"contentHash": "PeClKsdYS8TN7q8UxcIKgMVEf1xjqa5XWaizzt+WfLp8+85ZKT+LAQ2/ct+eYqazFzaGSJCAj96+1Z2USkWV6A==",
"dependencies": {
"xunit.abstractions": "2.0.3"
"Microsoft.Testing.Extensions.Telemetry": "1.9.1",
"Microsoft.Testing.Extensions.TrxReport.Abstractions": "1.9.1",
"Microsoft.Testing.Platform": "1.9.1",
"Microsoft.Testing.Platform.MSBuild": "1.9.1",
"xunit.v3.extensibility.core": "[3.2.1]",
"xunit.v3.runner.inproc.console": "[3.2.1]"
}
},
"xunit.extensibility.execution": {
"xunit.v3.extensibility.core": {
"type": "Transitive",
"resolved": "2.9.3",
"contentHash": "yMb6vMESlSrE3Wfj7V6cjQ3S4TXdXpRqYeNEI3zsX31uTsGMJjEw6oD5F5u1cHnMptjhEECnmZSsPxB6ChZHDQ==",
"resolved": "3.2.1",
"contentHash": "soZuThF5CwB/ZZ2HY/ivdinyM/6MvmjsHTG0vNw3fRd1ZKcmLzfxVb3fB6R3G5yoaN4Bh+aWzFGjOvYO05OzkA==",
"dependencies": {
"xunit.extensibility.core": "[2.9.3]"
"xunit.v3.common": "[3.2.1]"
}
},
"xunit.v3.mtp-v1": {
"type": "Transitive",
"resolved": "3.2.1",
"contentHash": "lREcN7+kZmHqLmivhfzN+BHBYf3nQzMEojX5390qDplnXjaHYUxH49XmrWEbCx+va3ZTiIR2vVWPJWCs2UFBFQ==",
"dependencies": {
"xunit.analyzers": "1.26.0",
"xunit.v3.assert": "[3.2.1]",
"xunit.v3.core.mtp-v1": "[3.2.1]"
}
},
"xunit.v3.runner.common": {
"type": "Transitive",
"resolved": "3.2.1",
"contentHash": "oF0jwl0xH45/RWjDcaCPOeeI6HCoyiEXIT8yvByd37rhJorjL/Ri8S9A/Vql8DBPjCfQWd6Url5JRmeiQ55isA==",
"dependencies": {
"Microsoft.Win32.Registry": "[5.0.0]",
"xunit.v3.common": "[3.2.1]"
}
},
"xunit.v3.runner.inproc.console": {
"type": "Transitive",
"resolved": "3.2.1",
"contentHash": "EC/VLj1E9BPWfmzdEMQEqouxh0rWAdX6SXuiiDRf0yXXsQo3E2PNLKCyJ9V8hmkGH/nBvM7pHLFbuCf00vCynw==",
"dependencies": {
"xunit.v3.extensibility.core": "[3.2.1]",
"xunit.v3.runner.common": "[3.2.1]"
}
},
"sharpcompress": {
"type": "Project"
},
"Microsoft.Bcl.AsyncInterfaces": {
"type": "CentralTransitive",
"requested": "[10.0.0, )",
"resolved": "10.0.0",
"contentHash": "vFuwSLj9QJBbNR0NeNO4YVASUbokxs+i/xbuu8B+Fs4FAZg5QaFa6eGrMaRqTzzNI5tAb97T7BhSxtLckFyiRA=="
}
}
}

View File

@@ -0,0 +1,3 @@
{
"$schema": "https://xunit.net/schema/current/xunit.runner.schema.json"
}