Compare commits


1 Commit

Author         SHA1        Message                   Date
Adam Hathcock  d915fbf6d6  first pass of Open Async  2025-11-30 11:53:57 +00:00
37 changed files with 2359 additions and 809 deletions

.gitignore (vendored): 1 changed line

@@ -15,6 +15,7 @@ tests/TestArchives/*/Scratch
tests/TestArchives/*/Scratch2
.vs
tools
.vscode
.idea/
.DS_Store


@@ -1,9 +0,0 @@
{
"recommendations": [
"ms-dotnettools.csdevkit",
"ms-dotnettools.csharp",
"ms-dotnettools.vscode-dotnet-runtime",
"csharpier.csharpier-vscode",
"formulahendry.dotnet-test-explorer"
]
}

.vscode/launch.json (vendored): 97 changed lines

@@ -1,97 +0,0 @@
{
"version": "0.2.0",
"configurations": [
{
"name": "Debug Tests (net10.0)",
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build",
"program": "dotnet",
"args": [
"test",
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
"-f",
"net10.0",
"--no-build",
"--verbosity=normal"
],
"cwd": "${workspaceFolder}",
"console": "internalConsole",
"stopAtEntry": false
},
{
"name": "Debug Specific Test (net10.0)",
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build",
"program": "dotnet",
"args": [
"test",
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
"-f",
"net10.0",
"--no-build",
"--filter",
"FullyQualifiedName~${input:testName}"
],
"cwd": "${workspaceFolder}",
"console": "internalConsole",
"stopAtEntry": false
},
{
"name": "Debug Performance Tests",
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build",
"program": "dotnet",
"args": [
"run",
"--project",
"${workspaceFolder}/tests/SharpCompress.Performance/SharpCompress.Performance.csproj",
"--no-build"
],
"cwd": "${workspaceFolder}",
"console": "internalConsole",
"stopAtEntry": false
},
{
"name": "Debug Build Script",
"type": "coreclr",
"request": "launch",
"program": "dotnet",
"args": [
"run",
"--project",
"${workspaceFolder}/build/build.csproj",
"--",
"${input:buildTarget}"
],
"cwd": "${workspaceFolder}",
"console": "internalConsole",
"stopAtEntry": false
}
],
"inputs": [
{
"id": "testName",
"type": "promptString",
"description": "Enter test name or pattern (e.g., TestMethodName or ClassName)",
"default": ""
},
{
"id": "buildTarget",
"type": "pickString",
"description": "Select build target",
"options": [
"clean",
"restore",
"build",
"test",
"format",
"publish",
"default"
],
"default": "build"
}
]
}

.vscode/settings.json (vendored): 29 changed lines

@@ -1,29 +0,0 @@
{
"dotnet.defaultSolution": "SharpCompress.sln",
"files.exclude": {
"**/bin": true,
"**/obj": true
},
"files.watcherExclude": {
"**/bin/**": true,
"**/obj/**": true,
"**/artifacts/**": true
},
"search.exclude": {
"**/bin": true,
"**/obj": true,
"**/artifacts": true
},
"editor.formatOnSave": false,
"[csharp]": {
"editor.defaultFormatter": "csharpier.csharpier-vscode",
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.fixAll": "explicit"
}
},
"csharpier.enableDebugLogs": false,
"omnisharp.enableRoslynAnalyzers": true,
"omnisharp.enableEditorConfigSupport": true,
"dotnet-test-explorer.testProjectPath": "tests/**/*.csproj"
}

.vscode/tasks.json (vendored): 178 changed lines

@@ -1,178 +0,0 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "build",
"command": "dotnet",
"type": "process",
"args": [
"build",
"${workspaceFolder}/SharpCompress.sln",
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary;ForceNoAlign"
],
"problemMatcher": "$msCompile",
"group": {
"kind": "build",
"isDefault": true
}
},
{
"label": "build-release",
"command": "dotnet",
"type": "process",
"args": [
"build",
"${workspaceFolder}/SharpCompress.sln",
"-c",
"Release",
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary;ForceNoAlign"
],
"problemMatcher": "$msCompile",
"group": "build"
},
{
"label": "build-library",
"command": "dotnet",
"type": "process",
"args": [
"build",
"${workspaceFolder}/src/SharpCompress/SharpCompress.csproj",
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary;ForceNoAlign"
],
"problemMatcher": "$msCompile",
"group": "build"
},
{
"label": "restore",
"command": "dotnet",
"type": "process",
"args": [
"restore",
"${workspaceFolder}/SharpCompress.sln"
],
"problemMatcher": "$msCompile"
},
{
"label": "clean",
"command": "dotnet",
"type": "process",
"args": [
"clean",
"${workspaceFolder}/SharpCompress.sln"
],
"problemMatcher": "$msCompile"
},
{
"label": "test",
"command": "dotnet",
"type": "process",
"args": [
"test",
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
"--no-build",
"--verbosity=normal"
],
"problemMatcher": "$msCompile",
"group": {
"kind": "test",
"isDefault": true
},
"dependsOn": "build"
},
{
"label": "test-net10",
"command": "dotnet",
"type": "process",
"args": [
"test",
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
"-f",
"net10.0",
"--no-build",
"--verbosity=normal"
],
"problemMatcher": "$msCompile",
"group": "test",
"dependsOn": "build"
},
{
"label": "test-net48",
"command": "dotnet",
"type": "process",
"args": [
"test",
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
"-f",
"net48",
"--no-build",
"--verbosity=normal"
],
"problemMatcher": "$msCompile",
"group": "test",
"dependsOn": "build"
},
{
"label": "format",
"command": "dotnet",
"type": "process",
"args": [
"csharpier",
"."
],
"problemMatcher": []
},
{
"label": "format-check",
"command": "dotnet",
"type": "process",
"args": [
"csharpier",
"check",
"."
],
"problemMatcher": []
},
{
"label": "run-build-script",
"command": "dotnet",
"type": "process",
"args": [
"run",
"--project",
"${workspaceFolder}/build/build.csproj"
],
"problemMatcher": "$msCompile"
},
{
"label": "pack",
"command": "dotnet",
"type": "process",
"args": [
"pack",
"${workspaceFolder}/src/SharpCompress/SharpCompress.csproj",
"-c",
"Release",
"-o",
"${workspaceFolder}/artifacts/"
],
"problemMatcher": "$msCompile",
"dependsOn": "build-release"
},
{
"label": "performance-tests",
"command": "dotnet",
"type": "process",
"args": [
"run",
"--project",
"${workspaceFolder}/tests/SharpCompress.Performance/SharpCompress.Performance.csproj",
"-c",
"Release"
],
"problemMatcher": "$msCompile"
}
]
}


@@ -49,30 +49,6 @@ SharpCompress is a pure C# compression library supporting multiple archive forma
- Use `dotnet test` to run tests
- Solution file: `SharpCompress.sln`
### Directory Structure
```
src/SharpCompress/
├── Archives/ # IArchive implementations (Zip, Tar, Rar, 7Zip, GZip)
├── Readers/ # IReader implementations (forward-only)
├── Writers/ # IWriter implementations (forward-only)
├── Compressors/ # Low-level compression streams (BZip2, Deflate, LZMA, etc.)
├── Factories/ # Format detection and factory pattern
├── Common/ # Shared types (ArchiveType, Entry, Options)
├── Crypto/ # Encryption implementations
└── IO/ # Stream utilities and wrappers
tests/SharpCompress.Test/
├── Zip/, Tar/, Rar/, SevenZip/, GZip/, BZip2/ # Format-specific tests
├── TestBase.cs # Base test class with helper methods
└── TestArchives/ # Test data (not checked into main test project)
```
### Factory Pattern
All format types implement factory interfaces (`IArchiveFactory`, `IReaderFactory`, `IWriterFactory`) for auto-detection:
- `ReaderFactory.Open()` - Auto-detects the format by probing the stream (see the sketch after this list)
- `WriterFactory.Open()` - Creates writer for specified `ArchiveType`
- Factories located in: `src/SharpCompress/Factories/`
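A minimal auto-detection sketch (not part of this commit; the file and directory names are illustrative) using the forward-only Reader API:
```
using System;
using System.IO;
using SharpCompress.Readers;

// ReaderFactory probes the stream and selects the matching format (forward-only).
using var stream = File.OpenRead("archive.tar.gz");
using var reader = ReaderFactory.Open(stream);
while (reader.MoveToNextEntry())
{
    Console.WriteLine(reader.Entry.Key); // entry path inside the archive
}
```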
## Nullable Reference Types
- Declare variables non-nullable, and check for `null` at entry points.
@@ -140,18 +116,3 @@ SharpCompress supports multiple archive and compression formats:
- Use test archives from `tests/TestArchives` directory for consistency.
- Test stream disposal and `LeaveStreamOpen` behavior.
- Test edge cases: empty archives, large files, corrupted archives, encrypted archives.
### Test Organization
- Base class: `TestBase` - Provides `TEST_ARCHIVES_PATH`, `SCRATCH_FILES_PATH`, temp directory management
- Framework: xUnit with AwesomeAssertions
- Test archives: `tests/TestArchives/` - Use existing archives, don't create new ones unnecessarily
- Match naming style of nearby test files
## Common Pitfalls
1. **Don't mix Archive and Reader APIs** - Archive needs a seekable stream, Reader doesn't (see the sketch after this list)
2. **Solid archives (Rar, 7Zip)** - Use `ExtractAllEntries()` for best performance, not individual entry extraction
3. **Stream disposal** - Always set `LeaveStreamOpen` explicitly when needed (default is to close)
4. **Tar + non-seekable stream** - Must provide file size or it will throw
5. **Multi-framework differences** - Some features differ between .NET Framework and modern .NET (e.g., Mono.Posix)
6. **Format detection** - Use `ReaderFactory.Open()` for auto-detection, test with actual archive files
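A hedged sketch covering pitfalls 1-3 (assumes an existing `archive.rar` and an `output` directory; names are illustrative):
```
using System.IO;
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.Readers;

// Pitfalls 1 and 3: the Archive API needs a seekable stream; LeaveStreamOpen is set explicitly.
using var fileStream = File.OpenRead("archive.rar");
using var archive = ArchiveFactory.Open(fileStream, new ReaderOptions { LeaveStreamOpen = false });

// Pitfall 2: solid archives (Rar, 7Zip) extract fastest through ExtractAllEntries().
using var reader = archive.ExtractAllEntries();
while (reader.MoveToNextEntry())
{
    if (!reader.Entry.IsDirectory)
    {
        reader.WriteEntryToDirectory(
            "output",
            new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
        );
    }
}
```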


@@ -1,7 +1,10 @@
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -12,10 +15,8 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
where TEntry : IArchiveEntry
where TVolume : IVolume
{
private readonly LazyReadOnlyCollection<TVolume> _lazyVolumes;
private readonly LazyReadOnlyCollection<TEntry> _lazyEntries;
private bool _disposed;
private readonly SourceStream? _sourceStream;
protected SourceStream? SourceStream { get; internal set; }
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionBegin;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionEnd;
@@ -24,21 +25,24 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
public event EventHandler<FilePartExtractionBeginEventArgs>? FilePartExtractionBegin;
protected ReaderOptions ReaderOptions { get; }
protected Lazy<IReadOnlyCollection<TVolume>> LazyVolumes { get; internal set; }
protected Lazy<IReadOnlyCollection<TEntry>> LazyEntries { get; internal set; }
internal AbstractArchive(ArchiveType type, SourceStream sourceStream)
{
Type = type;
ReaderOptions = sourceStream.ReaderOptions;
_sourceStream = sourceStream;
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(_sourceStream));
_lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
SourceStream = sourceStream;
LazyVolumes = new Lazy<IReadOnlyCollection<TVolume>>(() => LoadVolumes(SourceStream).ToList());
LazyEntries = new Lazy<IReadOnlyCollection<TEntry>>(() => LoadEntries(Volumes).ToList());
}
internal AbstractArchive(ArchiveType type)
{
Type = type;
ReaderOptions = new();
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
_lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
LazyVolumes = new Lazy<IReadOnlyCollection<TVolume>>(() => Enumerable.Empty<TVolume>().ToList());
LazyEntries = new Lazy<IReadOnlyCollection<TEntry>>(() => Enumerable.Empty<TEntry>().ToList());
}
public ArchiveType Type { get; }
@@ -61,12 +65,12 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
/// </summary>
public virtual ICollection<TEntry> Entries => _lazyEntries;
public virtual ICollection<TEntry> Entries => LazyEntries.Value;
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveVolumes across the one or many parts of the RarArchive.
/// </summary>
public ICollection<TVolume> Volumes => _lazyVolumes;
public ICollection<TVolume> Volumes => LazyVolumes.Value;
/// <summary>
/// The total size of the files compressed in the archive.
@@ -82,18 +86,26 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream sourceStream);
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);
protected virtual Task<IReadOnlyCollection<TEntry>> LoadEntriesAsync(IEnumerable<TVolume> volumes, CancellationToken cancellationToken)
{
return Task.FromResult<IReadOnlyCollection<TEntry>>(LoadEntries(volumes).ToList());
}
protected virtual Task<IReadOnlyCollection<TVolume>> LoadVolumesAsync(SourceStream sourceStream, CancellationToken cancellationToken)
{
return Task.FromResult<IReadOnlyCollection<TVolume>>(LoadVolumes(sourceStream).ToList());
}
IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();
IEnumerable<IVolume> IArchive.Volumes => _lazyVolumes.Cast<IVolume>();
IEnumerable<IVolume> IArchive.Volumes => LazyVolumes.Value.Cast<IVolume>();
public virtual void Dispose()
{
if (!_disposed)
{
_lazyVolumes.ForEach(v => v.Dispose());
_lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
_sourceStream?.Dispose();
LazyVolumes.Value.ForEach(v => v.Dispose());
LazyEntries.Value.Cast<Entry>().ForEach(x => x.Close());
SourceStream?.Dispose();
_disposed = true;
}
@@ -101,8 +113,8 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
void IArchiveExtractionListener.EnsureEntriesLoaded()
{
_lazyEntries.EnsureFullyLoaded();
_lazyVolumes.EnsureFullyLoaded();
LazyEntries.Value.EnsureFullyLoaded();
LazyVolumes.Value.EnsureFullyLoaded();
}
void IExtractionListener.FireCompressedBytesRead(
@@ -161,11 +173,6 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
/// </summary>
public virtual bool IsSolid => false;
/// <summary>
/// Archive is ENCRYPTED (this means the Archive has password-protected files).
/// </summary>
public virtual bool IsEncrypted => false;
/// <summary>
/// The archive can find all the parts of the archive needed to fully extract the archive. This forces the parsing of the entire archive.
/// </summary>


@@ -2,11 +2,9 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Factories;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -28,6 +26,25 @@ public static class ArchiveFactory
return FindFactory<IArchiveFactory>(stream).Open(stream, readerOptions);
}
/// <summary>
/// Opens an Archive for random access
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
/// <returns></returns>
public static async Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
readerOptions ??= new ReaderOptions();
stream = SharpCompressStream.Create(stream, bufferSize: readerOptions.BufferSize);
var factory = await FindFactoryAsync<IArchiveFactory>(stream, cancellationToken);
return await factory.OpenAsync(stream, readerOptions, cancellationToken);
}
public static IWritableArchive Create(ArchiveType type)
{
var factory = Factory
@@ -53,6 +70,22 @@ public static class ArchiveFactory
return Open(new FileInfo(filePath), options);
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static Task<IArchive> OpenAsync(
string filePath,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
filePath.NotNullOrEmpty(nameof(filePath));
return OpenAsync(new FileInfo(filePath), options, cancellationToken);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -65,6 +98,24 @@ public static class ArchiveFactory
return FindFactory<IArchiveFactory>(fileInfo).Open(fileInfo, options);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
var factory = await FindFactoryAsync<IArchiveFactory>(fileInfo, cancellationToken);
return await factory.OpenAsync(fileInfo, options, cancellationToken);
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
@@ -91,6 +142,38 @@ public static class ArchiveFactory
return FindFactory<IMultiArchiveFactory>(fileInfo).Open(filesArray, options);
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
fileInfos.NotNull(nameof(fileInfos));
var filesArray = fileInfos.ToArray();
if (filesArray.Length == 0)
{
throw new InvalidOperationException("No files to open");
}
var fileInfo = filesArray[0];
if (filesArray.Length == 1)
{
return await OpenAsync(fileInfo, options, cancellationToken);
}
fileInfo.NotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
var factory = await FindFactoryAsync<IMultiArchiveFactory>(fileInfo, cancellationToken);
return await factory.OpenAsync(filesArray, options, cancellationToken);
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
@@ -117,6 +200,38 @@ public static class ArchiveFactory
return FindFactory<IMultiArchiveFactory>(firstStream).Open(streamsArray, options);
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
IEnumerable<Stream> streams,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
streams.NotNull(nameof(streams));
var streamsArray = streams.ToArray();
if (streamsArray.Length == 0)
{
throw new InvalidOperationException("No streams");
}
var firstStream = streamsArray[0];
if (streamsArray.Length == 1)
{
return await OpenAsync(firstStream, options, cancellationToken);
}
firstStream.NotNull(nameof(firstStream));
options ??= new ReaderOptions();
var factory = await FindFactoryAsync<IMultiArchiveFactory>(firstStream, cancellationToken);
return await factory.OpenAsync(streamsArray, options, cancellationToken);
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
@@ -130,15 +245,40 @@ public static class ArchiveFactory
archive.WriteToDirectory(destinationDirectory, options);
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static async Task WriteToDirectoryAsync(
string sourceArchive,
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
)
{
using var archive = await OpenAsync(sourceArchive, cancellationToken: cancellationToken);
await archive.WriteToDirectoryAsync(destinationDirectory, options, cancellationToken);
}
private static T FindFactory<T>(FileInfo finfo)
where T : IFactory
{
finfo.NotNull(nameof(finfo));
using Stream stream = finfo.OpenRead();
return FindFactory<T>(stream, finfo.Name);
return FindFactory<T>(stream);
}
private static T FindFactory<T>(Stream stream, string? fileName = null)
private static async Task<T> FindFactoryAsync<T>(
FileInfo finfo,
CancellationToken cancellationToken
)
where T : IFactory
{
finfo.NotNull(nameof(finfo));
using Stream stream = finfo.OpenRead();
return await FindFactoryAsync<T>(stream, cancellationToken);
}
private static T FindFactory<T>(Stream stream)
where T : IFactory
{
stream.NotNull(nameof(stream));
@@ -163,14 +303,36 @@ public static class ArchiveFactory
}
}
stream.Seek(startPosition, SeekOrigin.Begin);
var extensions = string.Join(", ", factories.Select(item => item.Name));
// Check if this is a compressed tar file (tar.bz2, tar.lz, etc.)
// These formats are supported by ReaderFactory but not by ArchiveFactory
var compressedTarMessage = TryGetCompressedTarMessage(stream, fileName);
if (compressedTarMessage != null)
throw new InvalidOperationException(
$"Cannot determine compressed stream type. Supported Archive Formats: {extensions}"
);
}
private static async Task<T> FindFactoryAsync<T>(Stream stream, CancellationToken cancellationToken)
where T : IFactory
{
stream.NotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
throw new InvalidOperationException(compressedTarMessage);
throw new ArgumentException("Stream should be readable and seekable");
}
var factories = Factory.Factories.OfType<T>();
var startPosition = stream.Position;
foreach (var factory in factories)
{
stream.Seek(startPosition, SeekOrigin.Begin);
if (await factory.IsArchiveAsync(stream, cancellationToken).ConfigureAwait(false))
{
stream.Seek(startPosition, SeekOrigin.Begin);
return factory;
}
}
var extensions = string.Join(", ", factories.Select(item => item.Name));
@@ -191,6 +353,17 @@ public static class ArchiveFactory
return IsArchive(s, out type, bufferSize);
}
public static async Task<(bool, ArchiveType?)> IsArchiveAsync(
string filePath,
CancellationToken cancellationToken = default,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
filePath.NotNullOrEmpty(nameof(filePath));
using Stream s = File.OpenRead(filePath);
return await IsArchiveAsync(s, cancellationToken, bufferSize);
}
public static bool IsArchive(
Stream stream,
out ArchiveType? type,
@@ -222,6 +395,36 @@ public static class ArchiveFactory
return false;
}
public static async Task<(bool, ArchiveType?)> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken = default,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
stream.NotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException("Stream should be readable and seekable");
}
var startPosition = stream.Position;
foreach (var factory in Factory.Factories)
{
var isArchive = await factory.IsArchiveAsync(stream, cancellationToken)
.ConfigureAwait(false);
stream.Position = startPosition;
if (isArchive)
{
return (true, factory.KnownArchiveType);
}
}
return (false, null);
}
/// <summary>
/// From a passed in archive (zip, rar, 7z, 001), return all parts.
/// </summary>
@@ -262,111 +465,4 @@ public static class ArchiveFactory
}
public static IArchiveFactory AutoFactory { get; } = new AutoArchiveFactory();
/// <summary>
/// Checks if the stream is a compressed tar file (tar.bz2, tar.lz, etc.) that should use ReaderFactory instead.
/// Returns an error message if detected, null otherwise.
/// </summary>
private static string? TryGetCompressedTarMessage(Stream stream, string? fileName)
{
var startPosition = stream.Position;
try
{
// Check if it's a BZip2 file
if (BZip2Stream.IsBZip2(stream))
{
stream.Seek(startPosition, SeekOrigin.Begin);
// Try to decompress and check if it contains a tar archive
using var decompressed = new BZip2Stream(stream, CompressionMode.Decompress, true);
if (IsTarStream(decompressed))
{
return "This appears to be a tar.bz2 archive. The Archive API requires seekable streams, but decompression streams are not seekable. "
+ "Please use ReaderFactory.Open() instead for forward-only extraction, "
+ "or decompress the file first and then open the resulting tar file with ArchiveFactory.Open().";
}
return null;
}
stream.Seek(startPosition, SeekOrigin.Begin);
// Check if it's an LZip file
if (LZipStream.IsLZipFile(stream))
{
stream.Seek(startPosition, SeekOrigin.Begin);
// Try to decompress and check if it contains a tar archive
using var decompressed = new LZipStream(stream, CompressionMode.Decompress);
if (IsTarStream(decompressed))
{
return "This appears to be a tar.lz archive. The Archive API requires seekable streams, but decompression streams are not seekable. "
+ "Please use ReaderFactory.Open() instead for forward-only extraction, "
+ "or decompress the file first and then open the resulting tar file with ArchiveFactory.Open().";
}
return null;
}
// Check file extension as a fallback for other compressed tar formats
if (fileName != null)
{
var lowerFileName = fileName.ToLowerInvariant();
if (
lowerFileName.EndsWith(".tar.bz2")
|| lowerFileName.EndsWith(".tbz")
|| lowerFileName.EndsWith(".tbz2")
|| lowerFileName.EndsWith(".tb2")
|| lowerFileName.EndsWith(".tz2")
|| lowerFileName.EndsWith(".tar.lz")
|| lowerFileName.EndsWith(".tar.xz")
|| lowerFileName.EndsWith(".txz")
|| lowerFileName.EndsWith(".tar.zst")
|| lowerFileName.EndsWith(".tar.zstd")
|| lowerFileName.EndsWith(".tzst")
|| lowerFileName.EndsWith(".tzstd")
|| lowerFileName.EndsWith(".tar.z")
|| lowerFileName.EndsWith(".tz")
|| lowerFileName.EndsWith(".taz")
)
{
return $"The file '{fileName}' appears to be a compressed tar archive. The Archive API requires seekable streams, but decompression streams are not seekable. "
+ "Please use ReaderFactory.Open() instead for forward-only extraction, "
+ "or decompress the file first and then open the resulting tar file with ArchiveFactory.Open().";
}
}
return null;
}
catch
{
// If we can't determine, just return null and let the normal error handling proceed
return null;
}
finally
{
try
{
stream.Seek(startPosition, SeekOrigin.Begin);
}
catch
{
// Ignore seek failures
}
}
}
/// <summary>
/// Checks if a stream contains a tar archive by trying to read a tar header.
/// </summary>
private static bool IsTarStream(Stream stream)
{
try
{
var tarHeader = new TarHeader(new ArchiveEncoding());
return tarHeader.Read(new BinaryReader(stream));
}
catch
{
return false;
}
}
}


@@ -2,11 +2,13 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
using SharpCompress.IO;
using SharpCompress.Polyfills;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
using SharpCompress.Writers;
@@ -27,6 +29,22 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<GZipArchive> OpenAsync(
string filePath,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
filePath.NotNullOrEmpty(nameof(filePath));
return await OpenAsync(new FileInfo(filePath), readerOptions ?? new ReaderOptions(), cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -44,6 +62,32 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<GZipArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
fileInfo.NotNull(nameof(fileInfo));
var archive = new GZipArchive();
archive.SourceStream = new SourceStream(
fileInfo,
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
);
archive.LazyVolumes = new Lazy<IReadOnlyCollection<GZipVolume>>(() => archive.LoadVolumes(archive.SourceStream).ToList());
archive.LazyEntries = new Lazy<IReadOnlyCollection<GZipArchiveEntry>>(
() => archive.LoadEntriesAsync(archive.Volumes, cancellationToken).GetAwaiter().GetResult().ToList()
);
return archive;
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
@@ -65,6 +109,33 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
);
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<GZipArchive> OpenAsync(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
var archive = new GZipArchive();
archive.SourceStream = new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
);
archive.LazyVolumes = new Lazy<IReadOnlyCollection<GZipVolume>>(() => archive.LoadVolumes(archive.SourceStream).ToList());
archive.LazyEntries = new Lazy<IReadOnlyCollection<GZipArchiveEntry>>(
() => archive.LoadEntriesAsync(archive.Volumes, cancellationToken).GetAwaiter().GetResult().ToList()
);
return archive;
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
@@ -83,6 +154,33 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
);
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<GZipArchive> OpenAsync(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
var archive = new GZipArchive();
archive.SourceStream = new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
);
archive.LazyVolumes = new Lazy<IReadOnlyCollection<GZipVolume>>(() => archive.LoadVolumes(archive.SourceStream).ToList());
archive.LazyEntries = new Lazy<IReadOnlyCollection<GZipArchiveEntry>>(
() => archive.LoadEntriesAsync(archive.Volumes, cancellationToken).GetAwaiter().GetResult().ToList()
);
return archive;
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
@@ -102,21 +200,62 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
);
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<GZipArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
var archive = new GZipArchive();
archive.SourceStream = new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions());
archive.LazyVolumes = new Lazy<IReadOnlyCollection<GZipVolume>>(() => archive.LoadVolumes(archive.SourceStream).ToList());
archive.LazyEntries = new Lazy<IReadOnlyCollection<GZipArchiveEntry>>(
() => archive.LoadEntriesAsync(archive.Volumes, cancellationToken).GetAwaiter().GetResult().ToList()
);
return archive;
}
public static GZipArchive Create() => new();
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="sourceStream"></param>
private GZipArchive(SourceStream sourceStream)
: base(ArchiveType.GZip, sourceStream) { }
private GZipArchive()
: base(ArchiveType.GZip)
{
LazyVolumes = new Lazy<IReadOnlyCollection<GZipVolume>>(() => LoadVolumes(SourceStream!).ToList());
LazyEntries = new Lazy<IReadOnlyCollection<GZipArchiveEntry>>(() => LoadEntries(Volumes).ToList());
}
protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream sourceStream)
{
sourceStream.LoadAllParts();
return sourceStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, 0));
}
protected override async Task<IReadOnlyCollection<GZipVolume>> LoadVolumesAsync(SourceStream sourceStream, CancellationToken cancellationToken)
{
await sourceStream.LoadAllPartsAsync(cancellationToken).ConfigureAwait(false);
return sourceStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, 0)).ToList();
}
public static bool IsGZipFile(string filePath) => IsGZipFile(new FileInfo(filePath));
public static bool IsGZipFile(FileInfo fileInfo)
@@ -130,6 +269,25 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
return IsGZipFile(stream);
}
public static Task<bool> IsGZipFileAsync(
string filePath,
CancellationToken cancellationToken = default
) => IsGZipFileAsync(new FileInfo(filePath), cancellationToken);
public static async Task<bool> IsGZipFileAsync(
FileInfo fileInfo,
CancellationToken cancellationToken = default
)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return await IsGZipFileAsync(stream, cancellationToken).ConfigureAwait(false);
}
public void SaveTo(string filePath) => SaveTo(new FileInfo(filePath));
public void SaveTo(FileInfo fileInfo)
@@ -167,6 +325,28 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
return true;
}
public static async Task<bool> IsGZipFileAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
// read the header on the first read
byte[] header = new byte[10];
// workitem 8501: handle edge case (decompress empty stream)
if (!await stream.ReadFullyAsync(header, cancellationToken).ConfigureAwait(false))
{
return false;
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
return false;
}
return true;
}
internal GZipArchive()
: base(ArchiveType.GZip) { }
@@ -244,6 +424,17 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
);
}
protected override async Task<IReadOnlyCollection<GZipArchiveEntry>> LoadEntriesAsync(IEnumerable<GZipVolume> volumes, CancellationToken cancellationToken)
{
var list = new List<GZipArchiveEntry>();
var stream = volumes.Single().Stream;
list.Add(new GZipArchiveEntry(
this,
new GZipFilePart(stream, ReaderOptions.ArchiveEncoding)
));
return list.AsReadOnly();
}
protected override IReader CreateReaderForSolidExtraction()
{
var stream = Volumes.Single().Stream;
@@ -251,3 +442,4 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
return GZipReader.Open(stream);
}
}


@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Factories;
using SharpCompress.Readers;
@@ -26,10 +28,34 @@ public interface IArchiveFactory : IFactory
/// <param name="readerOptions">reading options.</param>
IArchive Open(Stream stream, ReaderOptions? readerOptions = null);
/// <summary>
/// Opens an Archive for random access.
/// </summary>
/// <param name="stream">An open, readable and seekable stream.</param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">the cancellation token.</param>
Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo">the file to open.</param>
/// <param name="readerOptions">reading options.</param>
IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null);
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo">the file to open.</param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">the cancellation token.</param>
Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
}


@@ -1,5 +1,7 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Factories;
using SharpCompress.Readers;
@@ -27,10 +29,34 @@ public interface IMultiArchiveFactory : IFactory
/// <param name="readerOptions">reading options.</param>
IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null);
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">the cancellation token.</param>
Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
/// <summary>
/// Constructor with IEnumerable Stream objects, multi and split support.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions">reading options.</param>
IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null);
/// <summary>
/// Constructor with IEnumerable Stream objects, multi and split support.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">the cancellation token.</param>
Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
}


@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
@@ -84,8 +86,6 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
public override bool IsSolid => Volumes.First().IsSolidArchive;
public override bool IsEncrypted => Entries.First(x => !x.IsDirectory).IsEncrypted;
public virtual int MinVersion => Volumes.First().MinVersion;
public virtual int MaxVersion => Volumes.First().MaxVersion;
@@ -193,6 +193,24 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
return IsRarFile(stream);
}
public static Task<bool> IsRarFileAsync(
string filePath,
CancellationToken cancellationToken = default
) => IsRarFileAsync(new FileInfo(filePath), cancellationToken);
public static async Task<bool> IsRarFileAsync(
FileInfo fileInfo,
CancellationToken cancellationToken = default
)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return await IsRarFileAsync(stream, cancellationToken).ConfigureAwait(false);
}
public static bool IsRarFile(Stream stream, ReaderOptions? options = null)
{
try
@@ -206,5 +224,23 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
}
}
public static async Task<bool> IsRarFileAsync(
Stream stream,
CancellationToken cancellationToken = default,
ReaderOptions? options = null
)
{
try
{
await MarkHeader.ReadAsync(stream, true, false, cancellationToken)
.ConfigureAwait(false);
return true;
}
catch
{
return false;
}
}
#endregion
}


@@ -2,10 +2,14 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.SevenZip;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
using SharpCompress.Polyfills;
using SharpCompress.Readers;
namespace SharpCompress.Archives.SevenZip;
@@ -25,6 +29,18 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static Task<SevenZipArchive> OpenAsync(
string filePath,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -42,6 +58,18 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static Task<SevenZipArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
@@ -63,6 +91,18 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
);
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static Task<SevenZipArchive> OpenAsync(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
@@ -84,6 +124,18 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
);
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static Task<SevenZipArchive> OpenAsync(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
@@ -103,6 +155,18 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
);
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static Task<SevenZipArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
@@ -118,6 +182,11 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
public static bool IsSevenZipFile(string filePath) => IsSevenZipFile(new FileInfo(filePath));
public static Task<bool> IsSevenZipFileAsync(
string filePath,
CancellationToken cancellationToken = default
) => IsSevenZipFileAsync(new FileInfo(filePath), cancellationToken);
public static bool IsSevenZipFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
@@ -128,6 +197,19 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
return IsSevenZipFile(stream);
}
public static async Task<bool> IsSevenZipFileAsync(
FileInfo fileInfo,
CancellationToken cancellationToken = default
)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return await IsSevenZipFileAsync(stream, cancellationToken).ConfigureAwait(false);
}
internal SevenZipArchive()
: base(ArchiveType.SevenZip) { }
@@ -163,6 +245,8 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
return entries;
}
protected override Task<IReadOnlyCollection<SevenZipArchiveEntry>> LoadEntriesAsync(IEnumerable<SevenZipVolume> volumes, CancellationToken cancellationToken) => throw new NotImplementedException();
private void LoadFactory(Stream stream)
{
if (_database is null)
@@ -174,6 +258,8 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
}
}
protected override Task<IReadOnlyCollection<SevenZipVolume>> LoadVolumesAsync(SourceStream sourceStream, CancellationToken cancellationToken) => throw new NotImplementedException();
public static bool IsSevenZipFile(Stream stream)
{
try
@@ -186,6 +272,21 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
}
}
public static async Task<bool> IsSevenZipFileAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
try
{
return await SignatureMatchAsync(stream, cancellationToken).ConfigureAwait(false);
}
catch
{
return false;
}
}
private static ReadOnlySpan<byte> Signature =>
new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };
@@ -196,6 +297,16 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
return signatureBytes.SequenceEqual(Signature);
}
private static async Task<bool> SignatureMatchAsync(
Stream stream,
CancellationToken cancellationToken
)
{
var signatureBytes = new byte[6];
await stream.ReadFullyAsync(signatureBytes, cancellationToken).ConfigureAwait(false);
return signatureBytes.SequenceEqual(Signature.ToArray());
}
protected override IReader CreateReaderForSolidExtraction() =>
new SevenZipReader(ReaderOptions, this);
@@ -205,8 +316,6 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
.GroupBy(x => x.FilePart.Folder)
.Any(folder => folder.Count() > 1);
public override bool IsEncrypted => Entries.First(x => !x.IsDirectory).IsEncrypted;
public override long TotalSize =>
_database?._packSizes.Aggregate(0L, (total, packSize) => total + packSize) ?? 0;


@@ -2,12 +2,14 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
using SharpCompress.Polyfills;
using SharpCompress.Readers;
using SharpCompress.Readers.Tar;
using SharpCompress.Writers;
@@ -28,6 +30,22 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<TarArchive> OpenAsync(
string filePath,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
filePath.NotNullOrEmpty(nameof(filePath));
return await OpenAsync(new FileInfo(filePath), readerOptions ?? new ReaderOptions(), cancellationToken);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -45,6 +63,32 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<TarArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
fileInfo.NotNull(nameof(fileInfo));
var archive = new TarArchive();
archive.SourceStream = new SourceStream(
fileInfo,
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
);
archive.LazyVolumes = new Lazy<IReadOnlyCollection<TarVolume>>(() => archive.LoadVolumes(archive.SourceStream).ToList());
archive.LazyEntries = new Lazy<IReadOnlyCollection<TarArchiveEntry>>(
() => archive.LoadEntriesAsync(archive.Volumes, cancellationToken).GetAwaiter().GetResult().ToList()
);
return archive;
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
@@ -66,6 +110,33 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
);
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<TarArchive> OpenAsync(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
var archive = new TarArchive();
archive.SourceStream = new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
);
archive.LazyVolumes = new Lazy<IReadOnlyCollection<TarVolume>>(() => archive.LoadVolumes(archive.SourceStream).ToList());
archive.LazyEntries = new Lazy<IReadOnlyCollection<TarArchiveEntry>>(
() => archive.LoadEntriesAsync(archive.Volumes, cancellationToken).GetAwaiter().GetResult().ToList()
);
return archive;
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
@@ -84,6 +155,33 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
);
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<TarArchive> OpenAsync(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
var archive = new TarArchive();
archive.SourceStream = new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
);
archive.LazyVolumes = new Lazy<IReadOnlyCollection<TarVolume>>(() => archive.LoadVolumes(archive.SourceStream).ToList());
archive.LazyEntries = new Lazy<IReadOnlyCollection<TarArchiveEntry>>(
() => archive.LoadEntriesAsync(archive.Volumes, cancellationToken).GetAwaiter().GetResult().ToList()
);
return archive;
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
@@ -103,6 +201,34 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
);
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<TarArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
var archive = new TarArchive();
archive.SourceStream = new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions());
archive.LazyVolumes = new Lazy<IReadOnlyCollection<TarVolume>>(() => archive.LoadVolumes(archive.SourceStream).ToList());
archive.LazyEntries = new Lazy<IReadOnlyCollection<TarArchiveEntry>>(
() => archive.LoadEntriesAsync(archive.Volumes, cancellationToken).GetAwaiter().GetResult().ToList()
);
return archive;
}
public static bool IsTarFile(string filePath) => IsTarFile(new FileInfo(filePath));
public static bool IsTarFile(FileInfo fileInfo)
@@ -115,6 +241,24 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
return IsTarFile(stream);
}
public static async Task<bool> IsTarFileAsync(
string filePath,
CancellationToken cancellationToken = default
) => await IsTarFileAsync(new FileInfo(filePath), cancellationToken);
public static async Task<bool> IsTarFileAsync(
FileInfo fileInfo,
CancellationToken cancellationToken = default
)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return await IsTarFileAsync(stream, cancellationToken);
}
public static bool IsTarFile(Stream stream)
{
try
@@ -131,6 +275,26 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
return false;
}
public static async Task<bool> IsTarFileAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
try
{
var tarHeader = new TarHeader(new ArchiveEncoding());
var readSucceeded = await tarHeader.ReadAsync(stream, cancellationToken)
.ConfigureAwait(false);
var isEmptyArchive =
tarHeader.Name?.Length == 0
&& tarHeader.Size == 0
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;
}
catch { }
return false;
}
protected override IEnumerable<TarVolume> LoadVolumes(SourceStream sourceStream)
{
sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams
@@ -145,7 +309,11 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
: base(ArchiveType.Tar, sourceStream) { }
private TarArchive()
: base(ArchiveType.Tar) { }
: base(ArchiveType.Tar)
{
LazyVolumes = new Lazy<IReadOnlyCollection<TarVolume>>(() => LoadVolumes(SourceStream!).ToList());
LazyEntries = new Lazy<IReadOnlyCollection<TarArchiveEntry>>(() => LoadEntries(Volumes).ToList());
}
protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
{
@@ -205,6 +373,67 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
}
}
private async Task<IReadOnlyCollection<TarArchiveEntry>> LoadEntriesAsync(IEnumerable<TarVolume> volumes, CancellationToken cancellationToken)
{
var list = new List<TarArchiveEntry>();
var stream = volumes.Single().Stream;
TarHeader? previousHeader = null;
await foreach (
var header in TarHeaderFactory.ReadHeaderAsync(
StreamingMode.Seekable,
stream,
ReaderOptions.ArchiveEncoding,
cancellationToken
).WithCancellation(cancellationToken)
)
{
if (header != null)
{
if (header.EntryType == EntryType.LongName)
{
previousHeader = header;
}
else
{
if (previousHeader != null)
{
var entry = new TarArchiveEntry(
this,
new TarFilePart(previousHeader, stream),
CompressionType.None
);
var oldStreamPos = stream.Position;
using (var entryStream = entry.OpenEntryStream())
{
using var memoryStream = new MemoryStream();
await entryStream.CopyToAsync(memoryStream, cancellationToken).ConfigureAwait(false);
memoryStream.Position = 0;
var bytes = memoryStream.ToArray();
header.Name = ReaderOptions.ArchiveEncoding.Decode(bytes).TrimNulls();
}
stream.Position = oldStreamPos;
previousHeader = null;
}
list.Add(new TarArchiveEntry(
this,
new TarFilePart(header, stream),
CompressionType.None
));
}
}
else
{
throw new IncompleteArchiveException("Failed to read TAR header");
}
}
return list.AsReadOnly();
}
public static TarArchive Create() => new();
protected override TarArchiveEntry CreateEntryInternal(


@@ -18,7 +18,7 @@ namespace SharpCompress.Archives.Zip;
public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
{
private readonly SeekableZipHeaderFactory? headerFactory;
protected internal SeekableZipHeaderFactory? headerFactory;
/// <summary>
/// Gets or sets the compression level applied to files added to the archive,
@@ -144,6 +144,14 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
return IsZipFile(stream, password, bufferSize);
}
public static bool IsZipFile(
Stream stream,
string? password = null,
@@ -177,6 +185,41 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
}
}
public static async Task<bool> IsZipFileAsync(
Stream stream,
string? password = null,
CancellationToken cancellationToken = default,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = await headerFactory
.ReadStreamHeaderAsync(stream, cancellationToken)
.FirstOrDefaultAsync(x => x.ZipHeaderType != ZipHeaderType.Split, cancellationToken)
.ConfigureAwait(false);
if (header is null)
{
return false;
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
public static bool IsZipMulti(
Stream stream,
string? password = null,
@@ -219,6 +262,50 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
}
}
public static async Task<bool> IsZipMultiAsync(
Stream stream,
string? password = null,
CancellationToken cancellationToken = default,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = await headerFactory
.ReadStreamHeaderAsync(stream, cancellationToken)
.FirstOrDefaultAsync(x => x.ZipHeaderType != ZipHeaderType.Split, cancellationToken)
.ConfigureAwait(false);
if (header is null)
{
if (stream.CanSeek) //could be multipart. Test for central directory - might not be z64 safe
{
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
var x = z.ReadSeekableHeader(stream).FirstOrDefault();
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
}
else
{
return false;
}
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream stream)
{
stream.LoadAllParts(); //request all streams

View File

@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Rar.Headers;
@@ -25,6 +27,20 @@ internal class MarkHeader : IRarHeader
throw new EndOfStreamException();
}
private static async Task<byte> GetByteAsync(
Stream stream,
CancellationToken cancellationToken
)
{
byte[] buffer = new byte[1];
var read = await stream.ReadAsync(buffer, 0, 1, cancellationToken).ConfigureAwait(false);
if (read == 1)
{
return buffer[0];
}
throw new EndOfStreamException();
}
public static MarkHeader Read(Stream stream, bool leaveStreamOpen, bool lookForHeader)
{
var maxScanIndex = lookForHeader ? MAX_SFX_SIZE : 0;
@@ -129,4 +145,115 @@ internal class MarkHeader : IRarHeader
throw new InvalidFormatException("Rar signature not found");
}
public static async Task<MarkHeader> ReadAsync(
Stream stream,
bool leaveStreamOpen,
bool lookForHeader,
CancellationToken cancellationToken
)
{
var maxScanIndex = lookForHeader ? MAX_SFX_SIZE : 0;
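// Self-extracting archives prepend an executable stub, so when lookForHeader is set
// scan up to MAX_SFX_SIZE bytes for the RAR signature instead of requiring it at offset 0.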
try
{
var start = -1;
var b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
while (start <= maxScanIndex)
{
cancellationToken.ThrowIfCancellationRequested();
// Rar old signature: 52 45 7E 5E
// Rar4 signature: 52 61 72 21 1A 07 00
// Rar5 signature: 52 61 72 21 1A 07 01 00
if (b == 0x52)
{
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b == 0x61)
{
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x72)
{
continue;
}
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x21)
{
continue;
}
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x1a)
{
continue;
}
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x07)
{
continue;
}
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b == 1)
{
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0)
{
continue;
}
return new MarkHeader(true); // Rar5
}
else if (b == 0)
{
return new MarkHeader(false); // Rar4
}
}
else if (b == 0x45)
{
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x7e)
{
continue;
}
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x5e)
{
continue;
}
throw new InvalidFormatException(
"Rar format version pre-4 is unsupported."
);
}
}
else
{
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
}
}
}
catch (Exception e)
{
if (!leaveStreamOpen)
{
stream.Dispose();
}
throw new InvalidFormatException("Error trying to read rar signature.", e);
}
throw new InvalidFormatException("Rar signature not found");
}
}

View File

@@ -2,6 +2,8 @@ using System;
using System.Buffers.Binary;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Tar.Headers;
@@ -187,6 +189,92 @@ internal sealed class TarHeader
return true;
}
internal async Task<bool> ReadAsync(Stream stream, CancellationToken cancellationToken)
{
string? longName = null;
string? longLinkName = null;
var hasLongValue = true;
byte[] buffer;
EntryType entryType;
do
{
buffer = await ReadBlockAsync(stream, cancellationToken).ConfigureAwait(false);
if (buffer.Length == 0)
{
return false;
}
entryType = ReadEntryType(buffer);
// LongName and LongLink headers can follow each other and need
// to apply to the header that follows them.
if (entryType == EntryType.LongName)
{
longName = await ReadLongNameAsync(stream, buffer, cancellationToken)
.ConfigureAwait(false);
continue;
}
else if (entryType == EntryType.LongLink)
{
longLinkName = await ReadLongNameAsync(stream, buffer, cancellationToken)
.ConfigureAwait(false);
continue;
}
hasLongValue = false;
} while (hasLongValue);
// Check header checksum
if (!checkChecksum(buffer))
{
return false;
}
Name = longName ?? ArchiveEncoding.Decode(buffer, 0, 100).TrimNulls();
EntryType = entryType;
Size = ReadSize(buffer);
// for symlinks, additionally read the linkname
if (entryType == EntryType.SymLink || entryType == EntryType.HardLink)
{
LinkName = longLinkName ?? ArchiveEncoding.Decode(buffer, 157, 100).TrimNulls();
}
Mode = ReadAsciiInt64Base8(buffer, 100, 7);
if (entryType == EntryType.Directory)
{
Mode |= 0b1_000_000_000;
}
UserId = ReadAsciiInt64Base8oldGnu(buffer, 108, 7);
GroupId = ReadAsciiInt64Base8oldGnu(buffer, 116, 7);
var unixTimeStamp = ReadAsciiInt64Base8(buffer, 136, 11);
LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime();
Magic = ArchiveEncoding.Decode(buffer, 257, 6).TrimNulls();
if (!string.IsNullOrEmpty(Magic) && "ustar".Equals(Magic))
{
var namePrefix = ArchiveEncoding.Decode(buffer, 345, 157).TrimNulls();
if (!string.IsNullOrEmpty(namePrefix))
{
Name = namePrefix + "/" + Name;
}
}
if (entryType != EntryType.LongName && Name.Length == 0)
{
return false;
}
return true;
}
private string ReadLongName(BinaryReader reader, byte[] buffer)
{
var size = ReadSize(buffer);
@@ -211,6 +299,39 @@ internal sealed class TarHeader
return ArchiveEncoding.Decode(nameBytes, 0, nameBytes.Length).TrimNulls();
}
private async Task<string> ReadLongNameAsync(
Stream stream,
byte[] buffer,
CancellationToken cancellationToken
)
{
var size = ReadSize(buffer);
// Validate size to prevent memory exhaustion from malformed headers
if (size < 0 || size > MAX_LONG_NAME_SIZE)
{
throw new InvalidFormatException(
$"Long name size {size} is invalid or exceeds maximum allowed size of {MAX_LONG_NAME_SIZE} bytes"
);
}
var nameLength = (int)size;
var nameBytes = new byte[nameLength];
await ReadFullyAsync(stream, nameBytes, 0, nameLength, cancellationToken)
.ConfigureAwait(false);
var remainingBytesToRead = BLOCK_SIZE - (nameLength % BLOCK_SIZE);
// Read the rest of the block and discard the data
if (remainingBytesToRead > 0 && remainingBytesToRead < BLOCK_SIZE)
{
var discardBuffer = new byte[remainingBytesToRead];
await ReadFullyAsync(stream, discardBuffer, 0, remainingBytesToRead, cancellationToken)
.ConfigureAwait(false);
}
return ArchiveEncoding.Decode(nameBytes, 0, nameBytes.Length).TrimNulls();
}
private static EntryType ReadEntryType(byte[] buffer) => (EntryType)buffer[156];
private long ReadSize(byte[] buffer)
@@ -234,6 +355,60 @@ internal sealed class TarHeader
return buffer;
}
private static async Task<byte[]> ReadBlockAsync(
Stream stream,
CancellationToken cancellationToken
)
{
var buffer = new byte[BLOCK_SIZE];
var bytesRead = 0;
while (bytesRead < BLOCK_SIZE)
{
var read = await stream
.ReadAsync(buffer, bytesRead, BLOCK_SIZE - bytesRead, cancellationToken)
.ConfigureAwait(false);
if (read == 0)
{
// End of stream: if nothing was read at all, return an empty array;
// a partially read block means the archive is truncated.
if (bytesRead == 0)
{
return Array.Empty<byte>();
}
throw new InvalidFormatException("Truncated TAR header: incomplete 512-byte block");
}
bytesRead += read;
}
return buffer;
}
private static async Task ReadFullyAsync(
Stream stream,
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
var totalRead = 0;
while (totalRead < count)
{
cancellationToken.ThrowIfCancellationRequested();
var read = await stream
.ReadAsync(buffer, offset + totalRead, count - totalRead, cancellationToken)
.ConfigureAwait(false);
if (read == 0)
{
throw new EndOfStreamException("End of stream reached, but more bytes were expected.");
}
totalRead += read;
}
}
private static void WriteStringBytes(ReadOnlySpan<byte> name, Span<byte> buffer, int length)
{
name.CopyTo(buffer);

View File

@@ -1,5 +1,8 @@
using System.Collections.Generic;
using System.IO;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
@@ -54,6 +57,54 @@ internal static class TarHeaderFactory
}
}
internal static async IAsyncEnumerable<TarHeader?> ReadHeaderAsync(
StreamingMode mode,
Stream stream,
ArchiveEncoding archiveEncoding,
[EnumeratorCancellation] CancellationToken cancellationToken
)
{
while (true)
{
cancellationToken.ThrowIfCancellationRequested();
TarHeader? header = null;
try
{
header = new TarHeader(archiveEncoding);
if (!await header.ReadAsync(stream, cancellationToken).ConfigureAwait(false))
{
yield break;
}
switch (mode)
{
case StreamingMode.Seekable:
{
header.DataStartPosition = stream.Position;
//skip to nearest 512
stream.Position += PadTo512(header.Size);
}
break;
case StreamingMode.Streaming:
{
header.PackedStream = new TarReadOnlySubStream(stream, header.Size);
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
}
catch
{
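// Swallow read errors and yield a null header; callers treat null as a corrupt
// or incomplete archive.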
header = null;
}
yield return header;
}
}
private static long PadTo512(long size)
{
var zeros = (int)(size % 512);

View File

@@ -1,4 +1,7 @@
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -19,6 +22,22 @@ internal class DirectoryEndHeader : ZipHeader
Comment = reader.ReadBytes(CommentLength);
}
internal async Task ReadAsync(Stream stream, CancellationToken cancellationToken)
{
VolumeNumber = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
FirstVolumeWithDirectory = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken)
.ConfigureAwait(false);
TotalNumberOfEntriesInDisk = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken)
.ConfigureAwait(false);
TotalNumberOfEntries = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken)
.ConfigureAwait(false);
DirectorySize = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
DirectoryStartOffsetRelativeToDisk = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
CommentLength = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
Comment = await ZipHeaderFactory.ReadBytesAsync(stream, CommentLength, cancellationToken).ConfigureAwait(false);
}
public ushort VolumeNumber { get; private set; }
public ushort FirstVolumeWithDirectory { get; private set; }

View File

@@ -1,5 +1,8 @@
using System.Buffers.Binary;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -90,7 +93,114 @@ internal class DirectoryEntryHeader : ZipFileEntry
if (unixTimeExtra is not null)
{
// Tuple order is last modified time, last access time, and creation time.
var unixTimeTuple = ((UnixTimeExtraField)unixTimeExtra).UnicodeTimes;
if (unixTimeTuple.Item1.HasValue)
{
var dosTime = Utility.DateTimeToDosTime(unixTimeTuple.Item1.Value);
LastModifiedDate = (ushort)(dosTime >> 16);
LastModifiedTime = (ushort)(dosTime & 0x0FFFF);
}
else if (unixTimeTuple.Item2.HasValue)
{
var dosTime = Utility.DateTimeToDosTime(unixTimeTuple.Item2.Value);
LastModifiedDate = (ushort)(dosTime >> 16);
LastModifiedTime = (ushort)(dosTime & 0x0FFFF);
}
else if (unixTimeTuple.Item3.HasValue)
{
var dosTime = Utility.DateTimeToDosTime(unixTimeTuple.Item3.Value);
LastModifiedDate = (ushort)(dosTime >> 16);
LastModifiedTime = (ushort)(dosTime & 0x0FFFF);
}
}
}
internal async Task ReadAsync(Stream stream, CancellationToken cancellationToken)
{
Version = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
VersionNeededToExtract = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken)
.ConfigureAwait(false);
Flags = (HeaderFlags)await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
CompressionMethod = (ZipCompressionMethod)
await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
OriginalLastModifiedTime =
LastModifiedTime = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
OriginalLastModifiedDate =
LastModifiedDate = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
Crc = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
CompressedSize = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
UncompressedSize = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
var nameLength = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
var extraLength = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
var commentLength = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
DiskNumberStart = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
InternalFileAttributes = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken)
.ConfigureAwait(false);
ExternalFileAttributes = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
RelativeOffsetOfEntryHeader = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
var name = await ZipHeaderFactory.ReadBytesAsync(stream, nameLength, cancellationToken).ConfigureAwait(false);
var extra = await ZipHeaderFactory.ReadBytesAsync(stream, extraLength, cancellationToken).ConfigureAwait(false);
var comment = await ZipHeaderFactory.ReadBytesAsync(stream, commentLength, cancellationToken)
.ConfigureAwait(false);
if (Flags.HasFlag(HeaderFlags.Efs))
{
Name = ArchiveEncoding.DecodeUTF8(name);
Comment = ArchiveEncoding.DecodeUTF8(comment);
}
else
{
Name = ArchiveEncoding.Decode(name);
Comment = ArchiveEncoding.Decode(comment);
}
LoadExtra(extra);
var unicodePathExtra = Extra.FirstOrDefault(u =>
u.Type == ExtraDataType.UnicodePathExtraField
);
if (unicodePathExtra != null && ArchiveEncoding.Forced == null)
{
Name = ((ExtraUnicodePathExtraField)unicodePathExtra).UnicodeName;
}
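// 0xFFFFFFFF in the 32-bit size/offset fields is a sentinel meaning the real value
// lives in the Zip64 extended information extra field.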
var zip64ExtraData = Extra.OfType<Zip64ExtendedInformationExtraField>().FirstOrDefault();
if (zip64ExtraData != null)
{
zip64ExtraData.Process(
UncompressedSize,
CompressedSize,
RelativeOffsetOfEntryHeader,
DiskNumberStart
);
if (CompressedSize == uint.MaxValue)
{
CompressedSize = zip64ExtraData.CompressedSize;
}
if (UncompressedSize == uint.MaxValue)
{
UncompressedSize = zip64ExtraData.UncompressedSize;
}
if (RelativeOffsetOfEntryHeader == uint.MaxValue)
{
RelativeOffsetOfEntryHeader = zip64ExtraData.RelativeOffsetOfEntryHeader;
}
}
var unixTimeExtra = Extra.FirstOrDefault(u => u.Type == ExtraDataType.UnixTimeExtraField);
if (unixTimeExtra is not null)
{
var unixTimeTuple = ((UnixTimeExtraField)unixTimeExtra).UnicodeTimes;
if (unixTimeTuple.Item1.HasValue)

View File

@@ -1,5 +1,8 @@
using System.Buffers.Binary;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -96,5 +99,87 @@ internal class LocalEntryHeader : ZipFileEntry
}
}
internal async Task ReadAsync(Stream stream, CancellationToken cancellationToken)
{
Version = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
Flags = (HeaderFlags)await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
CompressionMethod = (ZipCompressionMethod)
await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
OriginalLastModifiedTime =
LastModifiedTime = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
OriginalLastModifiedDate =
LastModifiedDate = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
Crc = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
CompressedSize = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
UncompressedSize = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
var nameLength = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
var extraLength = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
var name = await ZipHeaderFactory.ReadBytesAsync(stream, nameLength, cancellationToken).ConfigureAwait(false);
var extra = await ZipHeaderFactory.ReadBytesAsync(stream, extraLength, cancellationToken).ConfigureAwait(false);
if (Flags.HasFlag(HeaderFlags.Efs))
{
Name = ArchiveEncoding.DecodeUTF8(name);
}
else
{
Name = ArchiveEncoding.Decode(name);
}
LoadExtra(extra);
var unicodePathExtra = Extra.FirstOrDefault(u =>
u.Type == ExtraDataType.UnicodePathExtraField
);
if (unicodePathExtra != null && ArchiveEncoding.Forced == null)
{
Name = ((ExtraUnicodePathExtraField)unicodePathExtra).UnicodeName;
}
var zip64ExtraData = Extra.OfType<Zip64ExtendedInformationExtraField>().FirstOrDefault();
if (zip64ExtraData != null)
{
zip64ExtraData.Process(UncompressedSize, CompressedSize, 0, 0);
if (CompressedSize == uint.MaxValue)
{
CompressedSize = zip64ExtraData.CompressedSize;
}
if (UncompressedSize == uint.MaxValue)
{
UncompressedSize = zip64ExtraData.UncompressedSize;
}
}
var unixTimeExtra = Extra.FirstOrDefault(u => u.Type == ExtraDataType.UnixTimeExtraField);
if (unixTimeExtra is not null)
{
var unixTimeTuple = ((UnixTimeExtraField)unixTimeExtra).UnicodeTimes;
if (unixTimeTuple.Item1.HasValue)
{
var dosTime = Utility.DateTimeToDosTime(unixTimeTuple.Item1.Value);
LastModifiedDate = (ushort)(dosTime >> 16);
LastModifiedTime = (ushort)(dosTime & 0x0FFFF);
}
else if (unixTimeTuple.Item2.HasValue)
{
var dosTime = Utility.DateTimeToDosTime(unixTimeTuple.Item2.Value);
LastModifiedDate = (ushort)(dosTime >> 16);
LastModifiedTime = (ushort)(dosTime & 0x0FFFF);
}
else if (unixTimeTuple.Item3.HasValue)
{
var dosTime = Utility.DateTimeToDosTime(unixTimeTuple.Item3.Value);
LastModifiedDate = (ushort)(dosTime >> 16);
LastModifiedTime = (ushort)(dosTime & 0x0FFFF);
}
}
}
internal ushort Version { get; private set; }
}

View File

@@ -1,4 +1,7 @@
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -26,6 +29,35 @@ internal class Zip64DirectoryEndHeader : ZipHeader
);
}
internal async Task ReadAsync(Stream stream, CancellationToken cancellationToken)
{
SizeOfDirectoryEndRecord = (long)
await ZipHeaderFactory.ReadUInt64Async(stream, cancellationToken).ConfigureAwait(false);
VersionMadeBy = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
VersionNeededToExtract = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken)
.ConfigureAwait(false);
VolumeNumber = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
FirstVolumeWithDirectory = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
TotalNumberOfEntriesInDisk = (long)
await ZipHeaderFactory.ReadUInt64Async(stream, cancellationToken).ConfigureAwait(false);
TotalNumberOfEntries = (long)
await ZipHeaderFactory.ReadUInt64Async(stream, cancellationToken).ConfigureAwait(false);
DirectorySize = (long)
await ZipHeaderFactory.ReadUInt64Async(stream, cancellationToken).ConfigureAwait(false);
DirectoryStartOffsetRelativeToDisk = (long)
await ZipHeaderFactory.ReadUInt64Async(stream, cancellationToken).ConfigureAwait(false);
DataSector = await ZipHeaderFactory.ReadBytesAsync(
stream,
(int)(
SizeOfDirectoryEndRecord
- SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS
),
cancellationToken
)
.ConfigureAwait(false);
}
private const int SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS = 44;
public long SizeOfDirectoryEndRecord { get; private set; }

View File

@@ -1,4 +1,7 @@
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -14,6 +17,16 @@ internal class Zip64DirectoryEndLocatorHeader : ZipHeader
TotalNumberOfVolumes = reader.ReadUInt32();
}
internal async Task ReadAsync(Stream stream, CancellationToken cancellationToken)
{
FirstVolumeWithDirectory = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
RelativeOffsetOfTheEndOfDirectoryRecord = (long)
await ZipHeaderFactory.ReadUInt64Async(stream, cancellationToken).ConfigureAwait(false);
TotalNumberOfVolumes = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
}
public uint FirstVolumeWithDirectory { get; private set; }
public long RelativeOffsetOfTheEndOfDirectoryRecord { get; private set; }

View File

@@ -1,6 +1,9 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
@@ -85,6 +88,90 @@ internal sealed class SeekableZipHeaderFactory : ZipHeaderFactory
}
}
internal async IAsyncEnumerable<ZipHeader> ReadSeekableHeaderAsync(
Stream stream,
[EnumeratorCancellation] CancellationToken cancellationToken
)
{
// Still need BinaryReader for synchronous SeekBackToHeader, until SeekBackToHeaderAsync is implemented
var reader = new BinaryReader(stream);
SeekBackToHeader(stream, reader); // Synchronous due to stream.Seek and BinaryReader
var eocd_location = stream.Position;
var entry = new DirectoryEndHeader();
await entry.ReadAsync(stream, cancellationToken).ConfigureAwait(false);
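// For Zip64 archives, the EOCD is preceded by a Zip64 EOCD locator; follow it to the
// Zip64 end-of-central-directory record to find where the central directory starts.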
if (entry.IsZip64)
{
_zip64 = true;
stream.Seek(eocd_location - ZIP64_EOCD_LENGTH - 4, SeekOrigin.Begin); // Synchronous seek
var zip64_locator = await ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
if (zip64_locator != ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR)
{
throw new ArchiveException("Failed to locate the Zip64 Directory Locator");
}
var zip64Locator = new Zip64DirectoryEndLocatorHeader();
await zip64Locator.ReadAsync(stream, cancellationToken).ConfigureAwait(false);
stream.Seek(
zip64Locator.RelativeOffsetOfTheEndOfDirectoryRecord,
SeekOrigin.Begin
); // Synchronous seek
var zip64Signature = await ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
if (zip64Signature != ZIP64_END_OF_CENTRAL_DIRECTORY)
{
throw new ArchiveException("Failed to locate the Zip64 Header");
}
var zip64Entry = new Zip64DirectoryEndHeader();
await zip64Entry.ReadAsync(stream, cancellationToken).ConfigureAwait(false);
stream.Seek(
zip64Entry.DirectoryStartOffsetRelativeToDisk,
SeekOrigin.Begin
); // Synchronous seek
}
else
{
stream.Seek(entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin); // Synchronous seek
}
var position = stream.Position;
while (true)
{
cancellationToken.ThrowIfCancellationRequested();
stream.Position = position; // Synchronous seek
var signature = await ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
var nextHeader = await ReadHeaderAsync(
signature,
stream,
_zip64,
cancellationToken
).ConfigureAwait(false);
position = stream.Position;
if (nextHeader is null)
{
yield break;
}
if (nextHeader is DirectoryEntryHeader entryHeader)
{
entryHeader.HasData = entryHeader.CompressedSize != 0;
yield return entryHeader;
}
else if (nextHeader is DirectoryEndHeader endHeader)
{
yield return endHeader;
}
}
}
private static bool IsMatch(byte[] haystack, int position, byte[] needle)
{
for (var i = 0; i < needle.Length; i++)

View File

@@ -1,7 +1,10 @@
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
@@ -200,4 +203,230 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
yield return header;
}
}
internal async IAsyncEnumerable<ZipHeader> ReadStreamHeaderAsync(
Stream stream,
[System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken
)
{
if (stream is not SharpCompressStream)
{
if (stream is SourceStream src)
{
stream = new SharpCompressStream(
stream,
src.ReaderOptions.LeaveStreamOpen,
bufferSize: src.ReaderOptions.BufferSize
);
}
else
{
throw new ArgumentException("Stream must be a SharpCompressStream or SourceStream", nameof(stream));
}
}
var rewindableStream = (SharpCompressStream)stream;
while (true)
{
cancellationToken.ThrowIfCancellationRequested();
uint headerBytes = 0;
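// If the previous local entry deferred its sizes to a post-data descriptor, consume
// that descriptor now and back-fill the sizes before reading the next signature.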
if (
_lastEntryHeader != null
&& FlagUtility.HasFlag(_lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor)
)
{
if (_lastEntryHeader.Part is null)
{
continue;
}
var pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;
var crc = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
if (crc == POST_DATA_DESCRIPTOR)
{
crc = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
}
_lastEntryHeader.Crc = crc;
ulong compSize = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
ulong uncompSize = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
headerBytes = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
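// Disambiguate 32-bit vs Zip64 descriptors: if the dword just read is the next header
// signature, the descriptor used 32-bit sizes; if the size fields hold the 0xFFFFFFFF
// sentinel, fresh 64-bit sizes follow; otherwise treat the dwords already read as the
// low/high halves of 64-bit sizes.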
bool isSentinel = compSize == 0xFFFFFFFF || uncompSize == 0xFFFFFFFF;
bool isHeader = headerBytes == 0x04034b50 || headerBytes == 0x02014b50;
if (!isHeader && !isSentinel)
{
compSize = (uncompSize << 32) | compSize;
uncompSize =
((ulong)headerBytes << 32)
| await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
headerBytes = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
}
else if (isSentinel)
{
compSize = await ReadUInt64Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
uncompSize = await ReadUInt64Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
}
_lastEntryHeader.CompressedSize = (long)compSize;
_lastEntryHeader.UncompressedSize = (long)uncompSize;
if (pos.HasValue)
{
_lastEntryHeader.DataStartPosition = pos - _lastEntryHeader.CompressedSize;
}
}
else if (_lastEntryHeader != null && _lastEntryHeader.IsZip64)
{
if (_lastEntryHeader.Part is null)
continue;
var pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;
headerBytes = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
await ReadBytesAsync(rewindableStream, 12, cancellationToken).ConfigureAwait(false); //skip a bunch of fields we don't care about
var crc = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
if (crc == POST_DATA_DESCRIPTOR)
{
crc = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
}
_lastEntryHeader.Crc = crc;
var compressed_size = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
var uncompressed_size = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
var test_header = !(headerBytes == 0x04034b50 || headerBytes == 0x02014b50);
var test_64bit = ((long)uncompressed_size << 32) | compressed_size;
if (test_64bit == _lastEntryHeader.CompressedSize && test_header)
{
_lastEntryHeader.UncompressedSize =
((long)await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false) << 32) | headerBytes;
headerBytes = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
}
else
{
_lastEntryHeader.UncompressedSize = uncompressed_size;
}
if (pos.HasValue)
{
_lastEntryHeader.DataStartPosition = pos - _lastEntryHeader.CompressedSize;
rewindableStream.Position = pos.Value + 4;
}
}
else
{
headerBytes = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
}
_lastEntryHeader = null;
var header = await ReadHeaderAsync(
headerBytes,
rewindableStream,
cancellationToken: cancellationToken
).ConfigureAwait(false);
if (header is null)
{
yield break;
}
if (header.ZipHeaderType == ZipHeaderType.LocalEntry)
{
var local_header = ((LocalEntryHeader)header);
var dir_header = _entries?.FirstOrDefault(entry =>
entry.Key == local_header.Name
&& local_header.CompressedSize == 0
&& local_header.UncompressedSize == 0
&& local_header.Crc == 0
&& local_header.IsDirectory == false
);
if (dir_header != null)
{
local_header.UncompressedSize = dir_header.Size;
local_header.CompressedSize = dir_header.CompressedSize;
local_header.Crc = (uint)dir_header.Crc;
}
if (local_header.CompressedSize > 0)
{
header.HasData = true;
}
else if (local_header.Flags.HasFlag(HeaderFlags.UsePostDataDescriptor))
{
var nextHeaderBytes = await ReadUInt32Async(
rewindableStream,
cancellationToken
).ConfigureAwait(false);
((IStreamStack)rewindableStream).Rewind(sizeof(uint));
header.HasData = !IsHeader(nextHeaderBytes);
}
else
{
header.HasData = false;
}
}
yield return header;
}
}
private static async Task<byte[]> ReadBytesAsync(
Stream stream,
int count,
CancellationToken cancellationToken
)
{
var buffer = new byte[count];
var totalRead = 0;
// ReadAsync may return fewer bytes than requested; loop until the buffer is full
// or the stream ends.
while (totalRead < count)
{
var read = await stream
.ReadAsync(buffer, totalRead, count - totalRead, cancellationToken)
.ConfigureAwait(false);
if (read == 0)
{
throw new EndOfStreamException();
}
totalRead += read;
}
return buffer;
}
private static async Task<uint> ReadUInt32Async(
Stream stream,
CancellationToken cancellationToken
)
{
var buffer = await ReadBytesAsync(stream, 4, cancellationToken).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt32LittleEndian(buffer);
}
private static async Task<ushort> ReadUInt16Async(
Stream stream,
CancellationToken cancellationToken
)
{
var buffer = await ReadBytesAsync(stream, 2, cancellationToken).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt16LittleEndian(buffer);
}
}

View File

@@ -1,6 +1,9 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
@@ -106,6 +109,129 @@ internal class ZipHeaderFactory
}
}
protected async Task<ZipHeader?> ReadHeaderAsync(
uint headerBytes,
Stream stream,
bool zip64 = false,
CancellationToken cancellationToken = default
)
{
switch (headerBytes)
{
case ENTRY_HEADER_BYTES:
{
var entryHeader = new LocalEntryHeader(_archiveEncoding);
await entryHeader.ReadAsync(stream, cancellationToken).ConfigureAwait(false);
await LoadHeaderAsync(entryHeader, stream, cancellationToken).ConfigureAwait(false);
_lastEntryHeader = entryHeader;
return entryHeader;
}
case DIRECTORY_START_HEADER_BYTES:
{
var entry = new DirectoryEntryHeader(_archiveEncoding);
await entry.ReadAsync(stream, cancellationToken).ConfigureAwait(false);
return entry;
}
case POST_DATA_DESCRIPTOR:
{
if (
_lastEntryHeader != null
&& FlagUtility.HasFlag(
_lastEntryHeader.NotNull().Flags,
HeaderFlags.UsePostDataDescriptor
)
)
{
_lastEntryHeader.Crc = await ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
_lastEntryHeader.CompressedSize = zip64
? (long)await ReadUInt64Async(stream, cancellationToken)
.ConfigureAwait(false)
: await ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
_lastEntryHeader.UncompressedSize = zip64
? (long)await ReadUInt64Async(stream, cancellationToken)
.ConfigureAwait(false)
: await ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
}
else
{
await ReadBytesAsync(stream, zip64 ? 20 : 12, cancellationToken)
.ConfigureAwait(false);
}
return null;
}
case DIGITAL_SIGNATURE:
return null;
case DIRECTORY_END_HEADER_BYTES:
{
var entry = new DirectoryEndHeader();
await entry.ReadAsync(stream, cancellationToken).ConfigureAwait(false);
return entry;
}
case SPLIT_ARCHIVE_HEADER_BYTES:
{
return new SplitHeader();
}
case ZIP64_END_OF_CENTRAL_DIRECTORY:
{
var entry = new Zip64DirectoryEndHeader();
await entry.ReadAsync(stream, cancellationToken).ConfigureAwait(false);
return entry;
}
case ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR:
{
var entry = new Zip64DirectoryEndLocatorHeader();
await entry.ReadAsync(stream, cancellationToken).ConfigureAwait(false);
return entry;
}
default:
return null;
}
}
internal static async Task<byte[]> ReadBytesAsync(
Stream stream,
int count,
CancellationToken cancellationToken
)
{
var buffer = new byte[count];
var totalRead = 0;
// ReadAsync may return fewer bytes than requested; loop until the buffer is full
// or the stream ends.
while (totalRead < count)
{
var read = await stream
.ReadAsync(buffer, totalRead, count - totalRead, cancellationToken)
.ConfigureAwait(false);
if (read == 0)
{
throw new EndOfStreamException();
}
totalRead += read;
}
return buffer;
}
internal static async Task<uint> ReadUInt32Async(
Stream stream,
CancellationToken cancellationToken
)
{
var buffer = await ReadBytesAsync(stream, 4, cancellationToken).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt32LittleEndian(buffer);
}
internal static async Task<ushort> ReadUInt16Async(
Stream stream,
CancellationToken cancellationToken
)
{
var buffer = await ReadBytesAsync(stream, 2, cancellationToken).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt16LittleEndian(buffer);
}
internal static async Task<ulong> ReadUInt64Async(
Stream stream,
CancellationToken cancellationToken
)
{
var buffer = await ReadBytesAsync(stream, 8, cancellationToken).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt64LittleEndian(buffer);
}
internal static bool IsHeader(uint headerBytes)
{
switch (headerBytes)
@@ -176,12 +302,6 @@ internal class ZipHeaderFactory
return;
}
//if (FlagUtility.HasFlag(entryHeader.Flags, HeaderFlags.UsePostDataDescriptor))
//{
// entryHeader.PackedStream = new ReadOnlySubStream(stream);
//}
//else
//{
switch (_mode)
{
case StreamingMode.Seekable:
@@ -202,7 +322,87 @@ internal class ZipHeaderFactory
throw new InvalidFormatException("Invalid StreamingMode");
}
}
}
//}
private async Task LoadHeaderAsync(
ZipFileEntry entryHeader,
Stream stream,
CancellationToken cancellationToken
)
{
if (FlagUtility.HasFlag(entryHeader.Flags, HeaderFlags.Encrypted))
{
if (
!entryHeader.IsDirectory
&& entryHeader.CompressedSize == 0
&& FlagUtility.HasFlag(entryHeader.Flags, HeaderFlags.UsePostDataDescriptor)
)
{
throw new NotSupportedException(
"SharpCompress cannot currently read non-seekable Zip Streams with encrypted data that has been written in a non-seekable manner."
);
}
if (_password is null)
{
throw new CryptographicException("No password supplied for encrypted zip.");
}
entryHeader.Password = _password;
if (entryHeader.CompressionMethod == ZipCompressionMethod.WinzipAes)
{
var data = entryHeader.Extra.SingleOrDefault(x =>
x.Type == ExtraDataType.WinZipAes
);
if (data != null)
{
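// WinZip AES entries prepend a salt (half the AES key length) and a 2-byte password
// verification value to the data; read them and shrink CompressedSize to the actual
// encrypted payload.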
var keySize = (WinzipAesKeySize)data.DataBytes[4];
var salt = await ReadBytesAsync(
stream,
WinzipAesEncryptionData.KeyLengthInBytes(keySize) / 2,
cancellationToken
).ConfigureAwait(false);
var passwordVerifyValue = await ReadBytesAsync(stream, 2, cancellationToken)
.ConfigureAwait(false);
entryHeader.WinzipAesEncryptionData = new WinzipAesEncryptionData(
keySize,
salt,
passwordVerifyValue,
_password
);
entryHeader.CompressedSize -= (uint)(salt.Length + 2);
}
}
}
if (entryHeader.IsDirectory)
{
return;
}
switch (_mode)
{
case StreamingMode.Seekable:
{
entryHeader.DataStartPosition = stream.Position;
stream.Position += entryHeader.CompressedSize;
break;
}
case StreamingMode.Streaming:
{
entryHeader.PackedStream = stream;
break;
}
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
}
}

View File

@@ -1,6 +1,9 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Tar;
@@ -46,6 +49,12 @@ public class GZipFactory
int bufferSize = ReaderOptions.DefaultBufferSize
) => GZipArchive.IsGZipFile(stream);
public override Task<bool> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken,
string? password = null
) => GZipArchive.IsGZipFileAsync(stream, cancellationToken);
#endregion
#region IArchiveFactory
@@ -54,10 +63,24 @@ public class GZipFactory
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
GZipArchive.Open(stream, readerOptions);
/// <inheritdoc/>
public async Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await GZipArchive.OpenAsync(stream, readerOptions, cancellationToken).ConfigureAwait(false);
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
GZipArchive.Open(fileInfo, readerOptions);
/// <inheritdoc/>
public async Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await GZipArchive.OpenAsync(fileInfo, readerOptions, cancellationToken).ConfigureAwait(false);
#endregion
#region IMultiArchiveFactory
@@ -66,10 +89,24 @@ public class GZipFactory
public IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null) =>
GZipArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public async Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await GZipArchive.OpenAsync(streams, readerOptions, cancellationToken).ConfigureAwait(false);
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
GZipArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public async Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await GZipArchive.OpenAsync(fileInfos, readerOptions, cancellationToken).ConfigureAwait(false);
#endregion
#region IReaderFactory

View File

@@ -1,5 +1,7 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Readers;
namespace SharpCompress.Factories;
@@ -42,6 +44,18 @@ public interface IFactory
int bufferSize = ReaderOptions.DefaultBufferSize
);
/// <summary>
/// Returns true if the stream represents an archive of the format defined by this type.
/// </summary>
/// <param name="stream">A stream, pointing to the beginning of the archive.</param>
/// <param name="cancellationToken">the cancellation token</param>
/// <param name="password">optional password</param>
Task<bool> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken,
string? password = null
);
/// <summary>
/// From a passed in archive (zip, rar, 7z, 001), return all parts.
/// </summary>

View File

@@ -1,5 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Rar;
using SharpCompress.Common;
@@ -35,6 +38,12 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
int bufferSize = ReaderOptions.DefaultBufferSize
) => RarArchive.IsRarFile(stream);
public override Task<bool> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken,
string? password = null
) => RarArchive.IsRarFileAsync(stream, cancellationToken);
/// <inheritdoc/>
public override FileInfo? GetFilePart(int index, FileInfo part1) =>
RarArchiveVolumeFactory.GetFilePart(index, part1);
@@ -47,10 +56,24 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
RarArchive.Open(stream, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
RarArchive.Open(fileInfo, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
#endregion
#region IMultiArchiveFactory
@@ -59,10 +82,24 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
public IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null) =>
RarArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
RarArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
#endregion
#region IReaderFactory

View File

@@ -1,5 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.SevenZip;
using SharpCompress.Common;
@@ -34,6 +37,12 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
int bufferSize = ReaderOptions.DefaultBufferSize
) => SevenZipArchive.IsSevenZipFile(stream);
public override Task<bool> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken,
string? password = null
) => SevenZipArchive.IsSevenZipFileAsync(stream, cancellationToken);
#endregion
#region IArchiveFactory
@@ -42,10 +51,24 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
SevenZipArchive.Open(stream, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
SevenZipArchive.Open(fileInfo, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
#endregion
#region IMultiArchiveFactory
@@ -54,10 +77,24 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
public IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null) =>
SevenZipArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
SevenZipArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
#endregion
#region reader

View File

@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
@@ -57,238 +59,39 @@ public class TarFactory
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
if (!stream.CanSeek)
{
return TarArchive.IsTarFile(stream); // For non-seekable streams, just check if it's a tar file
}
) => TarArchive.IsTarFile(stream);
var startPosition = stream.Position;
// First check if it's a regular tar file
if (TarArchive.IsTarFile(stream))
{
stream.Seek(startPosition, SeekOrigin.Begin); // Seek back for consistency
return true;
}
// Seek back after the tar file check
stream.Seek(startPosition, SeekOrigin.Begin);
if (compressionOptions == null)
{
return false;
}
try
{
// Try each compression option to see if it contains a tar file
foreach (var testOption in compressionOptions)
{
if (testOption.Type == CompressionType.None)
{
continue; // Skip uncompressed
}
stream.Seek(startPosition, SeekOrigin.Begin);
try
{
if (testOption.CanHandle(stream))
{
stream.Seek(startPosition, SeekOrigin.Begin);
// Try to decompress and check if it contains a tar archive
// For compression formats that don't support leaveOpen, we need to save/restore position
var positionBeforeDecompress = stream.Position;
Stream? decompressedStream = null;
bool streamWasClosed = false;
try
{
decompressedStream = testOption.Type switch
{
CompressionType.BZip2 => new BZip2Stream(stream, CompressionMode.Decompress, true),
_ => testOption.CreateStream(stream) // For other types, may close the stream
};
if (TarArchive.IsTarFile(decompressedStream))
{
return true;
}
}
catch (ObjectDisposedException)
{
streamWasClosed = true;
throw; // Stream was closed, can't continue
}
finally
{
decompressedStream?.Dispose();
if (!streamWasClosed && stream.CanSeek)
{
try
{
stream.Seek(positionBeforeDecompress, SeekOrigin.Begin);
}
catch
{
// If seek fails, the stream might have been closed
}
}
}
// Seek back to start after decompression attempt
stream.Seek(startPosition, SeekOrigin.Begin);
}
}
catch
{
// If decompression fails, it's not this format - continue to next option
try
{
stream.Seek(startPosition, SeekOrigin.Begin);
}
catch
{
// Ignore seek failures
}
}
}
return false;
}
finally
{
try
{
stream.Seek(startPosition, SeekOrigin.Begin);
}
catch
{
// Ignore seek failures
}
}
}
public override Task<bool> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken,
string? password = null
) => TarArchive.IsTarFileAsync(stream, cancellationToken);
#endregion
#region IArchiveFactory
/// <inheritdoc/>
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
readerOptions ??= new ReaderOptions();
// Try to detect and handle compressed tar formats
if (stream.CanSeek)
{
var startPosition = stream.Position;
// Try each compression option to see if we can decompress it
foreach (var testOption in compressionOptions)
{
if (testOption.Type == CompressionType.None)
{
continue; // Skip uncompressed
}
stream.Seek(startPosition, SeekOrigin.Begin);
if (testOption.CanHandle(stream))
{
stream.Seek(startPosition, SeekOrigin.Begin);
// Decompress the entire stream into a seekable MemoryStream
using var decompressedStream = testOption.CreateStream(stream);
var memoryStream = new MemoryStream();
decompressedStream.CopyTo(memoryStream);
memoryStream.Position = 0;
// Verify it's actually a tar file
if (TarArchive.IsTarFile(memoryStream))
{
memoryStream.Position = 0;
// Return a TarArchive from the decompressed memory stream
// The TarArchive will own the MemoryStream and dispose it when disposed
var options = new ReaderOptions
{
LeaveStreamOpen = false, // Ensure the MemoryStream is disposed with the archive
ArchiveEncoding = readerOptions?.ArchiveEncoding ?? new ArchiveEncoding()
};
return TarArchive.Open(memoryStream, options);
}
memoryStream.Dispose();
}
}
stream.Seek(startPosition, SeekOrigin.Begin);
}
// Fall back to normal tar archive opening
return TarArchive.Open(stream, readerOptions);
}
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
TarArchive.Open(stream, readerOptions);
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
readerOptions ??= new ReaderOptions();
public async Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await TarArchive.OpenAsync(stream, readerOptions, cancellationToken).ConfigureAwait(false);
// Try to detect and handle compressed tar formats by file extension and content
using var fileStream = fileInfo.OpenRead();
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
TarArchive.Open(fileInfo, readerOptions);
// Try each compression option
foreach (var testOption in compressionOptions)
{
if (testOption.Type == CompressionType.None)
{
continue; // Skip uncompressed
}
// Check if file extension matches
var fileName = fileInfo.Name.ToLowerInvariant();
if (testOption.KnownExtensions.Any(ext => fileName.EndsWith(ext)))
{
fileStream.Position = 0;
// Verify it's the right compression format
if (testOption.CanHandle(fileStream))
{
fileStream.Position = 0;
// Decompress the entire file into a seekable MemoryStream
using var decompressedStream = testOption.CreateStream(fileStream);
var memoryStream = new MemoryStream();
decompressedStream.CopyTo(memoryStream);
memoryStream.Position = 0;
// Verify it's actually a tar file
if (TarArchive.IsTarFile(memoryStream))
{
memoryStream.Position = 0;
// Return a TarArchive from the decompressed memory stream
// The TarArchive will own the MemoryStream and dispose it when disposed
var options = new ReaderOptions
{
LeaveStreamOpen = false, // Ensure the MemoryStream is disposed with the archive
ArchiveEncoding = readerOptions?.ArchiveEncoding ?? new ArchiveEncoding()
};
return TarArchive.Open(memoryStream, options);
}
memoryStream.Dispose();
}
}
}
// fileStream will be closed by the using statement
// Fall back to normal tar archive opening
return TarArchive.Open(fileInfo, readerOptions);
}
/// <inheritdoc/>
public async Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await TarArchive.OpenAsync(fileInfo, readerOptions, cancellationToken).ConfigureAwait(false);
#endregion
@@ -298,10 +101,24 @@ public class TarFactory
public IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null) =>
TarArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public async Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await TarArchive.OpenAsync(streams, readerOptions, cancellationToken).ConfigureAwait(false);
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
TarArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public async Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await TarArchive.OpenAsync(fileInfos, readerOptions, cancellationToken).ConfigureAwait(false);
#endregion
#region IReaderFactory
@@ -463,6 +280,7 @@ public class TarFactory
#endregion
#region IWriteableArchiveFactory
/// <inheritdoc/>

View File

@@ -1,5 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
@@ -79,6 +82,44 @@ public class ZipFactory
return false;
}
public override async Task<bool> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken,
string? password = null
)
{
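// Probe as a single-part zip first; if that fails and the stream is seekable, rewind
// and probe for a split/multipart archive via the central directory.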
var startPosition = stream.CanSeek ? stream.Position : -1;
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(
stream,
bufferSize: ReaderOptions.DefaultBufferSize
);
}
if (await ZipArchive.IsZipFileAsync(stream, password, cancellationToken).ConfigureAwait(false))
{
return true;
}
if (!stream.CanSeek)
{
return false;
}
stream.Position = startPosition;
if (await ZipArchive.IsZipMultiAsync(stream, password, cancellationToken).ConfigureAwait(false))
{
return true;
}
stream.Position = startPosition;
return false;
}
/// <inheritdoc/>
public override FileInfo? GetFilePart(int index, FileInfo part1) =>
ZipArchiveVolumeFactory.GetFilePart(index, part1);
@@ -91,10 +132,22 @@ public class ZipFactory
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
ZipArchive.Open(stream, readerOptions);
public Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
ZipArchive.Open(fileInfo, readerOptions);
public Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
#endregion
#region IMultiArchiveFactory
@@ -103,10 +156,22 @@ public class ZipFactory
public IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null) =>
ZipArchive.Open(streams, readerOptions);
public Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
ZipArchive.Open(fileInfos, readerOptions);
public Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
#endregion
#region IReaderFactory

View File

@@ -0,0 +1,38 @@
#if !NETFRAMEWORK && !NETSTANDARD2_0
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Polyfills;
internal static class AsyncEnumerableExtensions
{
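// Minimal FirstOrDefaultAsync helpers for IAsyncEnumerable<T>, compiled only for
// target frameworks that support async enumerables.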
public static async Task<T?> FirstOrDefaultAsync<T>(
this IAsyncEnumerable<T> source,
CancellationToken cancellationToken = default
)
{
await foreach (var item in source.WithCancellation(cancellationToken))
{
return item;
}
return default;
}
public static async Task<T?> FirstOrDefaultAsync<T>(
this IAsyncEnumerable<T> source,
Func<T, bool> predicate,
CancellationToken cancellationToken = default
)
{
await foreach (var item in source.WithCancellation(cancellationToken))
{
if (predicate(item))
{
return item;
}
}
return default;
}
}
#endif

View File

@@ -1,63 +0,0 @@
using System;
using System.IO;
using SharpCompress.Archives;
using Xunit;
namespace SharpCompress.Test;
public class ArchiveFactoryCompressedTarTests : TestBase
{
[Fact]
public void ArchiveFactory_Open_TarBz2_ThrowsHelpfulException()
{
var testFile = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2");
var exception = Assert.Throws<InvalidOperationException>(() =>
{
using var archive = ArchiveFactory.Open(testFile);
});
Assert.Contains("tar.bz2", exception.Message);
Assert.Contains("ReaderFactory", exception.Message);
}
[Fact]
public void ArchiveFactory_Open_TarLz_ThrowsHelpfulException()
{
var testFile = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.lz");
var exception = Assert.Throws<InvalidOperationException>(() =>
{
using var archive = ArchiveFactory.Open(testFile);
});
Assert.Contains("tar.lz", exception.Message);
Assert.Contains("ReaderFactory", exception.Message);
}
[Fact]
public void ArchiveFactory_Open_TarBz2Stream_ThrowsHelpfulException()
{
var testFile = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2");
using var stream = File.OpenRead(testFile);
var exception = Assert.Throws<InvalidOperationException>(() =>
{
using var archive = ArchiveFactory.Open(stream);
});
Assert.Contains("tar.bz2", exception.Message);
Assert.Contains("ReaderFactory", exception.Message);
}
[Fact]
public void ArchiveFactory_Open_TarLzStream_ThrowsHelpfulException()
{
var testFile = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.lz");
using var stream = File.OpenRead(testFile);
var exception = Assert.Throws<InvalidOperationException>(() =>
{
using var archive = ArchiveFactory.Open(stream);
});
Assert.Contains("tar.lz", exception.Message);
Assert.Contains("ReaderFactory", exception.Message);
}
}

View File

@@ -633,13 +633,4 @@ public class RarArchiveTests : ArchiveTests
"Rar5.encrypted_filesOnly.rar",
"Failure jpg exe Empty тест.txt jpg\\test.jpg exe\\test.exe"
);
[Fact]
public void Rar_TestEncryptedDetection()
{
using var passwordProtectedFilesArchive = RarArchive.Open(
Path.Combine(TEST_ARCHIVES_PATH, "Rar.encrypted_filesOnly.rar")
);
Assert.True(passwordProtectedFilesArchive.IsEncrypted);
}
}

View File

@@ -224,15 +224,6 @@ public class SevenZipArchiveTests : ArchiveTests
);
}
[Fact]
public void SevenZipArchive_TestEncryptedDetection()
{
using var passwordProtectedFilesArchive = SevenZipArchive.Open(
Path.Combine(TEST_ARCHIVES_PATH, "7Zip.encryptedFiles.7z")
);
Assert.True(passwordProtectedFilesArchive.IsEncrypted);
}
[Fact]
public void SevenZipArchive_TestSolidDetection()
{