Compare commits


19 Commits

Author SHA1 Message Date
copilot-swe-agent[bot]
f59b14a278 WIP: Implement compressed tar detection in TarFactory (seeking clarification)
Enhanced TarFactory.IsArchive() to detect compressed tar formats (tar.bz2, tar.lz, etc.) by:
- Checking whether the stream contains a tar header directly (for uncompressed tar)
- Testing each supported compression format to see whether the decompressed data contains a tar file

Enhanced the TarFactory.Open() methods to decompress compressed tar files into a MemoryStream for seekable access.

Current blockers:
- Some compression stream constructors (e.g., LZipStream) do not accept a leaveOpen parameter
- Disposing streams during the detection phase interferes with the factory detection flow

Awaiting clarification from @adamhathcock on the preferred architectural approach.

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-09 16:39:41 +00:00
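An editorial sketch of the detection approach this commit describes, assuming it lives inside the SharpCompress library (TarHeader is internal there). It reuses only calls shown in the ArchiveFactory diff below (BZip2Stream.IsBZip2, TarHeader.Read via a BinaryReader); the CompressedTarProbe/IsBZip2Tar names are hypothetical.

```csharp
using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;

internal static class CompressedTarProbe
{
    // Hypothetical helper: returns true when the stream looks like a bzip2-compressed tar.
    // It tests the compression format first, then looks for a tar header in the
    // decompressed data, as the commit message describes.
    public static bool IsBZip2Tar(Stream stream)
    {
        var start = stream.Position;
        try
        {
            if (!BZip2Stream.IsBZip2(stream))
            {
                return false;
            }
            stream.Seek(start, SeekOrigin.Begin);
            // Disposing the BZip2Stream may also close the wrapped stream, which is
            // the disposal blocker the commit message calls out.
            using var decompressed = new BZip2Stream(stream, CompressionMode.Decompress, true);
            var header = new TarHeader(new ArchiveEncoding());
            return header.Read(new BinaryReader(decompressed));
        }
        catch
        {
            return false;
        }
        finally
        {
            try { stream.Seek(start, SeekOrigin.Begin); } catch { /* stream may already be closed */ }
        }
    }
}
```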
copilot-swe-agent[bot]
3870cc8d34 Improve technical accuracy of error messages
Clarify that the Archive API requires seekable streams and that decompression streams are not seekable, rather than stating that decompression itself requires random access.

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-09 08:41:37 +00:00
copilot-swe-agent[bot]
242e442a8c Add helpful error messages for compressed tar archives in ArchiveFactory
Detect when users try to open tar.bz2, tar.lz, and other compressed tar formats with ArchiveFactory.Open() and provide clear guidance to use ReaderFactory.Open() instead. These formats require forward-only reading due to decompression stream limitations and cannot be used with the random-access Archive API.

Includes tests to verify the helpful error messages are shown.

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-09 08:36:50 +00:00
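For reference, a minimal sketch of what the new error message steers users toward: forward-only extraction of a compressed tar with ReaderFactory.Open(). The file and output paths are hypothetical.

```csharp
using System.IO;
using SharpCompress.Common;
using SharpCompress.Readers;

// ReaderFactory auto-detects the compression + tar combination and reads forward-only,
// so it works where the seekable-stream Archive API cannot.
using Stream stream = File.OpenRead("example.tar.bz2"); // hypothetical file
using var reader = ReaderFactory.Open(stream);
while (reader.MoveToNextEntry())
{
    if (!reader.Entry.IsDirectory)
    {
        reader.WriteEntryToDirectory(
            "output", // hypothetical destination directory
            new ExtractionOptions { ExtractFullPath = true, Overwrite = true });
    }
}
```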
copilot-swe-agent[bot]
d95d1e928b Initial plan 2025-12-09 08:20:53 +00:00
Adam Hathcock
2a3086a0d7 Merge pull request #1060 from HeroponRikiBestest/rar-7z-password 2025-12-03 19:58:47 +00:00
HeroponRikiBestest
41c3cc1a18 Csharpier 2025-12-03 12:05:16 -05:00
HeroponRikiBestest
1b1df86a11 Improve logic 2025-12-02 10:02:49 -05:00
HeroponRikiBestest
e0660e7775 Add tests 2025-12-02 09:55:24 -05:00
HeroponRikiBestest
99a6c4de88 Add archive-level IsEncrypted flag 2025-12-02 09:47:06 -05:00
Adam Hathcock
ffa765bd97 Merge pull request #1057 from adamhathcock/adam/add-copilot-instructions 2025-11-30 13:48:55 +00:00
Adam Hathcock
b1696524b3 Merge pull request #1058 from adamhathcock/copilot/sub-pr-1057 2025-11-30 13:48:31 +00:00
copilot-swe-agent[bot]
6a37c55085 Consolidate agent instructions into AGENTS.md
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-30 12:52:22 +00:00
copilot-swe-agent[bot]
9c1c6fff9f Initial plan 2025-11-30 12:49:36 +00:00
Adam Hathcock
db8c6f4bcb first pass of instructions...consolidate? 2025-11-30 12:47:57 +00:00
Adam Hathcock
ff17ecda7d Merge pull request #1055 from adamhathcock/adam/vscode-fixes
add vscode config
2025-11-30 12:47:16 +00:00
Adam Hathcock
692058677c Merge pull request #1056 from adamhathcock/copilot/sub-pr-1055
Fix launch.json debug configurations to use net10.0
2025-11-30 12:41:38 +00:00
copilot-swe-agent[bot]
1e90d69912 Update launch.json to use net10.0 instead of net8.0
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-30 12:22:31 +00:00
copilot-swe-agent[bot]
64a1cc68e1 Initial plan 2025-11-30 12:20:37 +00:00
Adam Hathcock
20353f35ff add vscode config 2025-11-30 12:05:08 +00:00
37 changed files with 809 additions and 2359 deletions

.gitignore (vendored, 1 changed line)

@@ -15,7 +15,6 @@ tests/TestArchives/*/Scratch
tests/TestArchives/*/Scratch2
.vs
tools
.vscode
.idea/
.DS_Store

.vscode/extensions.json (vendored, new file, 9 lines)

@@ -0,0 +1,9 @@
{
"recommendations": [
"ms-dotnettools.csdevkit",
"ms-dotnettools.csharp",
"ms-dotnettools.vscode-dotnet-runtime",
"csharpier.csharpier-vscode",
"formulahendry.dotnet-test-explorer"
]
}

.vscode/launch.json (vendored, new file, 97 lines)

@@ -0,0 +1,97 @@
{
"version": "0.2.0",
"configurations": [
{
"name": "Debug Tests (net10.0)",
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build",
"program": "dotnet",
"args": [
"test",
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
"-f",
"net10.0",
"--no-build",
"--verbosity=normal"
],
"cwd": "${workspaceFolder}",
"console": "internalConsole",
"stopAtEntry": false
},
{
"name": "Debug Specific Test (net10.0)",
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build",
"program": "dotnet",
"args": [
"test",
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
"-f",
"net10.0",
"--no-build",
"--filter",
"FullyQualifiedName~${input:testName}"
],
"cwd": "${workspaceFolder}",
"console": "internalConsole",
"stopAtEntry": false
},
{
"name": "Debug Performance Tests",
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build",
"program": "dotnet",
"args": [
"run",
"--project",
"${workspaceFolder}/tests/SharpCompress.Performance/SharpCompress.Performance.csproj",
"--no-build"
],
"cwd": "${workspaceFolder}",
"console": "internalConsole",
"stopAtEntry": false
},
{
"name": "Debug Build Script",
"type": "coreclr",
"request": "launch",
"program": "dotnet",
"args": [
"run",
"--project",
"${workspaceFolder}/build/build.csproj",
"--",
"${input:buildTarget}"
],
"cwd": "${workspaceFolder}",
"console": "internalConsole",
"stopAtEntry": false
}
],
"inputs": [
{
"id": "testName",
"type": "promptString",
"description": "Enter test name or pattern (e.g., TestMethodName or ClassName)",
"default": ""
},
{
"id": "buildTarget",
"type": "pickString",
"description": "Select build target",
"options": [
"clean",
"restore",
"build",
"test",
"format",
"publish",
"default"
],
"default": "build"
}
]
}

.vscode/settings.json (vendored, new file, 29 lines)

@@ -0,0 +1,29 @@
{
"dotnet.defaultSolution": "SharpCompress.sln",
"files.exclude": {
"**/bin": true,
"**/obj": true
},
"files.watcherExclude": {
"**/bin/**": true,
"**/obj/**": true,
"**/artifacts/**": true
},
"search.exclude": {
"**/bin": true,
"**/obj": true,
"**/artifacts": true
},
"editor.formatOnSave": false,
"[csharp]": {
"editor.defaultFormatter": "csharpier.csharpier-vscode",
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.fixAll": "explicit"
}
},
"csharpier.enableDebugLogs": false,
"omnisharp.enableRoslynAnalyzers": true,
"omnisharp.enableEditorConfigSupport": true,
"dotnet-test-explorer.testProjectPath": "tests/**/*.csproj"
}

.vscode/tasks.json (vendored, new file, 178 lines)

@@ -0,0 +1,178 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "build",
"command": "dotnet",
"type": "process",
"args": [
"build",
"${workspaceFolder}/SharpCompress.sln",
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary;ForceNoAlign"
],
"problemMatcher": "$msCompile",
"group": {
"kind": "build",
"isDefault": true
}
},
{
"label": "build-release",
"command": "dotnet",
"type": "process",
"args": [
"build",
"${workspaceFolder}/SharpCompress.sln",
"-c",
"Release",
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary;ForceNoAlign"
],
"problemMatcher": "$msCompile",
"group": "build"
},
{
"label": "build-library",
"command": "dotnet",
"type": "process",
"args": [
"build",
"${workspaceFolder}/src/SharpCompress/SharpCompress.csproj",
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary;ForceNoAlign"
],
"problemMatcher": "$msCompile",
"group": "build"
},
{
"label": "restore",
"command": "dotnet",
"type": "process",
"args": [
"restore",
"${workspaceFolder}/SharpCompress.sln"
],
"problemMatcher": "$msCompile"
},
{
"label": "clean",
"command": "dotnet",
"type": "process",
"args": [
"clean",
"${workspaceFolder}/SharpCompress.sln"
],
"problemMatcher": "$msCompile"
},
{
"label": "test",
"command": "dotnet",
"type": "process",
"args": [
"test",
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
"--no-build",
"--verbosity=normal"
],
"problemMatcher": "$msCompile",
"group": {
"kind": "test",
"isDefault": true
},
"dependsOn": "build"
},
{
"label": "test-net10",
"command": "dotnet",
"type": "process",
"args": [
"test",
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
"-f",
"net10.0",
"--no-build",
"--verbosity=normal"
],
"problemMatcher": "$msCompile",
"group": "test",
"dependsOn": "build"
},
{
"label": "test-net48",
"command": "dotnet",
"type": "process",
"args": [
"test",
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
"-f",
"net48",
"--no-build",
"--verbosity=normal"
],
"problemMatcher": "$msCompile",
"group": "test",
"dependsOn": "build"
},
{
"label": "format",
"command": "dotnet",
"type": "process",
"args": [
"csharpier",
"."
],
"problemMatcher": []
},
{
"label": "format-check",
"command": "dotnet",
"type": "process",
"args": [
"csharpier",
"check",
"."
],
"problemMatcher": []
},
{
"label": "run-build-script",
"command": "dotnet",
"type": "process",
"args": [
"run",
"--project",
"${workspaceFolder}/build/build.csproj"
],
"problemMatcher": "$msCompile"
},
{
"label": "pack",
"command": "dotnet",
"type": "process",
"args": [
"pack",
"${workspaceFolder}/src/SharpCompress/SharpCompress.csproj",
"-c",
"Release",
"-o",
"${workspaceFolder}/artifacts/"
],
"problemMatcher": "$msCompile",
"dependsOn": "build-release"
},
{
"label": "performance-tests",
"command": "dotnet",
"type": "process",
"args": [
"run",
"--project",
"${workspaceFolder}/tests/SharpCompress.Performance/SharpCompress.Performance.csproj",
"-c",
"Release"
],
"problemMatcher": "$msCompile"
}
]
}

View File

@@ -49,6 +49,30 @@ SharpCompress is a pure C# compression library supporting multiple archive forma
- Use `dotnet test` to run tests
- Solution file: `SharpCompress.sln`
### Directory Structure
```
src/SharpCompress/
├── Archives/ # IArchive implementations (Zip, Tar, Rar, 7Zip, GZip)
├── Readers/ # IReader implementations (forward-only)
├── Writers/ # IWriter implementations (forward-only)
├── Compressors/ # Low-level compression streams (BZip2, Deflate, LZMA, etc.)
├── Factories/ # Format detection and factory pattern
├── Common/ # Shared types (ArchiveType, Entry, Options)
├── Crypto/ # Encryption implementations
└── IO/ # Stream utilities and wrappers
tests/SharpCompress.Test/
├── Zip/, Tar/, Rar/, SevenZip/, GZip/, BZip2/ # Format-specific tests
├── TestBase.cs # Base test class with helper methods
└── TestArchives/ # Test data (not checked into main test project)
```
### Factory Pattern
All format types implement factory interfaces (`IArchiveFactory`, `IReaderFactory`, `IWriterFactory`) for auto-detection:
- `ReaderFactory.Open()` - Auto-detects format by probing stream
- `WriterFactory.Open()` - Creates writer for specified `ArchiveType`
- Factories located in: `src/SharpCompress/Factories/`
## Nullable Reference Types
- Declare variables non-nullable, and check for `null` at entry points.
@@ -116,3 +140,18 @@ SharpCompress supports multiple archive and compression formats:
- Use test archives from `tests/TestArchives` directory for consistency.
- Test stream disposal and `LeaveStreamOpen` behavior.
- Test edge cases: empty archives, large files, corrupted archives, encrypted archives.
### Test Organization
- Base class: `TestBase` - Provides `TEST_ARCHIVES_PATH`, `SCRATCH_FILES_PATH`, temp directory management
- Framework: xUnit with AwesomeAssertions
- Test archives: `tests/TestArchives/` - Use existing archives, don't create new ones unnecessarily
- Match naming style of nearby test files
## Common Pitfalls
1. **Don't mix Archive and Reader APIs** - Archive needs seekable stream, Reader doesn't
2. **Solid archives (Rar, 7Zip)** - Use `ExtractAllEntries()` for best performance, not individual entry extraction (see the sketch after this list)
3. **Stream disposal** - Always set `LeaveStreamOpen` explicitly when needed (default is to close)
4. **Tar + non-seekable stream** - Must provide file size or it will throw
5. **Multi-framework differences** - Some features differ between .NET Framework and modern .NET (e.g., Mono.Posix)
6. **Format detection** - Use `ReaderFactory.Open()` for auto-detection, test with actual archive files
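As referenced in pitfall 2, a minimal sketch of solid-archive extraction through `ExtractAllEntries()`; the archive path and output directory are hypothetical.

```csharp
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.Readers;

// For solid archives, ExtractAllEntries() returns a forward-only reader that
// decompresses the solid stream once, instead of re-decompressing for every entry.
using var archive = ArchiveFactory.Open("example.7z"); // hypothetical file
using var reader = archive.ExtractAllEntries();
while (reader.MoveToNextEntry())
{
    if (!reader.Entry.IsDirectory)
    {
        reader.WriteEntryToDirectory(
            "output", // hypothetical destination directory
            new ExtractionOptions { ExtractFullPath = true, Overwrite = true });
    }
}
```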

View File

@@ -1,10 +1,7 @@
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -15,8 +12,10 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
where TEntry : IArchiveEntry
where TVolume : IVolume
{
private readonly LazyReadOnlyCollection<TVolume> _lazyVolumes;
private readonly LazyReadOnlyCollection<TEntry> _lazyEntries;
private bool _disposed;
protected SourceStream? SourceStream { get; internal set; }
private readonly SourceStream? _sourceStream;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionBegin;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionEnd;
@@ -25,24 +24,21 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
public event EventHandler<FilePartExtractionBeginEventArgs>? FilePartExtractionBegin;
protected ReaderOptions ReaderOptions { get; }
protected Lazy<IReadOnlyCollection<TVolume>> LazyVolumes { get; internal set; }
protected Lazy<IReadOnlyCollection<TEntry>> LazyEntries { get; internal set; }
internal AbstractArchive(ArchiveType type, SourceStream sourceStream)
{
Type = type;
ReaderOptions = sourceStream.ReaderOptions;
SourceStream = sourceStream;
LazyVolumes = new Lazy<IReadOnlyCollection<TVolume>>(() => LoadVolumes(SourceStream).ToList());
LazyEntries = new Lazy<IReadOnlyCollection<TEntry>>(() => LoadEntries(Volumes).ToList());
_sourceStream = sourceStream;
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(_sourceStream));
_lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
}
internal AbstractArchive(ArchiveType type)
{
Type = type;
ReaderOptions = new();
LazyVolumes = new Lazy<IReadOnlyCollection<TVolume>>(() => Enumerable.Empty<TVolume>().ToList());
LazyEntries = new Lazy<IReadOnlyCollection<TEntry>>(() => Enumerable.Empty<TEntry>().ToList());
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
_lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
}
public ArchiveType Type { get; }
@@ -65,12 +61,12 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
/// </summary>
public virtual ICollection<TEntry> Entries => LazyEntries.Value;
public virtual ICollection<TEntry> Entries => _lazyEntries;
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveVolumes across the one or many parts of the RarArchive.
/// </summary>
public ICollection<TVolume> Volumes => LazyVolumes.Value;
public ICollection<TVolume> Volumes => _lazyVolumes;
/// <summary>
/// The total size of the files compressed in the archive.
@@ -86,26 +82,18 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream sourceStream);
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);
protected virtual Task<IReadOnlyCollection<TEntry>> LoadEntriesAsync(IEnumerable<TVolume> volumes, CancellationToken cancellationToken)
{
return Task.FromResult<IReadOnlyCollection<TEntry>>(LoadEntries(volumes).ToList());
}
protected virtual Task<IReadOnlyCollection<TVolume>> LoadVolumesAsync(SourceStream sourceStream, CancellationToken cancellationToken)
{
return Task.FromResult<IReadOnlyCollection<TVolume>>(LoadVolumes(sourceStream).ToList());
}
IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();
IEnumerable<IVolume> IArchive.Volumes => LazyVolumes.Value.Cast<IVolume>();
IEnumerable<IVolume> IArchive.Volumes => _lazyVolumes.Cast<IVolume>();
public virtual void Dispose()
{
if (!_disposed)
{
LazyVolumes.Value.ForEach(v => v.Dispose());
LazyEntries.Value.Cast<Entry>().ForEach(x => x.Close());
SourceStream?.Dispose();
_lazyVolumes.ForEach(v => v.Dispose());
_lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
_sourceStream?.Dispose();
_disposed = true;
}
@@ -113,8 +101,8 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
void IArchiveExtractionListener.EnsureEntriesLoaded()
{
LazyEntries.Value.EnsureFullyLoaded();
LazyVolumes.Value.EnsureFullyLoaded();
_lazyEntries.EnsureFullyLoaded();
_lazyVolumes.EnsureFullyLoaded();
}
void IExtractionListener.FireCompressedBytesRead(
@@ -173,6 +161,11 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
/// </summary>
public virtual bool IsSolid => false;
/// <summary>
/// Archive is ENCRYPTED (this means the Archive has password-protected files).
/// </summary>
public virtual bool IsEncrypted => false;
/// <summary>
/// The archive can find all the parts of the archive needed to fully extract the archive. This forces the parsing of the entire archive.
/// </summary>
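A brief usage sketch of the archive-level IsEncrypted flag added above; the file name and password are hypothetical.

```csharp
using SharpCompress.Archives;
using SharpCompress.Readers;

// IsEncrypted is the new archive-level flag: true when the archive contains
// password-protected entries (see the RarArchive/SevenZipArchive overrides below).
using var probe = ArchiveFactory.Open("secret.rar"); // hypothetical file
if (probe.IsEncrypted)
{
    // Re-open with credentials before reading entry streams.
    using var archive = ArchiveFactory.Open(
        "secret.rar",
        new ReaderOptions { Password = "secret" }); // hypothetical password
    // ... extract entries as usual
}
```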

View File

@@ -2,9 +2,11 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Factories;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -26,25 +28,6 @@ public static class ArchiveFactory
return FindFactory<IArchiveFactory>(stream).Open(stream, readerOptions);
}
/// <summary>
/// Opens an Archive for random access
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
/// <returns></returns>
public static async Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
readerOptions ??= new ReaderOptions();
stream = SharpCompressStream.Create(stream, bufferSize: readerOptions.BufferSize);
var factory = await FindFactoryAsync<IArchiveFactory>(stream, cancellationToken);
return await factory.OpenAsync(stream, readerOptions, cancellationToken);
}
public static IWritableArchive Create(ArchiveType type)
{
var factory = Factory
@@ -70,22 +53,6 @@ public static class ArchiveFactory
return Open(new FileInfo(filePath), options);
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static Task<IArchive> OpenAsync(
string filePath,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
filePath.NotNullOrEmpty(nameof(filePath));
return OpenAsync(new FileInfo(filePath), options, cancellationToken);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -98,24 +65,6 @@ public static class ArchiveFactory
return FindFactory<IArchiveFactory>(fileInfo).Open(fileInfo, options);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
var factory = await FindFactoryAsync<IArchiveFactory>(fileInfo, cancellationToken);
return await factory.OpenAsync(fileInfo, options, cancellationToken);
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
@@ -142,38 +91,6 @@ public static class ArchiveFactory
return FindFactory<IMultiArchiveFactory>(fileInfo).Open(filesArray, options);
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
fileInfos.NotNull(nameof(fileInfos));
var filesArray = fileInfos.ToArray();
if (filesArray.Length == 0)
{
throw new InvalidOperationException("No files to open");
}
var fileInfo = filesArray[0];
if (filesArray.Length == 1)
{
return await OpenAsync(fileInfo, options, cancellationToken);
}
fileInfo.NotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
var factory = await FindFactoryAsync<IMultiArchiveFactory>(fileInfo, cancellationToken);
return await factory.OpenAsync(filesArray, options, cancellationToken);
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
@@ -200,38 +117,6 @@ public static class ArchiveFactory
return FindFactory<IMultiArchiveFactory>(firstStream).Open(streamsArray, options);
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
IEnumerable<Stream> streams,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
streams.NotNull(nameof(streams));
var streamsArray = streams.ToArray();
if (streamsArray.Length == 0)
{
throw new InvalidOperationException("No streams");
}
var firstStream = streamsArray[0];
if (streamsArray.Length == 1)
{
return await OpenAsync(firstStream, options, cancellationToken);
}
firstStream.NotNull(nameof(firstStream));
options ??= new ReaderOptions();
var factory = await FindFactoryAsync<IMultiArchiveFactory>(firstStream, cancellationToken);
return await factory.OpenAsync(streamsArray, options, cancellationToken);
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
@@ -245,40 +130,15 @@ public static class ArchiveFactory
archive.WriteToDirectory(destinationDirectory, options);
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static async Task WriteToDirectoryAsync(
string sourceArchive,
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
)
{
using var archive = await OpenAsync(sourceArchive, cancellationToken: cancellationToken);
await archive.WriteToDirectoryAsync(destinationDirectory, options, cancellationToken);
}
private static T FindFactory<T>(FileInfo finfo)
where T : IFactory
{
finfo.NotNull(nameof(finfo));
using Stream stream = finfo.OpenRead();
return FindFactory<T>(stream);
return FindFactory<T>(stream, finfo.Name);
}
private static async Task<T> FindFactoryAsync<T>(
FileInfo finfo,
CancellationToken cancellationToken
)
where T : IFactory
{
finfo.NotNull(nameof(finfo));
using Stream stream = finfo.OpenRead();
return await FindFactoryAsync<T>(stream, cancellationToken);
}
private static T FindFactory<T>(Stream stream)
private static T FindFactory<T>(Stream stream, string? fileName = null)
where T : IFactory
{
stream.NotNull(nameof(stream));
@@ -303,36 +163,14 @@ public static class ArchiveFactory
}
}
var extensions = string.Join(", ", factories.Select(item => item.Name));
stream.Seek(startPosition, SeekOrigin.Begin);
throw new InvalidOperationException(
$"Cannot determine compressed stream type. Supported Archive Formats: {extensions}"
);
}
private static async Task<T> FindFactoryAsync<T>(Stream stream, CancellationToken cancellationToken)
where T : IFactory
{
stream.NotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
// Check if this is a compressed tar file (tar.bz2, tar.lz, etc.)
// These formats are supported by ReaderFactory but not by ArchiveFactory
var compressedTarMessage = TryGetCompressedTarMessage(stream, fileName);
if (compressedTarMessage != null)
{
throw new ArgumentException("Stream should be readable and seekable");
}
var factories = Factory.Factories.OfType<T>();
var startPosition = stream.Position;
foreach (var factory in factories)
{
stream.Seek(startPosition, SeekOrigin.Begin);
if (await factory.IsArchiveAsync(stream, cancellationToken).ConfigureAwait(false))
{
stream.Seek(startPosition, SeekOrigin.Begin);
return factory;
}
throw new InvalidOperationException(compressedTarMessage);
}
var extensions = string.Join(", ", factories.Select(item => item.Name));
@@ -353,17 +191,6 @@ public static class ArchiveFactory
return IsArchive(s, out type, bufferSize);
}
public static async Task<(bool, ArchiveType?)> IsArchiveAsync(
string filePath,
CancellationToken cancellationToken = default,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
filePath.NotNullOrEmpty(nameof(filePath));
using Stream s = File.OpenRead(filePath);
return await IsArchiveAsync(s, cancellationToken, bufferSize);
}
public static bool IsArchive(
Stream stream,
out ArchiveType? type,
@@ -395,36 +222,6 @@ public static class ArchiveFactory
return false;
}
public static async Task<(bool, ArchiveType?)> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken = default,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
stream.NotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException("Stream should be readable and seekable");
}
var startPosition = stream.Position;
foreach (var factory in Factory.Factories)
{
var isArchive = await factory.IsArchiveAsync(stream, cancellationToken)
.ConfigureAwait(false);
stream.Position = startPosition;
if (isArchive)
{
return (true, factory.KnownArchiveType);
}
}
return (false, null);
}
/// <summary>
/// From a passed in archive (zip, rar, 7z, 001), return all parts.
/// </summary>
@@ -465,4 +262,111 @@ public static class ArchiveFactory
}
public static IArchiveFactory AutoFactory { get; } = new AutoArchiveFactory();
/// <summary>
/// Checks if the stream is a compressed tar file (tar.bz2, tar.lz, etc.) that should use ReaderFactory instead.
/// Returns an error message if detected, null otherwise.
/// </summary>
private static string? TryGetCompressedTarMessage(Stream stream, string? fileName)
{
var startPosition = stream.Position;
try
{
// Check if it's a BZip2 file
if (BZip2Stream.IsBZip2(stream))
{
stream.Seek(startPosition, SeekOrigin.Begin);
// Try to decompress and check if it contains a tar archive
using var decompressed = new BZip2Stream(stream, CompressionMode.Decompress, true);
if (IsTarStream(decompressed))
{
return "This appears to be a tar.bz2 archive. The Archive API requires seekable streams, but decompression streams are not seekable. "
+ "Please use ReaderFactory.Open() instead for forward-only extraction, "
+ "or decompress the file first and then open the resulting tar file with ArchiveFactory.Open().";
}
return null;
}
stream.Seek(startPosition, SeekOrigin.Begin);
// Check if it's an LZip file
if (LZipStream.IsLZipFile(stream))
{
stream.Seek(startPosition, SeekOrigin.Begin);
// Try to decompress and check if it contains a tar archive
using var decompressed = new LZipStream(stream, CompressionMode.Decompress);
if (IsTarStream(decompressed))
{
return "This appears to be a tar.lz archive. The Archive API requires seekable streams, but decompression streams are not seekable. "
+ "Please use ReaderFactory.Open() instead for forward-only extraction, "
+ "or decompress the file first and then open the resulting tar file with ArchiveFactory.Open().";
}
return null;
}
// Check file extension as a fallback for other compressed tar formats
if (fileName != null)
{
var lowerFileName = fileName.ToLowerInvariant();
if (
lowerFileName.EndsWith(".tar.bz2")
|| lowerFileName.EndsWith(".tbz")
|| lowerFileName.EndsWith(".tbz2")
|| lowerFileName.EndsWith(".tb2")
|| lowerFileName.EndsWith(".tz2")
|| lowerFileName.EndsWith(".tar.lz")
|| lowerFileName.EndsWith(".tar.xz")
|| lowerFileName.EndsWith(".txz")
|| lowerFileName.EndsWith(".tar.zst")
|| lowerFileName.EndsWith(".tar.zstd")
|| lowerFileName.EndsWith(".tzst")
|| lowerFileName.EndsWith(".tzstd")
|| lowerFileName.EndsWith(".tar.z")
|| lowerFileName.EndsWith(".tz")
|| lowerFileName.EndsWith(".taz")
)
{
return $"The file '{fileName}' appears to be a compressed tar archive. The Archive API requires seekable streams, but decompression streams are not seekable. "
+ "Please use ReaderFactory.Open() instead for forward-only extraction, "
+ "or decompress the file first and then open the resulting tar file with ArchiveFactory.Open().";
}
}
return null;
}
catch
{
// If we can't determine, just return null and let the normal error handling proceed
return null;
}
finally
{
try
{
stream.Seek(startPosition, SeekOrigin.Begin);
}
catch
{
// Ignore seek failures
}
}
}
/// <summary>
/// Checks if a stream contains a tar archive by trying to read a tar header.
/// </summary>
private static bool IsTarStream(Stream stream)
{
try
{
var tarHeader = new TarHeader(new ArchiveEncoding());
return tarHeader.Read(new BinaryReader(stream));
}
catch
{
return false;
}
}
}

View File

@@ -2,13 +2,11 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
using SharpCompress.IO;
using SharpCompress.Polyfills;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
using SharpCompress.Writers;
@@ -29,22 +27,6 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<GZipArchive> OpenAsync(
string filePath,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
filePath.NotNullOrEmpty(nameof(filePath));
return await OpenAsync(new FileInfo(filePath), readerOptions ?? new ReaderOptions(), cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -62,32 +44,6 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<GZipArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
fileInfo.NotNull(nameof(fileInfo));
var archive = new GZipArchive();
archive.SourceStream = new SourceStream(
fileInfo,
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
);
archive.LazyVolumes = new Lazy<IReadOnlyCollection<GZipVolume>>(() => archive.LoadVolumes(archive.SourceStream).ToList());
archive.LazyEntries = new Lazy<IReadOnlyCollection<GZipArchiveEntry>>(
() => archive.LoadEntriesAsync(archive.Volumes, cancellationToken).GetAwaiter().GetResult().ToList()
);
return archive;
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
@@ -109,33 +65,6 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
);
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<GZipArchive> OpenAsync(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
var archive = new GZipArchive();
archive.SourceStream = new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
);
archive.LazyVolumes = new Lazy<IReadOnlyCollection<GZipVolume>>(() => archive.LoadVolumes(archive.SourceStream).ToList());
archive.LazyEntries = new Lazy<IReadOnlyCollection<GZipArchiveEntry>>(
() => archive.LoadEntriesAsync(archive.Volumes, cancellationToken).GetAwaiter().GetResult().ToList()
);
return archive;
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
@@ -154,33 +83,6 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
);
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<GZipArchive> OpenAsync(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
var archive = new GZipArchive();
archive.SourceStream = new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
);
archive.LazyVolumes = new Lazy<IReadOnlyCollection<GZipVolume>>(() => archive.LoadVolumes(archive.SourceStream).ToList());
archive.LazyEntries = new Lazy<IReadOnlyCollection<GZipArchiveEntry>>(
() => archive.LoadEntriesAsync(archive.Volumes, cancellationToken).GetAwaiter().GetResult().ToList()
);
return archive;
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
@@ -200,62 +102,21 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
);
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<GZipArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
var archive = new GZipArchive();
archive.SourceStream = new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions());
archive.LazyVolumes = new Lazy<IReadOnlyCollection<GZipVolume>>(() => archive.LoadVolumes(archive.SourceStream).ToList());
archive.LazyEntries = new Lazy<IReadOnlyCollection<GZipArchiveEntry>>(
() => archive.LoadEntriesAsync(archive.Volumes, cancellationToken).GetAwaiter().GetResult().ToList()
);
return archive;
}
public static GZipArchive Create() => new();
/// <summary>
* Constructor with a SourceStream able to handle FileInfo and Streams.
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="sourceStream"></param>
private GZipArchive(SourceStream sourceStream)
: base(ArchiveType.GZip, sourceStream) { }
private GZipArchive()
: base(ArchiveType.GZip)
{
LazyVolumes = new Lazy<IReadOnlyCollection<GZipVolume>>(() => LoadVolumes(SourceStream!).ToList());
LazyEntries = new Lazy<IReadOnlyCollection<GZipArchiveEntry>>(() => LoadEntries(Volumes).ToList());
}
protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream sourceStream)
{
sourceStream.LoadAllParts();
return sourceStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, 0));
}
protected override async Task<IReadOnlyCollection<GZipVolume>> LoadVolumesAsync(SourceStream sourceStream, CancellationToken cancellationToken)
{
await sourceStream.LoadAllPartsAsync(cancellationToken).ConfigureAwait(false);
return sourceStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, 0)).ToList();
}
public static bool IsGZipFile(string filePath) => IsGZipFile(new FileInfo(filePath));
public static bool IsGZipFile(FileInfo fileInfo)
@@ -269,25 +130,6 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
return IsGZipFile(stream);
}
public static Task<bool> IsGZipFileAsync(
string filePath,
CancellationToken cancellationToken = default
) => IsGZipFileAsync(new FileInfo(filePath), cancellationToken);
public static async Task<bool> IsGZipFileAsync(
FileInfo fileInfo,
CancellationToken cancellationToken = default
)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return await IsGZipFileAsync(stream, cancellationToken).ConfigureAwait(false);
}
public void SaveTo(string filePath) => SaveTo(new FileInfo(filePath));
public void SaveTo(FileInfo fileInfo)
@@ -325,28 +167,6 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
return true;
}
public static async Task<bool> IsGZipFileAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
// read the header on the first read
byte[] header = new byte[10];
// workitem 8501: handle edge case (decompress empty stream)
if (!await stream.ReadFullyAsync(header, cancellationToken).ConfigureAwait(false))
{
return false;
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
return false;
}
return true;
}
internal GZipArchive()
: base(ArchiveType.GZip) { }
@@ -424,17 +244,6 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
);
}
protected override async Task<IReadOnlyCollection<GZipArchiveEntry>> LoadEntriesAsync(IEnumerable<GZipVolume> volumes, CancellationToken cancellationToken)
{
var list = new List<GZipArchiveEntry>();
var stream = volumes.Single().Stream;
list.Add(new GZipArchiveEntry(
this,
new GZipFilePart(stream, ReaderOptions.ArchiveEncoding)
));
return list.AsReadOnly();
}
protected override IReader CreateReaderForSolidExtraction()
{
var stream = Volumes.Single().Stream;
@@ -442,4 +251,3 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
return GZipReader.Open(stream);
}
}

View File

@@ -1,6 +1,4 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Factories;
using SharpCompress.Readers;
@@ -28,34 +26,10 @@ public interface IArchiveFactory : IFactory
/// <param name="readerOptions">reading options.</param>
IArchive Open(Stream stream, ReaderOptions? readerOptions = null);
/// <summary>
/// Opens an Archive for random access.
/// </summary>
/// <param name="stream">An open, readable and seekable stream.</param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">the cancellation token.</param>
Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo">the file to open.</param>
/// <param name="readerOptions">reading options.</param>
IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null);
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo">the file to open.</param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">the cancellation token.</param>
Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
}

View File

@@ -1,7 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Factories;
using SharpCompress.Readers;
@@ -29,34 +27,10 @@ public interface IMultiArchiveFactory : IFactory
/// <param name="readerOptions">reading options.</param>
IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null);
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">the cancellation token.</param>
Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
/// <summary>
/// Constructor with IEnumerable Stream objects, multi and split support.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions">reading options.</param>
IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null);
/// <summary>
/// Constructor with IEnumerable Stream objects, multi and split support.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">the cancellation token.</param>
Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
}

View File

@@ -2,8 +2,6 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
@@ -86,6 +84,8 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
public override bool IsSolid => Volumes.First().IsSolidArchive;
public override bool IsEncrypted => Entries.First(x => !x.IsDirectory).IsEncrypted;
public virtual int MinVersion => Volumes.First().MinVersion;
public virtual int MaxVersion => Volumes.First().MaxVersion;
@@ -193,24 +193,6 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
return IsRarFile(stream);
}
public static Task<bool> IsRarFileAsync(
string filePath,
CancellationToken cancellationToken = default
) => IsRarFileAsync(new FileInfo(filePath), cancellationToken);
public static async Task<bool> IsRarFileAsync(
FileInfo fileInfo,
CancellationToken cancellationToken = default
)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return await IsRarFileAsync(stream, cancellationToken).ConfigureAwait(false);
}
public static bool IsRarFile(Stream stream, ReaderOptions? options = null)
{
try
@@ -224,23 +206,5 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
}
}
public static async Task<bool> IsRarFileAsync(
Stream stream,
CancellationToken cancellationToken = default,
ReaderOptions? options = null
)
{
try
{
await MarkHeader.ReadAsync(stream, true, false, cancellationToken)
.ConfigureAwait(false);
return true;
}
catch
{
return false;
}
}
#endregion
}

View File

@@ -2,14 +2,10 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.SevenZip;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
using SharpCompress.Polyfills;
using SharpCompress.Readers;
namespace SharpCompress.Archives.SevenZip;
@@ -29,18 +25,6 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static Task<SevenZipArchive> OpenAsync(
string filePath,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -58,18 +42,6 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static Task<SevenZipArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
@@ -91,18 +63,6 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
);
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static Task<SevenZipArchive> OpenAsync(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
@@ -124,18 +84,6 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
);
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static Task<SevenZipArchive> OpenAsync(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
@@ -155,18 +103,6 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
);
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static Task<SevenZipArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
@@ -182,11 +118,6 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
public static bool IsSevenZipFile(string filePath) => IsSevenZipFile(new FileInfo(filePath));
public static Task<bool> IsSevenZipFileAsync(
string filePath,
CancellationToken cancellationToken = default
) => IsSevenZipFileAsync(new FileInfo(filePath), cancellationToken);
public static bool IsSevenZipFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
@@ -197,19 +128,6 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
return IsSevenZipFile(stream);
}
public static async Task<bool> IsSevenZipFileAsync(
FileInfo fileInfo,
CancellationToken cancellationToken = default
)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return await IsSevenZipFileAsync(stream, cancellationToken).ConfigureAwait(false);
}
internal SevenZipArchive()
: base(ArchiveType.SevenZip) { }
@@ -245,8 +163,6 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
return entries;
}
protected override Task<IReadOnlyCollection<SevenZipArchiveEntry>> LoadEntriesAsync(IEnumerable<SevenZipVolume> volumes, CancellationToken cancellationToken) => throw new NotImplementedException();
private void LoadFactory(Stream stream)
{
if (_database is null)
@@ -258,8 +174,6 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
}
}
protected override Task<IReadOnlyCollection<SevenZipVolume>> LoadVolumesAsync(SourceStream sourceStream, CancellationToken cancellationToken) => throw new NotImplementedException();
public static bool IsSevenZipFile(Stream stream)
{
try
@@ -272,21 +186,6 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
}
}
public static async Task<bool> IsSevenZipFileAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
try
{
return await SignatureMatchAsync(stream, cancellationToken).ConfigureAwait(false);
}
catch
{
return false;
}
}
private static ReadOnlySpan<byte> Signature =>
new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };
@@ -297,16 +196,6 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
return signatureBytes.SequenceEqual(Signature);
}
private static async Task<bool> SignatureMatchAsync(
Stream stream,
CancellationToken cancellationToken
)
{
var signatureBytes = new byte[6];
await stream.ReadFullyAsync(signatureBytes, cancellationToken).ConfigureAwait(false);
return signatureBytes.SequenceEqual(Signature.ToArray());
}
protected override IReader CreateReaderForSolidExtraction() =>
new SevenZipReader(ReaderOptions, this);
@@ -316,6 +205,8 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
.GroupBy(x => x.FilePart.Folder)
.Any(folder => folder.Count() > 1);
public override bool IsEncrypted => Entries.First(x => !x.IsDirectory).IsEncrypted;
public override long TotalSize =>
_database?._packSizes.Aggregate(0L, (total, packSize) => total + packSize) ?? 0;

View File

@@ -2,14 +2,12 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
using SharpCompress.Polyfills;
using SharpCompress.Readers;
using SharpCompress.Readers.Tar;
using SharpCompress.Writers;
@@ -30,22 +28,6 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<TarArchive> OpenAsync(
string filePath,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
filePath.NotNullOrEmpty(nameof(filePath));
return await OpenAsync(new FileInfo(filePath), readerOptions ?? new ReaderOptions(), cancellationToken);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -63,32 +45,6 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<TarArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
fileInfo.NotNull(nameof(fileInfo));
var archive = new TarArchive();
archive.SourceStream = new SourceStream(
fileInfo,
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
);
archive.LazyVolumes = new Lazy<IReadOnlyCollection<TarVolume>>(() => archive.LoadVolumes(archive.SourceStream).ToList());
archive.LazyEntries = new Lazy<IReadOnlyCollection<TarArchiveEntry>>(
() => archive.LoadEntriesAsync(archive.Volumes, cancellationToken).GetAwaiter().GetResult().ToList()
);
return archive;
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
@@ -110,33 +66,6 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
);
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<TarArchive> OpenAsync(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
var archive = new TarArchive();
archive.SourceStream = new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
);
archive.LazyVolumes = new Lazy<IReadOnlyCollection<TarVolume>>(() => archive.LoadVolumes(archive.SourceStream).ToList());
archive.LazyEntries = new Lazy<IReadOnlyCollection<TarArchiveEntry>>(
() => archive.LoadEntriesAsync(archive.Volumes, cancellationToken).GetAwaiter().GetResult().ToList()
);
return archive;
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
@@ -155,33 +84,6 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
);
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<TarArchive> OpenAsync(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
var archive = new TarArchive();
archive.SourceStream = new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
);
archive.LazyVolumes = new Lazy<IReadOnlyCollection<TarVolume>>(() => archive.LoadVolumes(archive.SourceStream).ToList());
archive.LazyEntries = new Lazy<IReadOnlyCollection<TarArchiveEntry>>(
() => archive.LoadEntriesAsync(archive.Volumes, cancellationToken).GetAwaiter().GetResult().ToList()
);
return archive;
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
@@ -201,34 +103,6 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
);
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<TarArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
var archive = new TarArchive();
archive.SourceStream = new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions());
archive.LazyVolumes = new Lazy<IReadOnlyCollection<TarVolume>>(() => archive.LoadVolumes(archive.SourceStream).ToList());
archive.LazyEntries = new Lazy<IReadOnlyCollection<TarArchiveEntry>>(
() => archive.LoadEntriesAsync(archive.Volumes, cancellationToken).GetAwaiter().GetResult().ToList()
);
return archive;
}
public static bool IsTarFile(string filePath) => IsTarFile(new FileInfo(filePath));
public static bool IsTarFile(FileInfo fileInfo)
@@ -241,24 +115,6 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
return IsTarFile(stream);
}
public static async Task<bool> IsTarFileAsync(
string filePath,
CancellationToken cancellationToken = default
) => await IsTarFileAsync(new FileInfo(filePath), cancellationToken);
public static async Task<bool> IsTarFileAsync(
FileInfo fileInfo,
CancellationToken cancellationToken = default
)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return await IsTarFileAsync(stream, cancellationToken);
}
public static bool IsTarFile(Stream stream)
{
try
@@ -275,26 +131,6 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
return false;
}
public static async Task<bool> IsTarFileAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
try
{
var tarHeader = new TarHeader(new ArchiveEncoding());
var readSucceeded = await tarHeader.ReadAsync(stream, cancellationToken)
.ConfigureAwait(false);
var isEmptyArchive =
tarHeader.Name?.Length == 0
&& tarHeader.Size == 0
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;
}
catch { }
return false;
}
protected override IEnumerable<TarVolume> LoadVolumes(SourceStream sourceStream)
{
sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams
@@ -309,11 +145,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
: base(ArchiveType.Tar, sourceStream) { }
private TarArchive()
: base(ArchiveType.Tar)
{
LazyVolumes = new Lazy<IReadOnlyCollection<TarVolume>>(() => LoadVolumes(SourceStream!).ToList());
LazyEntries = new Lazy<IReadOnlyCollection<TarArchiveEntry>>(() => LoadEntries(Volumes).ToList());
}
: base(ArchiveType.Tar) { }
protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
{
@@ -373,67 +205,6 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
}
}
private async Task<IReadOnlyCollection<TarArchiveEntry>> LoadEntriesAsync(IEnumerable<TarVolume> volumes, CancellationToken cancellationToken)
{
var list = new List<TarArchiveEntry>();
var stream = volumes.Single().Stream;
TarHeader? previousHeader = null;
await foreach (
var header in TarHeaderFactory.ReadHeaderAsync(
StreamingMode.Seekable,
stream,
ReaderOptions.ArchiveEncoding,
cancellationToken
).WithCancellation(cancellationToken)
)
{
if (header != null)
{
if (header.EntryType == EntryType.LongName)
{
previousHeader = header;
}
else
{
if (previousHeader != null)
{
var entry = new TarArchiveEntry(
this,
new TarFilePart(previousHeader, stream),
CompressionType.None
);
var oldStreamPos = stream.Position;
using (var entryStream = entry.OpenEntryStream())
{
using var memoryStream = new MemoryStream();
await entryStream.CopyToAsync(memoryStream, cancellationToken).ConfigureAwait(false);
memoryStream.Position = 0;
var bytes = memoryStream.ToArray();
header.Name = ReaderOptions.ArchiveEncoding.Decode(bytes).TrimNulls();
}
stream.Position = oldStreamPos;
previousHeader = null;
}
list.Add(new TarArchiveEntry(
this,
new TarFilePart(header, stream),
CompressionType.None
));
}
}
else
{
throw new IncompleteArchiveException("Failed to read TAR header");
}
}
return list.AsReadOnly();
}
public static TarArchive Create() => new();
protected override TarArchiveEntry CreateEntryInternal(

View File

@@ -18,7 +18,7 @@ namespace SharpCompress.Archives.Zip;
public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
{
protected internal SeekableZipHeaderFactory? headerFactory;
private readonly SeekableZipHeaderFactory? headerFactory;
/// <summary>
/// Gets or sets the compression level applied to files added to the archive,
@@ -144,14 +144,6 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
return IsZipFile(stream, password, bufferSize);
}
public static bool IsZipFile(
Stream stream,
string? password = null,
@@ -185,41 +177,6 @@ namespace SharpCompress.Archives.Zip;
}
}
public static async Task<bool> IsZipFileAsync(
Stream stream,
string? password = null,
CancellationToken cancellationToken = default,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = await headerFactory
.ReadStreamHeaderAsync(stream, cancellationToken)
.FirstOrDefaultAsync(x => x.ZipHeaderType != ZipHeaderType.Split, cancellationToken)
.ConfigureAwait(false);
if (header is null)
{
return false;
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
public static bool IsZipMulti(
Stream stream,
string? password = null,
@@ -262,50 +219,6 @@ namespace SharpCompress.Archives.Zip;
}
}
public static async Task<bool> IsZipMultiAsync(
Stream stream,
string? password = null,
CancellationToken cancellationToken = default,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = await headerFactory
.ReadStreamHeaderAsync(stream, cancellationToken)
.FirstOrDefaultAsync(x => x.ZipHeaderType != ZipHeaderType.Split, cancellationToken)
.ConfigureAwait(false);
if (header is null)
{
if (stream.CanSeek) //could be multipart. Test for central directory - might not be z64 safe
{
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
var x = z.ReadSeekableHeader(stream).FirstOrDefault();
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
}
else
{
return false;
}
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream stream)
{
stream.LoadAllParts(); //request all streams

View File

@@ -1,7 +1,5 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Rar.Headers;
@@ -27,20 +25,6 @@ internal class MarkHeader : IRarHeader
throw new EndOfStreamException();
}
private static async Task<byte> GetByteAsync(
Stream stream,
CancellationToken cancellationToken
)
{
byte[] buffer = new byte[1];
var read = await stream.ReadAsync(buffer, 0, 1, cancellationToken).ConfigureAwait(false);
if (read == 1)
{
return buffer[0];
}
throw new EndOfStreamException();
}
public static MarkHeader Read(Stream stream, bool leaveStreamOpen, bool lookForHeader)
{
var maxScanIndex = lookForHeader ? MAX_SFX_SIZE : 0;
@@ -145,115 +129,4 @@ internal class MarkHeader : IRarHeader
throw new InvalidFormatException("Rar signature not found");
}
public static async Task<MarkHeader> ReadAsync(
Stream stream,
bool leaveStreamOpen,
bool lookForHeader,
CancellationToken cancellationToken
)
{
var maxScanIndex = lookForHeader ? MAX_SFX_SIZE : 0;
try
{
var start = -1;
var b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
while (start <= maxScanIndex)
{
cancellationToken.ThrowIfCancellationRequested();
// Rar old signature: 52 45 7E 5E
// Rar4 signature: 52 61 72 21 1A 07 00
// Rar5 signature: 52 61 72 21 1A 07 01 00
if (b == 0x52)
{
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b == 0x61)
{
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x72)
{
continue;
}
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x21)
{
continue;
}
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x1a)
{
continue;
}
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x07)
{
continue;
}
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b == 1)
{
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0)
{
continue;
}
return new MarkHeader(true); // Rar5
}
else if (b == 0)
{
return new MarkHeader(false); // Rar4
}
}
else if (b == 0x45)
{
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x7e)
{
continue;
}
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x5e)
{
continue;
}
throw new InvalidFormatException(
"Rar format version pre-4 is unsupported."
);
}
}
else
{
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
}
}
}
catch (Exception e)
{
if (!leaveStreamOpen)
{
stream.Dispose();
}
throw new InvalidFormatException("Error trying to read rar signature.", e);
}
throw new InvalidFormatException("Rar signature not found");
}
}
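The scan above matches the Rar4/Rar5 marker bytes listed in its comments; a standalone sketch (not library code) that checks the same signatures against a buffered prefix:

using System;

internal static class RarSignatureProbe
{
    // Signatures taken from the comments above:
    // Rar4: 52 61 72 21 1A 07 00   Rar5: 52 61 72 21 1A 07 01 00
    private static readonly byte[] Rar4 = { 0x52, 0x61, 0x72, 0x21, 0x1A, 0x07, 0x00 };
    private static readonly byte[] Rar5 = { 0x52, 0x61, 0x72, 0x21, 0x1A, 0x07, 0x01, 0x00 };

    // Returns true when the buffer starts with a RAR marker and reports the version.
    public static bool StartsWithRarMarker(ReadOnlySpan<byte> buffer, out bool isRar5)
    {
        isRar5 = buffer.Length >= Rar5.Length && buffer.StartsWith(Rar5);
        if (isRar5)
        {
            return true;
        }
        return buffer.Length >= Rar4.Length && buffer.StartsWith(Rar4);
    }
}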

View File

@@ -2,8 +2,6 @@ using System;
using System.Buffers.Binary;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Tar.Headers;
@@ -189,92 +187,6 @@ internal sealed class TarHeader
return true;
}
internal async Task<bool> ReadAsync(Stream stream, CancellationToken cancellationToken)
{
string? longName = null;
string? longLinkName = null;
var hasLongValue = true;
byte[] buffer;
EntryType entryType;
do
{
buffer = await ReadBlockAsync(stream, cancellationToken).ConfigureAwait(false);
if (buffer.Length == 0)
{
return false;
}
entryType = ReadEntryType(buffer);
// LongName and LongLink headers can follow each other and need
// to apply to the header that follows them.
if (entryType == EntryType.LongName)
{
longName = await ReadLongNameAsync(stream, buffer, cancellationToken)
.ConfigureAwait(false);
continue;
}
else if (entryType == EntryType.LongLink)
{
longLinkName = await ReadLongNameAsync(stream, buffer, cancellationToken)
.ConfigureAwait(false);
continue;
}
hasLongValue = false;
} while (hasLongValue);
// Check header checksum
if (!checkChecksum(buffer))
{
return false;
}
Name = longName ?? ArchiveEncoding.Decode(buffer, 0, 100).TrimNulls();
EntryType = entryType;
Size = ReadSize(buffer);
// for symlinks, additionally read the linkname
if (entryType == EntryType.SymLink || entryType == EntryType.HardLink)
{
LinkName = longLinkName ?? ArchiveEncoding.Decode(buffer, 157, 100).TrimNulls();
}
Mode = ReadAsciiInt64Base8(buffer, 100, 7);
if (entryType == EntryType.Directory)
{
Mode |= 0b1_000_000_000;
}
UserId = ReadAsciiInt64Base8oldGnu(buffer, 108, 7);
GroupId = ReadAsciiInt64Base8oldGnu(buffer, 116, 7);
var unixTimeStamp = ReadAsciiInt64Base8(buffer, 136, 11);
LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime();
Magic = ArchiveEncoding.Decode(buffer, 257, 6).TrimNulls();
if (!string.IsNullOrEmpty(Magic) && "ustar".Equals(Magic))
{
var namePrefix = ArchiveEncoding.Decode(buffer, 345, 157).TrimNulls();
if (!string.IsNullOrEmpty(namePrefix))
{
Name = namePrefix + "/" + Name;
}
}
if (entryType != EntryType.LongName && Name.Length == 0)
{
return false;
}
return true;
}
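ReadAsync above leans on helpers such as ReadAsciiInt64Base8 that this diff does not show; tar stores its numeric header fields as NUL/space-padded octal ASCII, so a hypothetical parser for such a field looks roughly like:

using System;

internal static class TarOctal
{
    // Parse a NUL/space padded octal ASCII field, e.g. the 12-byte size field.
    public static long ParseOctal(ReadOnlySpan<byte> field)
    {
        long value = 0;
        foreach (var b in field)
        {
            if (b == 0 || b == (byte)' ')
            {
                continue; // padding or terminator
            }
            if (b < (byte)'0' || b > (byte)'7')
            {
                throw new FormatException("Not an octal digit");
            }
            value = (value << 3) + (b - (byte)'0');
        }
        return value;
    }
}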
private string ReadLongName(BinaryReader reader, byte[] buffer)
{
var size = ReadSize(buffer);
@@ -299,39 +211,6 @@ internal sealed class TarHeader
return ArchiveEncoding.Decode(nameBytes, 0, nameBytes.Length).TrimNulls();
}
private async Task<string> ReadLongNameAsync(
Stream stream,
byte[] buffer,
CancellationToken cancellationToken
)
{
var size = ReadSize(buffer);
// Validate size to prevent memory exhaustion from malformed headers
if (size < 0 || size > MAX_LONG_NAME_SIZE)
{
throw new InvalidFormatException(
$"Long name size {size} is invalid or exceeds maximum allowed size of {MAX_LONG_NAME_SIZE} bytes"
);
}
var nameLength = (int)size;
var nameBytes = new byte[nameLength];
await ReadFullyAsync(stream, nameBytes, 0, nameLength, cancellationToken)
.ConfigureAwait(false);
var remainingBytesToRead = BLOCK_SIZE - (nameLength % BLOCK_SIZE);
// Read the rest of the block and discard the data
if (remainingBytesToRead > 0 && remainingBytesToRead < BLOCK_SIZE)
{
var discardBuffer = new byte[remainingBytesToRead];
await ReadFullyAsync(stream, discardBuffer, 0, remainingBytesToRead, cancellationToken)
.ConfigureAwait(false);
}
return ArchiveEncoding.Decode(nameBytes, 0, nameBytes.Length).TrimNulls();
}
private static EntryType ReadEntryType(byte[] buffer) => (EntryType)buffer[156];
private long ReadSize(byte[] buffer)
@@ -355,60 +234,6 @@ internal sealed class TarHeader
return buffer;
}
private static async Task<byte[]> ReadBlockAsync(
Stream stream,
CancellationToken cancellationToken
)
{
var buffer = new byte[BLOCK_SIZE];
var bytesRead = 0;
while (bytesRead < BLOCK_SIZE)
{
var read = await stream.ReadAsync(
buffer,
bytesRead,
BLOCK_SIZE - bytesRead,
cancellationToken
);
if (read == 0)
{
// end of stream. If we read nothing, return empty array.
// if we read some, but not a full block, it's an error.
if (bytesRead == 0)
{
return Array.Empty<byte>();
}
throw new InvalidFormatException("Buffer is invalid size");
}
bytesRead += read;
}
return buffer;
}
private static async Task ReadFullyAsync(
Stream stream,
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
var totalRead = 0;
while (totalRead < count)
{
cancellationToken.ThrowIfCancellationRequested();
var read = await stream
.ReadAsync(buffer, offset + totalRead, count - totalRead, cancellationToken)
.ConfigureAwait(false);
if (read == 0)
{
throw new EndOfStreamException("End of stream reached, but more bytes were expected.");
}
totalRead += read;
}
}
private static void WriteStringBytes(ReadOnlySpan<byte> name, Span<byte> buffer, int length)
{
name.CopyTo(buffer);

View File

@@ -1,8 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
@@ -57,54 +54,6 @@ internal static class TarHeaderFactory
}
}
internal static async IAsyncEnumerable<TarHeader?> ReadHeaderAsync(
StreamingMode mode,
Stream stream,
ArchiveEncoding archiveEncoding,
[EnumeratorCancellation] CancellationToken cancellationToken
)
{
while (true)
{
cancellationToken.ThrowIfCancellationRequested();
TarHeader? header = null;
try
{
header = new TarHeader(archiveEncoding);
if (!await header.ReadAsync(stream, cancellationToken).ConfigureAwait(false))
{
yield break;
}
switch (mode)
{
case StreamingMode.Seekable:
{
header.DataStartPosition = stream.Position;
//skip to nearest 512
stream.Position += PadTo512(header.Size);
}
break;
case StreamingMode.Streaming:
{
header.PackedStream = new TarReadOnlySubStream(stream, header.Size);
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
}
catch
{
header = null;
}
yield return header;
}
}
private static long PadTo512(long size)
{
var zeros = (int)(size % 512);
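PadTo512 is truncated by the diff view; the block arithmetic it refers to (rounding an entry's data up to the next 512-byte tar block) can be sketched independently as:

// Illustrative only: bytes needed to skip past an entry's data to reach the
// next 512-byte tar block boundary (0 when the size is already block-aligned).
internal static class TarBlocks
{
    public static long PaddingTo512(long size)
    {
        var remainder = size % 512;
        return remainder == 0 ? 0 : 512 - remainder;
    }
}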

View File

@@ -1,7 +1,4 @@
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -22,22 +19,6 @@ internal class DirectoryEndHeader : ZipHeader
Comment = reader.ReadBytes(CommentLength);
}
internal async Task ReadAsync(Stream stream, CancellationToken cancellationToken)
{
VolumeNumber = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
FirstVolumeWithDirectory = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken)
.ConfigureAwait(false);
TotalNumberOfEntriesInDisk = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken)
.ConfigureAwait(false);
TotalNumberOfEntries = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken)
.ConfigureAwait(false);
DirectorySize = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
DirectoryStartOffsetRelativeToDisk = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
CommentLength = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
Comment = await ZipHeaderFactory.ReadBytesAsync(stream, CommentLength, cancellationToken).ConfigureAwait(false);
}
public ushort VolumeNumber { get; private set; }
public ushort FirstVolumeWithDirectory { get; private set; }

View File

@@ -1,8 +1,5 @@
using System.Buffers.Binary;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -93,114 +90,7 @@ internal class DirectoryEntryHeader : ZipFileEntry
if (unixTimeExtra is not null)
{
var unixTimeTuple = ((UnixTimeExtraField)unixTimeExtra).UnicodeTimes;
if (unixTimeTuple.Item1.HasValue)
{
var dosTime = Utility.DateTimeToDosTime(unixTimeTuple.Item1.Value);
LastModifiedDate = (ushort)(dosTime >> 16);
LastModifiedTime = (ushort)(dosTime & 0x0FFFF);
}
else if (unixTimeTuple.Item2.HasValue)
{
var dosTime = Utility.DateTimeToDosTime(unixTimeTuple.Item2.Value);
LastModifiedDate = (ushort)(dosTime >> 16);
LastModifiedTime = (ushort)(dosTime & 0x0FFFF);
}
else if (unixTimeTuple.Item3.HasValue)
{
var dosTime = Utility.DateTimeToDosTime(unixTimeTuple.Item3.Value);
LastModifiedDate = (ushort)(dosTime >> 16);
LastModifiedTime = (ushort)(dosTime & 0x0FFFF);
}
}
}
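Both the synchronous and the removed asynchronous readers split a packed 32-bit DOS timestamp into a date half (high 16 bits) and a time half (low 16 bits); a sketch of that packing, assuming Utility.DateTimeToDosTime produces the standard MS-DOS layout:

using System;

internal static class DosTime
{
    // Pack a DateTime into the 32-bit MS-DOS layout assumed above:
    // high 16 bits = date (years since 1980), low 16 bits = time (2-second resolution).
    public static uint Pack(DateTime value)
    {
        var date = (uint)(((value.Year - 1980) << 9) | (value.Month << 5) | value.Day);
        var time = (uint)((value.Hour << 11) | (value.Minute << 5) | (value.Second / 2));
        return (date << 16) | time;
    }

    public static (ushort Date, ushort Time) Split(uint dosTime) =>
        ((ushort)(dosTime >> 16), (ushort)(dosTime & 0xFFFF));
}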
internal async Task ReadAsync(Stream stream, CancellationToken cancellationToken)
{
Version = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
VersionNeededToExtract = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken)
.ConfigureAwait(false);
Flags = (HeaderFlags)await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
CompressionMethod = (ZipCompressionMethod)
await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
OriginalLastModifiedTime =
LastModifiedTime = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
OriginalLastModifiedDate =
LastModifiedDate = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
Crc = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
CompressedSize = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
UncompressedSize = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
var nameLength = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
var extraLength = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
var commentLength = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
DiskNumberStart = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
InternalFileAttributes = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken)
.ConfigureAwait(false);
ExternalFileAttributes = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
RelativeOffsetOfEntryHeader = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
var name = await ZipHeaderFactory.ReadBytesAsync(stream, nameLength, cancellationToken).ConfigureAwait(false);
var extra = await ZipHeaderFactory.ReadBytesAsync(stream, extraLength, cancellationToken).ConfigureAwait(false);
var comment = await ZipHeaderFactory.ReadBytesAsync(stream, commentLength, cancellationToken)
.ConfigureAwait(false);
if (Flags.HasFlag(HeaderFlags.Efs))
{
Name = ArchiveEncoding.DecodeUTF8(name);
Comment = ArchiveEncoding.DecodeUTF8(comment);
}
else
{
Name = ArchiveEncoding.Decode(name);
Comment = ArchiveEncoding.Decode(comment);
}
LoadExtra(extra);
var unicodePathExtra = Extra.FirstOrDefault(u =>
u.Type == ExtraDataType.UnicodePathExtraField
);
if (unicodePathExtra != null && ArchiveEncoding.Forced == null)
{
Name = ((ExtraUnicodePathExtraField)unicodePathExtra).UnicodeName;
}
var zip64ExtraData = Extra.OfType<Zip64ExtendedInformationExtraField>().FirstOrDefault();
if (zip64ExtraData != null)
{
zip64ExtraData.Process(
UncompressedSize,
CompressedSize,
RelativeOffsetOfEntryHeader,
DiskNumberStart
);
if (CompressedSize == uint.MaxValue)
{
CompressedSize = zip64ExtraData.CompressedSize;
}
if (UncompressedSize == uint.MaxValue)
{
UncompressedSize = zip64ExtraData.UncompressedSize;
}
if (RelativeOffsetOfEntryHeader == uint.MaxValue)
{
RelativeOffsetOfEntryHeader = zip64ExtraData.RelativeOffsetOfEntryHeader;
}
}
var unixTimeExtra = Extra.FirstOrDefault(u => u.Type == ExtraDataType.UnixTimeExtraField);
if (unixTimeExtra is not null)
{
// Tuple order is last modified time, last access time, and creation time.
var unixTimeTuple = ((UnixTimeExtraField)unixTimeExtra).UnicodeTimes;
if (unixTimeTuple.Item1.HasValue)

View File

@@ -1,8 +1,5 @@
using System.Buffers.Binary;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -99,87 +96,5 @@ internal class LocalEntryHeader : ZipFileEntry
}
}
internal async Task ReadAsync(Stream stream, CancellationToken cancellationToken)
{
Version = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
Flags = (HeaderFlags)await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
CompressionMethod = (ZipCompressionMethod)
await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
OriginalLastModifiedTime =
LastModifiedTime = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
OriginalLastModifiedDate =
LastModifiedDate = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
Crc = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
CompressedSize = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
UncompressedSize = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
var nameLength = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
var extraLength = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
var name = await ZipHeaderFactory.ReadBytesAsync(stream, nameLength, cancellationToken).ConfigureAwait(false);
var extra = await ZipHeaderFactory.ReadBytesAsync(stream, extraLength, cancellationToken).ConfigureAwait(false);
if (Flags.HasFlag(HeaderFlags.Efs))
{
Name = ArchiveEncoding.DecodeUTF8(name);
}
else
{
Name = ArchiveEncoding.Decode(name);
}
LoadExtra(extra);
var unicodePathExtra = Extra.FirstOrDefault(u =>
u.Type == ExtraDataType.UnicodePathExtraField
);
if (unicodePathExtra != null && ArchiveEncoding.Forced == null)
{
Name = ((ExtraUnicodePathExtraField)unicodePathExtra).UnicodeName;
}
var zip64ExtraData = Extra.OfType<Zip64ExtendedInformationExtraField>().FirstOrDefault();
if (zip64ExtraData != null)
{
zip64ExtraData.Process(UncompressedSize, CompressedSize, 0, 0);
if (CompressedSize == uint.MaxValue)
{
CompressedSize = zip64ExtraData.CompressedSize;
}
if (UncompressedSize == uint.MaxValue)
{
UncompressedSize = zip64ExtraData.UncompressedSize;
}
}
var unixTimeExtra = Extra.FirstOrDefault(u => u.Type == ExtraDataType.UnixTimeExtraField);
if (unixTimeExtra is not null)
{
var unixTimeTuple = ((UnixTimeExtraField)unixTimeExtra).UnicodeTimes;
if (unixTimeTuple.Item1.HasValue)
{
var dosTime = Utility.DateTimeToDosTime(unixTimeTuple.Item1.Value);
LastModifiedDate = (ushort)(dosTime >> 16);
LastModifiedTime = (ushort)(dosTime & 0x0FFFF);
}
else if (unixTimeTuple.Item2.HasValue)
{
var dosTime = Utility.DateTimeToDosTime(unixTimeTuple.Item2.Value);
LastModifiedDate = (ushort)(dosTime >> 16);
LastModifiedTime = (ushort)(dosTime & 0x0FFFF);
}
else if (unixTimeTuple.Item3.HasValue)
{
var dosTime = Utility.DateTimeToDosTime(unixTimeTuple.Item3.Value);
LastModifiedDate = (ushort)(dosTime >> 16);
LastModifiedTime = (ushort)(dosTime & 0x0FFFF);
}
}
}
internal ushort Version { get; private set; }
}

View File

@@ -1,7 +1,4 @@
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -29,35 +26,6 @@ internal class Zip64DirectoryEndHeader : ZipHeader
);
}
internal async Task ReadAsync(Stream stream, CancellationToken cancellationToken)
{
SizeOfDirectoryEndRecord = (long)
await ZipHeaderFactory.ReadUInt64Async(stream, cancellationToken).ConfigureAwait(false);
VersionMadeBy = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
VersionNeededToExtract = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken)
.ConfigureAwait(false);
VolumeNumber = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
FirstVolumeWithDirectory = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
TotalNumberOfEntriesInDisk = (long)
await ZipHeaderFactory.ReadUInt64Async(stream, cancellationToken).ConfigureAwait(false);
TotalNumberOfEntries = (long)
await ZipHeaderFactory.ReadUInt64Async(stream, cancellationToken).ConfigureAwait(false);
DirectorySize = (long)
await ZipHeaderFactory.ReadUInt64Async(stream, cancellationToken).ConfigureAwait(false);
DirectoryStartOffsetRelativeToDisk = (long)
await ZipHeaderFactory.ReadUInt64Async(stream, cancellationToken).ConfigureAwait(false);
DataSector = await ZipHeaderFactory.ReadBytesAsync(
stream,
(int)(
SizeOfDirectoryEndRecord
- SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS
),
cancellationToken
)
.ConfigureAwait(false);
}
private const int SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS = 44;
public long SizeOfDirectoryEndRecord { get; private set; }

View File

@@ -1,7 +1,4 @@
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -17,16 +14,6 @@ internal class Zip64DirectoryEndLocatorHeader : ZipHeader
TotalNumberOfVolumes = reader.ReadUInt32();
}
internal async Task ReadAsync(Stream stream, CancellationToken cancellationToken)
{
FirstVolumeWithDirectory = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
RelativeOffsetOfTheEndOfDirectoryRecord = (long)
await ZipHeaderFactory.ReadUInt64Async(stream, cancellationToken).ConfigureAwait(false);
TotalNumberOfVolumes = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
}
public uint FirstVolumeWithDirectory { get; private set; }
public long RelativeOffsetOfTheEndOfDirectoryRecord { get; private set; }

View File

@@ -1,9 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
@@ -88,90 +85,6 @@ internal sealed class SeekableZipHeaderFactory : ZipHeaderFactory
}
}
internal async IAsyncEnumerable<ZipHeader> ReadSeekableHeaderAsync(
Stream stream,
[EnumeratorCancellation] CancellationToken cancellationToken
)
{
// Still need BinaryReader for synchronous SeekBackToHeader, until SeekBackToHeaderAsync is implemented
var reader = new BinaryReader(stream);
SeekBackToHeader(stream, reader); // Synchronous due to stream.Seek and BinaryReader
var eocd_location = stream.Position;
var entry = new DirectoryEndHeader();
await entry.ReadAsync(stream, cancellationToken).ConfigureAwait(false);
if (entry.IsZip64)
{
_zip64 = true;
stream.Seek(eocd_location - ZIP64_EOCD_LENGTH - 4, SeekOrigin.Begin); // Synchronous seek
var zip64_locator = await ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
if (zip64_locator != ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR)
{
throw new ArchiveException("Failed to locate the Zip64 Directory Locator");
}
var zip64Locator = new Zip64DirectoryEndLocatorHeader();
await zip64Locator.ReadAsync(stream, cancellationToken).ConfigureAwait(false);
stream.Seek(
zip64Locator.RelativeOffsetOfTheEndOfDirectoryRecord,
SeekOrigin.Begin
); // Synchronous seek
var zip64Signature = await ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
if (zip64Signature != ZIP64_END_OF_CENTRAL_DIRECTORY)
{
throw new ArchiveException("Failed to locate the Zip64 Header");
}
var zip64Entry = new Zip64DirectoryEndHeader();
await zip64Entry.ReadAsync(stream, cancellationToken).ConfigureAwait(false);
stream.Seek(
zip64Entry.DirectoryStartOffsetRelativeToDisk,
SeekOrigin.Begin
); // Synchronous seek
}
else
{
stream.Seek(entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin); // Synchronous seek
}
var position = stream.Position;
while (true)
{
cancellationToken.ThrowIfCancellationRequested();
stream.Position = position; // Synchronous seek
var signature = await ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
var nextHeader = await ReadHeaderAsync(
signature,
stream,
_zip64,
cancellationToken
).ConfigureAwait(false);
position = stream.Position;
if (nextHeader is null)
{
yield break;
}
if (nextHeader is DirectoryEntryHeader entryHeader)
{
entryHeader.HasData = entryHeader.CompressedSize != 0;
yield return entryHeader;
}
else if (nextHeader is DirectoryEndHeader endHeader)
{
yield return endHeader;
}
}
}
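The removed ReadSeekableHeaderAsync still depended on the synchronous SeekBackToHeader to land on the end-of-central-directory record; a simplified, standalone sketch of such a backward scan (the EOCD signature constant is assumed here, it is not shown in this diff):

using System;
using System.Buffers.Binary;
using System.IO;

internal static class EocdLocator
{
    // ZIP end-of-central-directory signature (assumed constant).
    private const uint END_OF_CENTRAL_DIRECTORY = 0x06054b50;

    // Scan backwards over the tail of a seekable stream (22-byte EOCD record plus
    // the maximum 64 KiB comment) and return the signature offset, or -1 if absent.
    public static long FindEndOfCentralDirectory(Stream stream)
    {
        var scanLength = (int)Math.Min(stream.Length, 22 + ushort.MaxValue);
        var buffer = new byte[scanLength];
        stream.Seek(-scanLength, SeekOrigin.End);
        var read = 0;
        while (read < scanLength)
        {
            var n = stream.Read(buffer, read, scanLength - read);
            if (n == 0)
            {
                break;
            }
            read += n;
        }
        for (var i = read - 4; i >= 0; i--)
        {
            if (BinaryPrimitives.ReadUInt32LittleEndian(buffer.AsSpan(i, 4)) == END_OF_CENTRAL_DIRECTORY)
            {
                return stream.Length - read + i;
            }
        }
        return -1;
    }
}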
private static bool IsMatch(byte[] haystack, int position, byte[] needle)
{
for (var i = 0; i < needle.Length; i++)

View File

@@ -1,10 +1,7 @@
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
@@ -203,230 +200,4 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
yield return header;
}
}
internal async IAsyncEnumerable<ZipHeader> ReadStreamHeaderAsync(
Stream stream,
[System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken
)
{
if (stream is not SharpCompressStream)
{
if (stream is SourceStream src)
{
stream = new SharpCompressStream(
stream,
src.ReaderOptions.LeaveStreamOpen,
bufferSize: src.ReaderOptions.BufferSize
);
}
else
{
throw new ArgumentException("Stream must be a SharpCompressStream", nameof(stream));
}
}
var rewindableStream = (SharpCompressStream)stream;
while (true)
{
cancellationToken.ThrowIfCancellationRequested();
uint headerBytes = 0;
if (
_lastEntryHeader != null
&& FlagUtility.HasFlag(_lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor)
)
{
if (_lastEntryHeader.Part is null)
{
continue;
}
var pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;
var crc = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
if (crc == POST_DATA_DESCRIPTOR)
{
crc = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
}
_lastEntryHeader.Crc = crc;
ulong compSize = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
ulong uncompSize = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
headerBytes = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
bool isSentinel = compSize == 0xFFFFFFFF || uncompSize == 0xFFFFFFFF;
bool isHeader = headerBytes == 0x04034b50 || headerBytes == 0x02014b50;
if (!isHeader && !isSentinel)
{
compSize = (uncompSize << 32) | compSize;
uncompSize =
((ulong)headerBytes << 32)
| await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
headerBytes = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
}
else if (isSentinel)
{
compSize = await ReadUInt64Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
uncompSize = await ReadUInt64Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
}
_lastEntryHeader.CompressedSize = (long)compSize;
_lastEntryHeader.UncompressedSize = (long)uncompSize;
if (pos.HasValue)
{
_lastEntryHeader.DataStartPosition = pos - _lastEntryHeader.CompressedSize;
}
}
else if (_lastEntryHeader != null && _lastEntryHeader.IsZip64)
{
if (_lastEntryHeader.Part is null)
continue;
var pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;
headerBytes = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
await ReadBytesAsync(rewindableStream, 12, cancellationToken).ConfigureAwait(false); //skip a bunch of fields we don't care about
var crc = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
if (crc == POST_DATA_DESCRIPTOR)
{
crc = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
}
_lastEntryHeader.Crc = crc;
var compressed_size = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
var uncompressed_size = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
var test_header = !(headerBytes == 0x04034b50 || headerBytes == 0x02014b50);
var test_64bit = ((long)uncompressed_size << 32) | compressed_size;
if (test_64bit == _lastEntryHeader.CompressedSize && test_header)
{
_lastEntryHeader.UncompressedSize =
((long)await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false) << 32) | headerBytes;
headerBytes = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
}
else
{
_lastEntryHeader.UncompressedSize = uncompressed_size;
}
if (pos.HasValue)
{
_lastEntryHeader.DataStartPosition = pos - _lastEntryHeader.CompressedSize;
rewindableStream.Position = pos.Value + 4;
}
}
else
{
headerBytes = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
}
_lastEntryHeader = null;
var header = await ReadHeaderAsync(
headerBytes,
rewindableStream,
cancellationToken: cancellationToken
);
if (header is null)
{
yield break;
}
if (header.ZipHeaderType == ZipHeaderType.LocalEntry)
{
var local_header = ((LocalEntryHeader)header);
var dir_header = _entries?.FirstOrDefault(entry =>
entry.Key == local_header.Name
&& local_header.CompressedSize == 0
&& local_header.UncompressedSize == 0
&& local_header.Crc == 0
&& local_header.IsDirectory == false
);
if (dir_header != null)
{
local_header.UncompressedSize = dir_header.Size;
local_header.CompressedSize = dir_header.CompressedSize;
local_header.Crc = (uint)dir_header.Crc;
}
if (local_header.CompressedSize > 0)
{
header.HasData = true;
}
else if (local_header.Flags.HasFlag(HeaderFlags.UsePostDataDescriptor))
{
var nextHeaderBytes = await ReadUInt32Async(
rewindableStream,
cancellationToken
);
((IStreamStack)rewindableStream).Rewind(sizeof(uint));
header.HasData = !IsHeader(nextHeaderBytes);
}
else
{
header.HasData = false;
}
}
yield return header;
}
}
private static async Task<byte[]> ReadBytesAsync(
Stream stream,
int count,
CancellationToken cancellationToken
)
{
var buffer = new byte[count];
var read = await stream.ReadAsync(buffer, 0, count, cancellationToken);
if (read < count)
{
throw new EndOfStreamException();
}
return buffer;
}
private static async Task<uint> ReadUInt32Async(
Stream stream,
CancellationToken cancellationToken
)
{
var buffer = await ReadBytesAsync(stream, 4, cancellationToken);
return BinaryPrimitives.ReadUInt32LittleEndian(buffer);
}
private static async Task<ushort> ReadUInt16Async(
Stream stream,
CancellationToken cancellationToken
)
{
var buffer = await ReadBytesAsync(stream, 2, cancellationToken);
return BinaryPrimitives.ReadUInt16LittleEndian(buffer);
}
}

View File

@@ -1,9 +1,6 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
@@ -109,129 +106,6 @@ internal class ZipHeaderFactory
}
}
protected async Task<ZipHeader?> ReadHeaderAsync(
uint headerBytes,
Stream stream,
bool zip64 = false,
CancellationToken cancellationToken = default
)
{
switch (headerBytes)
{
case ENTRY_HEADER_BYTES:
{
var entryHeader = new LocalEntryHeader(_archiveEncoding);
await entryHeader.ReadAsync(stream, cancellationToken).ConfigureAwait(false);
await LoadHeaderAsync(entryHeader, stream, cancellationToken).ConfigureAwait(false);
_lastEntryHeader = entryHeader;
return entryHeader;
}
case DIRECTORY_START_HEADER_BYTES:
{
var entry = new DirectoryEntryHeader(_archiveEncoding);
await entry.ReadAsync(stream, cancellationToken).ConfigureAwait(false);
return entry;
}
case POST_DATA_DESCRIPTOR:
{
if (
_lastEntryHeader != null
&& FlagUtility.HasFlag(
_lastEntryHeader.NotNull().Flags,
HeaderFlags.UsePostDataDescriptor
)
)
{
_lastEntryHeader.Crc = await ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
_lastEntryHeader.CompressedSize = zip64
? (long)await ReadUInt64Async(stream, cancellationToken)
.ConfigureAwait(false)
: await ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
_lastEntryHeader.UncompressedSize = zip64
? (long)await ReadUInt64Async(stream, cancellationToken)
.ConfigureAwait(false)
: await ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
}
else
{
await ReadBytesAsync(stream, zip64 ? 20 : 12, cancellationToken)
.ConfigureAwait(false);
}
return null;
}
case DIGITAL_SIGNATURE:
return null;
case DIRECTORY_END_HEADER_BYTES:
{
var entry = new DirectoryEndHeader();
await entry.ReadAsync(stream, cancellationToken).ConfigureAwait(false);
return entry;
}
case SPLIT_ARCHIVE_HEADER_BYTES:
{
return new SplitHeader();
}
case ZIP64_END_OF_CENTRAL_DIRECTORY:
{
var entry = new Zip64DirectoryEndHeader();
await entry.ReadAsync(stream, cancellationToken).ConfigureAwait(false);
return entry;
}
case ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR:
{
var entry = new Zip64DirectoryEndLocatorHeader();
await entry.ReadAsync(stream, cancellationToken).ConfigureAwait(false);
return entry;
}
default:
return null;
}
}
internal static async Task<byte[]> ReadBytesAsync(
Stream stream,
int count,
CancellationToken cancellationToken
)
{
var buffer = new byte[count];
var read = await stream.ReadAsync(buffer, 0, count, cancellationToken);
if (read < count)
{
throw new EndOfStreamException();
}
return buffer;
}
internal static async Task<uint> ReadUInt32Async(
Stream stream,
CancellationToken cancellationToken
)
{
var buffer = await ReadBytesAsync(stream, 4, cancellationToken);
return BinaryPrimitives.ReadUInt32LittleEndian(buffer);
}
internal static async Task<ushort> ReadUInt16Async(
Stream stream,
CancellationToken cancellationToken
)
{
var buffer = await ReadBytesAsync(stream, 2, cancellationToken);
return BinaryPrimitives.ReadUInt16LittleEndian(buffer);
}
internal static async Task<ulong> ReadUInt64Async(
Stream stream,
CancellationToken cancellationToken
)
{
var buffer = await ReadBytesAsync(stream, 8, cancellationToken);
return BinaryPrimitives.ReadUInt64LittleEndian(buffer);
}
internal static bool IsHeader(uint headerBytes)
{
switch (headerBytes)
@@ -302,6 +176,12 @@ internal class ZipHeaderFactory
return;
}
//if (FlagUtility.HasFlag(entryHeader.Flags, HeaderFlags.UsePostDataDescriptor))
//{
// entryHeader.PackedStream = new ReadOnlySubStream(stream);
//}
//else
//{
switch (_mode)
{
case StreamingMode.Seekable:
@@ -322,87 +202,7 @@ internal class ZipHeaderFactory
throw new InvalidFormatException("Invalid StreamingMode");
}
}
}
private async Task LoadHeaderAsync(
ZipFileEntry entryHeader,
Stream stream,
CancellationToken cancellationToken
)
{
if (FlagUtility.HasFlag(entryHeader.Flags, HeaderFlags.Encrypted))
{
if (
!entryHeader.IsDirectory
&& entryHeader.CompressedSize == 0
&& FlagUtility.HasFlag(entryHeader.Flags, HeaderFlags.UsePostDataDescriptor)
)
{
throw new NotSupportedException(
"SharpCompress cannot currently read non-seekable Zip Streams with encrypted data that has been written in a non-seekable manner."
);
}
if (_password is null)
{
throw new CryptographicException("No password supplied for encrypted zip.");
}
entryHeader.Password = _password;
if (entryHeader.CompressionMethod == ZipCompressionMethod.WinzipAes)
{
var data = entryHeader.Extra.SingleOrDefault(x =>
x.Type == ExtraDataType.WinZipAes
);
if (data != null)
{
var keySize = (WinzipAesKeySize)data.DataBytes[4];
var salt = new byte[WinzipAesEncryptionData.KeyLengthInBytes(keySize) / 2];
var passwordVerifyValue = new byte[2];
await stream
.ReadAsync(salt, 0, salt.Length, cancellationToken)
.ConfigureAwait(false);
await stream
.ReadAsync(passwordVerifyValue, 0, 2, cancellationToken)
.ConfigureAwait(false);
entryHeader.WinzipAesEncryptionData = new WinzipAesEncryptionData(
keySize,
salt,
passwordVerifyValue,
_password
);
entryHeader.CompressedSize -= (uint)(salt.Length + 2);
}
}
}
if (entryHeader.IsDirectory)
{
return;
}
switch (_mode)
{
case StreamingMode.Seekable:
{
entryHeader.DataStartPosition = stream.Position;
stream.Position += entryHeader.CompressedSize;
break;
}
case StreamingMode.Streaming:
{
entryHeader.PackedStream = stream;
break;
}
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
//}
}
}

View File

@@ -1,9 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Tar;
@@ -49,12 +46,6 @@ public class GZipFactory
int bufferSize = ReaderOptions.DefaultBufferSize
) => GZipArchive.IsGZipFile(stream);
public override Task<bool> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken,
string? password = null
) => GZipArchive.IsGZipFileAsync(stream, cancellationToken);
#endregion
#region IArchiveFactory
@@ -63,24 +54,10 @@ public class GZipFactory
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
GZipArchive.Open(stream, readerOptions);
/// <inheritdoc/>
public async Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await GZipArchive.OpenAsync(stream, readerOptions, cancellationToken).ConfigureAwait(false);
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
GZipArchive.Open(fileInfo, readerOptions);
/// <inheritdoc/>
public async Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await GZipArchive.OpenAsync(fileInfo, readerOptions, cancellationToken).ConfigureAwait(false);
#endregion
#region IMultiArchiveFactory
@@ -89,24 +66,10 @@ public class GZipFactory
public IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null) =>
GZipArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public async Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await GZipArchive.OpenAsync(streams, readerOptions, cancellationToken).ConfigureAwait(false);
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
GZipArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public async Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await GZipArchive.OpenAsync(fileInfos, readerOptions, cancellationToken).ConfigureAwait(false);
#endregion
#region IReaderFactory

View File

@@ -1,7 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Readers;
namespace SharpCompress.Factories;
@@ -44,18 +42,6 @@ public interface IFactory
int bufferSize = ReaderOptions.DefaultBufferSize
);
/// <summary>
/// Returns true if the stream represents an archive of the format defined by this type.
/// </summary>
/// <param name="stream">A stream, pointing to the beginning of the archive.</param>
/// <param name="cancellationToken">the cancellation token</param>
/// <param name="password">optional password</param>
Task<bool> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken,
string? password = null
);
/// <summary>
/// From a passed in archive (zip, rar, 7z, 001), return all parts.
/// </summary>

View File

@@ -1,8 +1,5 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Rar;
using SharpCompress.Common;
@@ -38,12 +35,6 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
int bufferSize = ReaderOptions.DefaultBufferSize
) => RarArchive.IsRarFile(stream);
public override Task<bool> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken,
string? password = null
) => RarArchive.IsRarFileAsync(stream, cancellationToken);
/// <inheritdoc/>
public override FileInfo? GetFilePart(int index, FileInfo part1) =>
RarArchiveVolumeFactory.GetFilePart(index, part1);
@@ -56,24 +47,10 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
RarArchive.Open(stream, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
RarArchive.Open(fileInfo, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
#endregion
#region IMultiArchiveFactory
@@ -82,24 +59,10 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
public IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null) =>
RarArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
RarArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
#endregion
#region IReaderFactory

View File

@@ -1,8 +1,5 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.SevenZip;
using SharpCompress.Common;
@@ -37,12 +34,6 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
int bufferSize = ReaderOptions.DefaultBufferSize
) => SevenZipArchive.IsSevenZipFile(stream);
public override Task<bool> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken,
string? password = null
) => SevenZipArchive.IsSevenZipFileAsync(stream, cancellationToken);
#endregion
#region IArchiveFactory
@@ -51,24 +42,10 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
SevenZipArchive.Open(stream, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
SevenZipArchive.Open(fileInfo, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
#endregion
#region IMultiArchiveFactory
@@ -77,24 +54,10 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
public IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null) =>
SevenZipArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
SevenZipArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
#endregion
#region reader

View File

@@ -2,8 +2,6 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
@@ -59,39 +57,238 @@ public class TarFactory
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => TarArchive.IsTarFile(stream);
)
{
if (!stream.CanSeek)
{
return TarArchive.IsTarFile(stream); // For non-seekable streams, just check if it's a tar file
}
public override Task<bool> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken,
string? password = null
) => TarArchive.IsTarFileAsync(stream, cancellationToken);
var startPosition = stream.Position;
// First check if it's a regular tar file
if (TarArchive.IsTarFile(stream))
{
stream.Seek(startPosition, SeekOrigin.Begin); // Seek back for consistency
return true;
}
// Seek back after the tar file check
stream.Seek(startPosition, SeekOrigin.Begin);
if (compressionOptions == null)
{
return false;
}
try
{
// Try each compression option to see if it contains a tar file
foreach (var testOption in compressionOptions)
{
if (testOption.Type == CompressionType.None)
{
continue; // Skip uncompressed
}
stream.Seek(startPosition, SeekOrigin.Begin);
try
{
if (testOption.CanHandle(stream))
{
stream.Seek(startPosition, SeekOrigin.Begin);
// Try to decompress and check if it contains a tar archive
// For compression formats that don't support leaveOpen, we need to save/restore position
var positionBeforeDecompress = stream.Position;
Stream? decompressedStream = null;
bool streamWasClosed = false;
try
{
decompressedStream = testOption.Type switch
{
CompressionType.BZip2 => new BZip2Stream(stream, CompressionMode.Decompress, true),
_ => testOption.CreateStream(stream) // For other types, may close the stream
};
if (TarArchive.IsTarFile(decompressedStream))
{
return true;
}
}
catch (ObjectDisposedException)
{
streamWasClosed = true;
throw; // Stream was closed, can't continue
}
finally
{
decompressedStream?.Dispose();
if (!streamWasClosed && stream.CanSeek)
{
try
{
stream.Seek(positionBeforeDecompress, SeekOrigin.Begin);
}
catch
{
// If seek fails, the stream might have been closed
}
}
}
// Seek back to start after decompression attempt
stream.Seek(startPosition, SeekOrigin.Begin);
}
}
catch
{
// If decompression fails, it's not this format - continue to next option
try
{
stream.Seek(startPosition, SeekOrigin.Begin);
}
catch
{
// Ignore seek failures
}
}
}
return false;
}
finally
{
try
{
stream.Seek(startPosition, SeekOrigin.Begin);
}
catch
{
// Ignore seek failures
}
}
}
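A minimal usage sketch for the detection above (the Tar.tar.bz2 path mirrors the test fixtures added later in this PR, and new TarFactory() assumes the factory keeps a public parameterless constructor):

using System;
using System.IO;
using SharpCompress.Factories;

internal static class CompressedTarProbe
{
    private static void Main()
    {
        using var stream = File.OpenRead("Tar.tar.bz2");
        var factory = new TarFactory();
        // With the WIP detection above, this should report true for a bzip2-compressed tar,
        // because each compression option is test-decompressed and probed for a tar header.
        Console.WriteLine(factory.IsArchive(stream));
    }
}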
#endregion
#region IArchiveFactory
/// <inheritdoc/>
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
TarArchive.Open(stream, readerOptions);
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
readerOptions ??= new ReaderOptions();
// Try to detect and handle compressed tar formats
if (stream.CanSeek)
{
var startPosition = stream.Position;
// Try each compression option to see if we can decompress it
foreach (var testOption in compressionOptions)
{
if (testOption.Type == CompressionType.None)
{
continue; // Skip uncompressed
}
stream.Seek(startPosition, SeekOrigin.Begin);
if (testOption.CanHandle(stream))
{
stream.Seek(startPosition, SeekOrigin.Begin);
// Decompress the entire stream into a seekable MemoryStream
using var decompressedStream = testOption.CreateStream(stream);
var memoryStream = new MemoryStream();
decompressedStream.CopyTo(memoryStream);
memoryStream.Position = 0;
// Verify it's actually a tar file
if (TarArchive.IsTarFile(memoryStream))
{
memoryStream.Position = 0;
// Return a TarArchive from the decompressed memory stream
// The TarArchive will own the MemoryStream and dispose it when disposed
var options = new ReaderOptions
{
LeaveStreamOpen = false, // Ensure the MemoryStream is disposed with the archive
ArchiveEncoding = readerOptions?.ArchiveEncoding ?? new ArchiveEncoding()
};
return TarArchive.Open(memoryStream, options);
}
memoryStream.Dispose();
}
}
stream.Seek(startPosition, SeekOrigin.Begin);
}
// Fall back to normal tar archive opening
return TarArchive.Open(stream, readerOptions);
}
/// <inheritdoc/>
public async Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await TarArchive.OpenAsync(stream, readerOptions, cancellationToken).ConfigureAwait(false);
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
readerOptions ??= new ReaderOptions();
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
TarArchive.Open(fileInfo, readerOptions);
// Try to detect and handle compressed tar formats by file extension and content
using var fileStream = fileInfo.OpenRead();
/// <inheritdoc/>
public async Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await TarArchive.OpenAsync(fileInfo, readerOptions, cancellationToken).ConfigureAwait(false);
// Try each compression option
foreach (var testOption in compressionOptions)
{
if (testOption.Type == CompressionType.None)
{
continue; // Skip uncompressed
}
// Check if file extension matches
var fileName = fileInfo.Name.ToLowerInvariant();
if (testOption.KnownExtensions.Any(ext => fileName.EndsWith(ext)))
{
fileStream.Position = 0;
// Verify it's the right compression format
if (testOption.CanHandle(fileStream))
{
fileStream.Position = 0;
// Decompress the entire file into a seekable MemoryStream
using var decompressedStream = testOption.CreateStream(fileStream);
var memoryStream = new MemoryStream();
decompressedStream.CopyTo(memoryStream);
memoryStream.Position = 0;
// Verify it's actually a tar file
if (TarArchive.IsTarFile(memoryStream))
{
memoryStream.Position = 0;
// Return a TarArchive from the decompressed memory stream
// The TarArchive will own the MemoryStream and dispose it when disposed
var options = new ReaderOptions
{
LeaveStreamOpen = false, // Ensure the MemoryStream is disposed with the archive
ArchiveEncoding = readerOptions?.ArchiveEncoding ?? new ArchiveEncoding()
};
return TarArchive.Open(memoryStream, options);
}
memoryStream.Dispose();
}
}
}
// fileStream will be closed by the using statement
// Fall back to normal tar archive opening
return TarArchive.Open(fileInfo, readerOptions);
}
#endregion
@@ -101,24 +298,10 @@ public class TarFactory
public IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null) =>
TarArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public async Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await TarArchive.OpenAsync(streams, readerOptions, cancellationToken).ConfigureAwait(false);
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
TarArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public async Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await TarArchive.OpenAsync(fileInfos, readerOptions, cancellationToken).ConfigureAwait(false);
#endregion
#region IReaderFactory
@@ -280,7 +463,6 @@ public class TarFactory
#endregion
#region IWriteableArchiveFactory
/// <inheritdoc/>

View File

@@ -1,8 +1,5 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
@@ -82,44 +79,6 @@ public class ZipFactory
return false;
}
public override async Task<bool> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken,
string? password = null
)
{
var startPosition = stream.CanSeek ? stream.Position : -1;
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(
stream,
bufferSize: ReaderOptions.DefaultBufferSize
);
}
if (await ZipArchive.IsZipFileAsync(stream, password, cancellationToken).ConfigureAwait(false))
{
return true;
}
if (!stream.CanSeek)
{
return false;
}
stream.Position = startPosition;
if (await ZipArchive.IsZipMultiAsync(stream, password, cancellationToken).ConfigureAwait(false))
{
return true;
}
stream.Position = startPosition;
return false;
}
/// <inheritdoc/>
public override FileInfo? GetFilePart(int index, FileInfo part1) =>
ZipArchiveVolumeFactory.GetFilePart(index, part1);
@@ -132,22 +91,10 @@ public class ZipFactory
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
ZipArchive.Open(stream, readerOptions);
public Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
ZipArchive.Open(fileInfo, readerOptions);
public Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
#endregion
#region IMultiArchiveFactory
@@ -156,22 +103,10 @@ public class ZipFactory
public IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null) =>
ZipArchive.Open(streams, readerOptions);
public Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
ZipArchive.Open(fileInfos, readerOptions);
public Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
#endregion
#region IReaderFactory

View File

@@ -1,38 +0,0 @@
#if !NETFRAMEWORK && !NETSTANDARD2_0
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Polyfills;
internal static class AsyncEnumerableExtensions
{
public static async Task<T?> FirstOrDefaultAsync<T>(
this IAsyncEnumerable<T> source,
CancellationToken cancellationToken = default
)
{
await foreach (var item in source.WithCancellation(cancellationToken))
{
return item;
}
return default;
}
public static async Task<T?> FirstOrDefaultAsync<T>(
this IAsyncEnumerable<T> source,
Func<T, bool> predicate,
CancellationToken cancellationToken = default
)
{
await foreach (var item in source.WithCancellation(cancellationToken))
{
if (predicate(item))
{
return item;
}
}
return default;
}
}
#endif

View File

@@ -0,0 +1,63 @@
using System;
using System.IO;
using SharpCompress.Archives;
using Xunit;
namespace SharpCompress.Test;
public class ArchiveFactoryCompressedTarTests : TestBase
{
[Fact]
public void ArchiveFactory_Open_TarBz2_ThrowsHelpfulException()
{
var testFile = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2");
var exception = Assert.Throws<InvalidOperationException>(() =>
{
using var archive = ArchiveFactory.Open(testFile);
});
Assert.Contains("tar.bz2", exception.Message);
Assert.Contains("ReaderFactory", exception.Message);
}
[Fact]
public void ArchiveFactory_Open_TarLz_ThrowsHelpfulException()
{
var testFile = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.lz");
var exception = Assert.Throws<InvalidOperationException>(() =>
{
using var archive = ArchiveFactory.Open(testFile);
});
Assert.Contains("tar.lz", exception.Message);
Assert.Contains("ReaderFactory", exception.Message);
}
[Fact]
public void ArchiveFactory_Open_TarBz2Stream_ThrowsHelpfulException()
{
var testFile = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2");
using var stream = File.OpenRead(testFile);
var exception = Assert.Throws<InvalidOperationException>(() =>
{
using var archive = ArchiveFactory.Open(stream);
});
Assert.Contains("tar.bz2", exception.Message);
Assert.Contains("ReaderFactory", exception.Message);
}
[Fact]
public void ArchiveFactory_Open_TarLzStream_ThrowsHelpfulException()
{
var testFile = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.lz");
using var stream = File.OpenRead(testFile);
var exception = Assert.Throws<InvalidOperationException>(() =>
{
using var archive = ArchiveFactory.Open(stream);
});
Assert.Contains("tar.lz", exception.Message);
Assert.Contains("ReaderFactory", exception.Message);
}
}
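The asserted error messages steer callers toward the streaming Reader API; a sketch of that path for the same fixture, using the library's existing ReaderFactory and IReader members:

using System;
using System.IO;
using SharpCompress.Readers;

internal static class CompressedTarReadDemo
{
    private static void Main()
    {
        using var stream = File.OpenRead("Tar.tar.bz2");
        using var reader = ReaderFactory.Open(stream);
        // Forward-only enumeration works for compressed tar formats that the
        // random-access Archive API rejects.
        while (reader.MoveToNextEntry())
        {
            if (!reader.Entry.IsDirectory)
            {
                Console.WriteLine($"{reader.Entry.Key} ({reader.Entry.Size} bytes)");
            }
        }
    }
}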

View File

@@ -633,4 +633,13 @@ public class RarArchiveTests : ArchiveTests
"Rar5.encrypted_filesOnly.rar",
"Failure jpg exe Empty тест.txt jpg\\test.jpg exe\\test.exe"
);
[Fact]
public void Rar_TestEncryptedDetection()
{
using var passwordProtectedFilesArchive = RarArchive.Open(
Path.Combine(TEST_ARCHIVES_PATH, "Rar.encrypted_filesOnly.rar")
);
Assert.True(passwordProtectedFilesArchive.IsEncrypted);
}
}
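The new archive-level IsEncrypted flag exercised above can be checked before extraction; a short sketch (password value assumed):

using System;
using SharpCompress.Archives.Rar;
using SharpCompress.Readers;

internal static class EncryptedArchiveDemo
{
    private static void Main()
    {
        using var archive = RarArchive.Open(
            "Rar.encrypted_filesOnly.rar",
            new ReaderOptions { Password = "test" } // password assumed
        );
        if (archive.IsEncrypted)
        {
            Console.WriteLine("Archive has encrypted entries; a password is required to extract.");
        }
    }
}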

View File

@@ -224,6 +224,15 @@ public class SevenZipArchiveTests : ArchiveTests
);
}
[Fact]
public void SevenZipArchive_TestEncryptedDetection()
{
using var passwordProtectedFilesArchive = SevenZipArchive.Open(
Path.Combine(TEST_ARCHIVES_PATH, "7Zip.encryptedFiles.7z")
);
Assert.True(passwordProtectedFilesArchive.IsEncrypted);
}
[Fact]
public void SevenZipArchive_TestSolidDetection()
{

Binary file not shown.