Compare commits


26 Commits

Author SHA1 Message Date
copilot-swe-agent[bot]
3cc4ccedfe Fix decompression performance by delaying stream wrapping
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-27 10:58:53 +00:00
copilot-swe-agent[bot]
f27ea2d5f6 Initial plan for decompression performance fix
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-27 10:53:39 +00:00
copilot-swe-agent[bot]
292729028b Initial plan 2025-10-27 10:45:19 +00:00
Adam Hathcock
b3a20d05c5 Merge pull request #978 from adamhathcock/copilot/enhance-stream-io-async-support
Add comprehensive async/await support for Stream I/O operations
2025-10-27 10:23:08 +00:00
Adam Hathcock
4cd024a2b2 Merge remote-tracking branch 'origin/master' into copilot/enhance-stream-io-async-support 2025-10-27 10:20:06 +00:00
Adam Hathcock
63d08ebfd2 update agents 2025-10-27 10:19:57 +00:00
Adam Hathcock
c696197b03 formatting 2025-10-27 10:19:24 +00:00
Adam Hathcock
738a72228b added fixes and more async tests 2025-10-27 10:15:06 +00:00
Adam Hathcock
90641f4488 Merge pull request #979 from adamhathcock/dependabot/github_actions/actions/upload-artifact-5
Bump actions/upload-artifact from 4 to 5
2025-10-27 10:06:02 +00:00
Adam Hathcock
a4cc7eaf9b fully use async for zlibbase 2025-10-27 09:51:39 +00:00
Adam Hathcock
fdca728fdc add some dispose async 2025-10-27 09:47:15 +00:00
dependabot[bot]
d2c4ae8cdf Bump actions/upload-artifact from 4 to 5
Bumps [actions/upload-artifact](https://github.com/actions/upload-artifact) from 4 to 5.
- [Release notes](https://github.com/actions/upload-artifact/releases)
- [Commits](https://github.com/actions/upload-artifact/compare/v4...v5)

---
updated-dependencies:
- dependency-name: actions/upload-artifact
  dependency-version: '5'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-10-27 09:43:01 +00:00
Adam Hathcock
f3d3ac30a6 add gubbins 2025-10-27 09:39:08 +00:00
Adam Hathcock
f8cc4ade8a format 2025-10-27 09:37:00 +00:00
copilot-swe-agent[bot]
b3975b7bbd Add async tests for EntryStream and compression streams
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-27 09:28:34 +00:00
copilot-swe-agent[bot]
4f1b61f5bc Add async support to DeflateStream and GZipStream
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-27 09:20:37 +00:00
copilot-swe-agent[bot]
beeb37b4fd Add async support to EntryStream, ZlibStream, and ZlibBaseStream
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-27 09:11:29 +00:00
copilot-swe-agent[bot]
43aa2bad22 Integrate async/await support from PR #976 as baseline
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-27 09:00:38 +00:00
copilot-swe-agent[bot]
1b2ba921bb Initial plan 2025-10-27 08:48:01 +00:00
Adam Hathcock
f543da0ea8 Merge pull request #977 from adamhathcock/copilot/add-copilot-agent-config
Add Copilot agent manifest and usage documentation
2025-10-27 08:42:20 +00:00
copilot-swe-agent[bot]
e60c9efa84 Add copilot agent configuration and documentation
- Create .github/agents/copilot-agent.yml with agent manifest
- Replace AGENTS.md with agent usage and command documentation

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-25 18:16:47 +00:00
copilot-swe-agent[bot]
c52fc6f240 Initial plan 2025-10-25 18:12:45 +00:00
Adam Hathcock
ee136b024a Merge pull request #974 from adamhathcock/adam/enable-agent
chore: add Copilot coding agent config and CI workflow
2025-10-25 16:09:51 +01:00
Adam Hathcock
699bc5f34b chore: add Copilot coding agent config and CI workflow 2025-10-25 16:05:09 +01:00
Adam Hathcock
9eed8e842c Merge pull request #972 from TwanVanDongen/master
Handle vendor-specific and malformed ZIP extra fields safely
2025-10-25 13:53:10 +01:00
Twan van Dongen
6d652a12ee And again forgot to apply CSharpier. Adds bounds checks to prevent exceptions when extra fields are truncated or non-standard (e.g., 0x4341 "AC"/ARC0). Stops parsing gracefully, allowing other fields to be processed. 2025-10-24 17:18:37 +02:00
66 changed files with 4231 additions and 2767 deletions

7
.copilot-agent.yml Normal file
View File

@@ -0,0 +1,7 @@
enabled: true
agent:
name: copilot-coding-agent
allow:
- paths: ["src/**/*", "tests/**/*", "README.md", "AGENTS.md"]
actions: ["create", "modify"]
require_review_before_merge: true

13
.github/COPILOT_AGENT_README.md vendored Normal file
View File

@@ -0,0 +1,13 @@
# Copilot Coding Agent Configuration
This repository includes a minimal opt-in configuration and CI workflow to allow the GitHub Copilot coding agent to open and validate PRs.
- .copilot-agent.yml: opt-in config for automated agents
- .github/workflows/dotnetcore.yml: CI runs on PRs touching the solution, source, or tests to validate changes
- AGENTS.md: general information for this project
Maintainers can adjust the allowed paths or disable the agent by editing or removing .copilot-agent.yml.
Notes:
- Do not change any other files in the repository.
- If build/test paths are different, update the workflow accordingly; this workflow targets SharpCompress.sln and the SharpCompress.Tests test project.

17
.github/agents/copilot-agent.yml vendored Normal file
View File

@@ -0,0 +1,17 @@
enabled: true
agent:
name: copilot-coding-agent
allow:
- paths: ["src/**/*", "tests/**/*", "README.md", "AGENTS.md"]
actions: ["create", "modify", "delete"]
require_review_before_merge: true
required_approvals: 1
allowed_merge_strategies:
- squash
- merge
auto_merge_on_green: false
run_workflows: true
notes: |
- This manifest expresses the policy for the Copilot coding agent in this repository.
- It does NOT install or authorize the agent; a repository admin must install the Copilot coding agent app and grant the repository the necessary permissions (contents: write, pull_requests: write, checks: write, actions: write/read, issues: write) to allow the agent to act.
- Keep allow paths narrow and prefer require_review_before_merge during initial rollout.

View File

@@ -19,7 +19,7 @@ jobs:
with:
dotnet-version: 8.0.x
- run: dotnet run --project build/build.csproj
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v5
with:
name: ${{ matrix.os }}-sharpcompress.nupkg
path: artifacts/*

View File

@@ -26,6 +26,7 @@ applyTo: '**/*.cs'
- Use CSharpier for all code formatting to ensure consistent style across the project.
- Install CSharpier globally: `dotnet tool install -g csharpier`
- Format files with: `dotnet csharpier format .`
- **ALWAYS run `dotnet csharpier format .` after making code changes before committing.**
- Configure your IDE to format on save using CSharpier.
- CSharpier configuration can be customized via `.csharpierrc` file in the project root.
- Trust CSharpier's opinionated formatting decisions to maintain consistency.

View File

@@ -4,6 +4,8 @@ SharpCompress is a compression library in pure C# for .NET Framework 4.6.2, .NET
The major feature is support for non-seekable streams so large files can be processed on the fly (e.g., a download stream).
**NEW:** All I/O operations now support async/await for improved performance and scalability. See the [Async Usage](#async-usage) section below.
GitHub Actions Build -
[![SharpCompress](https://github.com/adamhathcock/sharpcompress/actions/workflows/dotnetcore.yml/badge.svg)](https://github.com/adamhathcock/sharpcompress/actions/workflows/dotnetcore.yml)
[![Static Badge](https://img.shields.io/badge/API%20Docs-DNDocs-190088?logo=readme&logoColor=white)](https://dndocs.com/d/sharpcompress/api/index.html)
@@ -32,6 +34,82 @@ Hi everyone. I hope you're using SharpCompress and finding it useful. Please giv
Please do not email me directly to ask for help. If you think there is a real issue, please report it here.
## Async Usage
SharpCompress now provides full async/await support for all I/O operations, allowing for better performance and scalability in modern applications.
### Async Reading Examples
Extract entries asynchronously:
```csharp
using (Stream stream = File.OpenRead("archive.zip"))
using (var reader = ReaderFactory.Open(stream))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
// Async extraction
await reader.WriteEntryToDirectoryAsync(
@"C:\temp",
new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
cancellationToken
);
}
}
}
```
Extract all entries to directory asynchronously:
```csharp
using (Stream stream = File.OpenRead("archive.tar.gz"))
using (var reader = ReaderFactory.Open(stream))
{
await reader.WriteAllToDirectoryAsync(
@"C:\temp",
new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
cancellationToken
);
}
```
Open entry stream asynchronously:
```csharp
using (var archive = ZipArchive.Open("archive.zip"))
{
foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
{
using (var entryStream = await entry.OpenEntryStreamAsync(cancellationToken))
{
// Process stream asynchronously
await entryStream.CopyToAsync(outputStream, cancellationToken);
}
}
}
```
### Async Writing Examples
Write files asynchronously:
```csharp
using (Stream stream = File.OpenWrite("output.zip"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
{
await writer.WriteAsync("file1.txt", fileStream, DateTime.Now, cancellationToken);
}
```
Write all files from directory asynchronously:
```csharp
using (Stream stream = File.OpenWrite("output.tar.gz"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)))
{
await writer.WriteAllAsync(@"D:\files", "*", SearchOption.AllDirectories, cancellationToken);
}
```
All async methods support `CancellationToken` for graceful cancellation of long-running operations.
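For example, a long-running extraction can be cancelled via a `CancellationTokenSource`; a minimal sketch (the archive name and five-minute timeout are illustrative):
```csharp
using var cts = new CancellationTokenSource(TimeSpan.FromMinutes(5)); // illustrative timeout

using (Stream stream = File.OpenRead("archive.zip"))
using (var reader = ReaderFactory.Open(stream))
{
    try
    {
        await reader.WriteAllToDirectoryAsync(
            @"C:\temp",
            new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
            cts.Token
        );
    }
    catch (OperationCanceledException)
    {
        // Cancelled or timed out; partially extracted files may remain on disk.
    }
}
```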
## Want to contribute?
I'm always looking for help or ideas. Please submit code or email with ideas. Unfortunately, just letting me know you'd like to help is not enough because I really have no overall plan of what needs to be done. I'll definitely accept code submissions and add you as a member of the project!

View File

@@ -21,6 +21,10 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{CDB425
Directory.Packages.props = Directory.Packages.props
NuGet.config = NuGet.config
.github\workflows\dotnetcore.yml = .github\workflows\dotnetcore.yml
USAGE.md = USAGE.md
README.md = README.md
FORMATS.md = FORMATS.md
AGENTS.md = AGENTS.md
EndProjectSection
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SharpCompress.Performance", "tests\SharpCompress.Performance\SharpCompress.Performance.csproj", "{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}"

143
USAGE.md
View File

@@ -1,5 +1,18 @@
# SharpCompress Usage
## Async/Await Support
SharpCompress now provides full async/await support for all I/O operations. All `Read`, `Write`, and extraction operations have async equivalents ending in `Async` that accept an optional `CancellationToken`. This enables better performance and scalability for I/O-bound operations.
**Key Async Methods:**
- `reader.WriteEntryToAsync(stream, cancellationToken)` - Extract entry asynchronously
- `reader.WriteAllToDirectoryAsync(path, options, cancellationToken)` - Extract all asynchronously
- `writer.WriteAsync(filename, stream, modTime, cancellationToken)` - Write entry asynchronously
- `writer.WriteAllAsync(directory, pattern, searchOption, cancellationToken)` - Write directory asynchronously
- `entry.OpenEntryStreamAsync(cancellationToken)` - Open entry stream asynchronously
See [Async Examples](#async-examples) section below for usage patterns.
## Stream Rules (changed with 0.21)
When dealing with Streams, the rule should be that you don't close a stream you didn't create. This, in effect, should mean you should always put a Stream in a using block to dispose it.
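A minimal sketch of that rule: the code that opens the `FileStream` owns it, so both the stream and the reader sit in `using` blocks (`archive.zip` is illustrative):
```C#
using (Stream stream = File.OpenRead("archive.zip")) // we created this stream, so we dispose it
using (var reader = ReaderFactory.Open(stream))      // the reader is disposed before the stream
{
    while (reader.MoveToNextEntry())
    {
        // process reader.Entry
    }
}
```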
@@ -172,3 +185,133 @@ foreach(var entry in tr.Entries)
Console.WriteLine($"{entry.Key}");
}
```
## Async Examples
### Async Reader Examples
**Extract single entry asynchronously:**
```C#
using (Stream stream = File.OpenRead("archive.zip"))
using (var reader = ReaderFactory.Open(stream))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
using (var entryStream = reader.OpenEntryStream())
{
using (var outputStream = File.Create("output.bin"))
{
await reader.WriteEntryToAsync(outputStream, cancellationToken);
}
}
}
}
}
```
**Extract all entries asynchronously:**
```C#
using (Stream stream = File.OpenRead("archive.tar.gz"))
using (var reader = ReaderFactory.Open(stream))
{
await reader.WriteAllToDirectoryAsync(
@"D:\temp",
new ExtractionOptions()
{
ExtractFullPath = true,
Overwrite = true
},
cancellationToken
);
}
```
**Open and process entry stream asynchronously:**
```C#
using (var archive = ZipArchive.Open("archive.zip"))
{
foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
{
using (var entryStream = await entry.OpenEntryStreamAsync(cancellationToken))
{
// Process the decompressed stream asynchronously
await ProcessStreamAsync(entryStream, cancellationToken);
}
}
}
```
### Async Writer Examples
**Write single file asynchronously:**
```C#
using (Stream archiveStream = File.OpenWrite("output.zip"))
using (var writer = WriterFactory.Open(archiveStream, ArchiveType.Zip, CompressionType.Deflate))
{
using (Stream fileStream = File.OpenRead("input.txt"))
{
await writer.WriteAsync("entry.txt", fileStream, DateTime.Now, cancellationToken);
}
}
```
**Write entire directory asynchronously:**
```C#
using (Stream stream = File.OpenWrite("backup.tar.gz"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)))
{
await writer.WriteAllAsync(
@"D:\files",
"*",
SearchOption.AllDirectories,
cancellationToken
);
}
```
**Write with progress tracking and cancellation:**
```C#
var cts = new CancellationTokenSource();
// Set timeout or cancel from UI
cts.CancelAfter(TimeSpan.FromMinutes(5));
using (Stream stream = File.OpenWrite("archive.zip"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
{
try
{
await writer.WriteAllAsync(@"D:\data", "*", SearchOption.AllDirectories, cts.Token);
}
catch (OperationCanceledException)
{
Console.WriteLine("Operation was cancelled");
}
}
```
### Archive Async Examples
**Extract from archive asynchronously:**
```C#
using (var archive = ZipArchive.Open("archive.zip"))
{
using (var reader = archive.ExtractAllEntries())
{
await reader.WriteAllToDirectoryAsync(
@"C:\output",
new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
cancellationToken
);
}
}
```
**Benefits of Async Operations:**
- Non-blocking I/O for better application responsiveness
- Improved scalability for server applications
- Support for cancellation via CancellationToken
- Better resource utilization in async/await contexts
- Compatible with modern .NET async patterns
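On targets where `DisposeAsync` is available (this changeset excludes .NET Framework and netstandard2.0), entry streams can also be disposed asynchronously with `await using`; a minimal sketch reusing the conventions of the examples above (`outputStream` and `cancellationToken` are assumed to be in scope):
```C#
using (var archive = ZipArchive.Open("archive.zip"))
{
    foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
    {
        // await using calls DisposeAsync, which skips any unread remainder
        // of the entry without blocking a thread.
        await using var entryStream = await entry.OpenEntryStreamAsync(cancellationToken);
        await entryStream.CopyToAsync(outputStream, cancellationToken);
    }
}
```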

View File

@@ -2,7 +2,6 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Factories;
using SharpCompress.IO;
@@ -21,8 +20,9 @@ public static class ArchiveFactory
public static IArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
readerOptions ??= new ReaderOptions();
var factory = FindFactory<IArchiveFactory>(stream);
stream = new SharpCompressStream(stream, bufferSize: readerOptions.BufferSize);
return FindFactory<IArchiveFactory>(stream).Open(stream, readerOptions);
return factory.Open(stream, readerOptions);
}
public static IWritableArchive Create(ArchiveType type)
@@ -117,14 +117,14 @@ public static class ArchiveFactory
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static async Task WriteToDirectoryAsync(
public static void WriteToDirectory(
string sourceArchive,
string destinationDirectory,
ExtractionOptions? options = null
)
{
using var archive = Open(sourceArchive);
await archive.WriteToDirectoryAsync(destinationDirectory, options);
archive.WriteToDirectory(destinationDirectory, options);
}
private static T FindFactory<T>(FileInfo finfo)

View File

@@ -1,5 +1,6 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.GZip;
@@ -21,8 +22,11 @@ public class GZipArchiveEntry : GZipEntry, IArchiveEntry
return Parts.Single().GetCompressedStream().NotNull();
}
public virtual async Task<Stream> OpenEntryStreamAsync() =>
await Task.FromResult(OpenEntryStream());
public virtual Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
{
// GZip synchronous implementation is fast enough, just wrap it
return Task.FromResult(OpenEntryStream());
}
#region IArchiveEntry Members

View File

@@ -1,4 +1,5 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
@@ -13,10 +14,10 @@ public interface IArchiveEntry : IEntry
Stream OpenEntryStream();
/// <summary>
/// Opens the current entry as a stream that will decompress as it is read.
/// Opens the current entry as a stream that will decompress as it is read asynchronously.
/// Read the entire stream or use SkipEntry on EntryStream.
/// </summary>
Task<Stream> OpenEntryStreamAsync();
Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default);
/// <summary>
/// The archive can find all the parts of the archive needed to extract this entry.

View File

@@ -1,5 +1,4 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
@@ -7,7 +6,7 @@ namespace SharpCompress.Archives;
public static class IArchiveEntryExtensions
{
public static async Task WriteToAsync(this IArchiveEntry archiveEntry, Stream streamToWriteTo)
public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWriteTo)
{
if (archiveEntry.IsDirectory)
{
@@ -22,11 +21,11 @@ public static class IArchiveEntryExtensions
archiveEntry.Size,
archiveEntry.CompressedSize
);
var entryStream = await archiveEntry.OpenEntryStreamAsync();
var entryStream = archiveEntry.OpenEntryStream();
using (entryStream)
{
using Stream s = new ListeningStream(streamListener, entryStream);
await s.CopyToAsync(streamToWriteTo);
s.CopyTo(streamToWriteTo);
}
streamListener.FireEntryExtractionEnd(archiveEntry);
}
@@ -34,34 +33,34 @@ public static class IArchiveEntryExtensions
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static async Task WriteEntryToDirectoryAsync(
public static void WriteToDirectory(
this IArchiveEntry entry,
string destinationDirectory,
ExtractionOptions? options = null
) =>
await ExtractionMethods.WriteEntryToDirectoryAsync(
ExtractionMethods.WriteEntryToDirectory(
entry,
destinationDirectory,
options,
entry.WriteToFileAsync
entry.WriteToFile
);
/// <summary>
/// Extract to specific file
/// </summary>
public static Task WriteToFileAsync(
public static void WriteToFile(
this IArchiveEntry entry,
string destinationFileName,
ExtractionOptions? options = null
) =>
ExtractionMethods.WriteEntryToFileAsync(
ExtractionMethods.WriteEntryToFile(
entry,
destinationFileName,
options,
async (x, fm) =>
(x, fm) =>
{
using var fs = File.Open(destinationFileName, fm);
await entry.WriteToAsync(fs);
entry.WriteTo(fs);
}
);
}

View File

@@ -1,8 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
@@ -13,14 +13,14 @@ public static class IArchiveExtensions
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static async Task WriteToDirectoryAsync(
public static void WriteToDirectory(
this IArchive archive,
string destinationDirectory,
ExtractionOptions? options = null
)
{
using var reader = archive.ExtractAllEntries();
await reader.WriteAllToDirectoryAsync(destinationDirectory, options);
reader.WriteAllToDirectory(destinationDirectory, options);
}
/// <summary>
@@ -30,7 +30,7 @@ public static class IArchiveExtensions
/// <param name="destination">The folder to extract into.</param>
/// <param name="progressReport">Optional progress report callback.</param>
/// <param name="cancellationToken">Optional cancellation token.</param>
public static async Task ExtractToDirectoryAsync(
public static void ExtractToDirectory(
this IArchive archive,
string destination,
Action<double>? progressReport = null,
@@ -75,7 +75,7 @@ public static class IArchiveExtensions
// Write file
using var fs = File.OpenWrite(path);
await entry.WriteToAsync(fs);
entry.WriteTo(fs);
// Update progress
bytesRead += entry.Size;

View File

@@ -2,6 +2,7 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
@@ -67,29 +68,27 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
}
}
public Stream OpenEntryStream() =>
throw new NotSupportedException(
"Synchronous extraction is not supported. Use OpenEntryStreamAsync instead."
);
public async Task<Stream> OpenEntryStreamAsync()
public Stream OpenEntryStream()
{
if (IsRarV3)
{
return await RarStream.Create(
return new RarStream(
archive.UnpackV1.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
);
}
return await RarStream.Create(
return new RarStream(
archive.UnpackV2017.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
);
}
public Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default) =>
Task.FromResult(OpenEntryStream());
public bool IsComplete
{
get

View File

@@ -2,7 +2,6 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.SevenZip;
using SharpCompress.Compressors.LZMA.Utilites;
@@ -254,8 +253,8 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
}
}
protected override Task<EntryStream> GetEntryStreamAsync() =>
CreateEntryStreamAsync(
protected override EntryStream GetEntryStream() =>
CreateEntryStream(
new ReadOnlySubStream(
_currentStream.NotNull("currentStream is not null"),
_currentItem?.Size ?? 0

View File

@@ -1,4 +1,5 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.SevenZip;
@@ -11,8 +12,8 @@ public class SevenZipArchiveEntry : SevenZipEntry, IArchiveEntry
public Stream OpenEntryStream() => FilePart.GetCompressedStream();
public virtual async Task<Stream> OpenEntryStreamAsync() =>
await Task.FromResult(OpenEntryStream());
public Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default) =>
Task.FromResult(OpenEntryStream());
public IArchive Archive { get; }

View File

@@ -1,5 +1,6 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar;
@@ -13,8 +14,9 @@ public class TarArchiveEntry : TarEntry, IArchiveEntry
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();
public virtual async Task<Stream> OpenEntryStreamAsync() =>
await Task.FromResult(OpenEntryStream());
public virtual Task<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
) => Task.FromResult(OpenEntryStream());
#region IArchiveEntry Members

View File

@@ -1,5 +1,6 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip;
@@ -12,8 +13,9 @@ public class ZipArchiveEntry : ZipEntry, IArchiveEntry
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();
public virtual async Task<Stream> OpenEntryStreamAsync() =>
await Task.FromResult(OpenEntryStream());
public virtual Task<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
) => Task.FromResult(OpenEntryStream());
#region IArchiveEntry Members

View File

@@ -1,6 +1,7 @@
using System;
using System.IO;
using System.IO.Compression;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -53,11 +54,11 @@ public class EntryStream : Stream, IStreamStack
}
/// <summary>
/// When reading a stream from OpenEntryStream, the stream must be completed so use this to finish reading the entire entry.
/// Asynchronously skip the rest of the entry stream.
/// </summary>
public async Task SkipEntryAsync()
public async Task SkipEntryAsync(CancellationToken cancellationToken = default)
{
await this.SkipAsync();
await this.SkipAsync(cancellationToken).ConfigureAwait(false);
_completed = true;
}
@@ -93,6 +94,40 @@ public class EntryStream : Stream, IStreamStack
_stream.Dispose();
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask DisposeAsync()
{
if (!(_completed || _reader.Cancelled))
{
await SkipEntryAsync().ConfigureAwait(false);
}
//Need a safe, standard approach to this - it's okay for compression streams to over-read; handling needs to be standardised
if (_stream is IStreamStack ss)
{
if (ss.BaseStream() is SharpCompress.Compressors.Deflate.DeflateStream deflateStream)
{
await deflateStream.FlushAsync().ConfigureAwait(false);
}
else if (ss.BaseStream() is SharpCompress.Compressors.LZMA.LzmaStream lzmaStream)
{
await lzmaStream.FlushAsync().ConfigureAwait(false);
}
}
if (_isDisposed)
{
return;
}
_isDisposed = true;
#if DEBUG_STREAMS
this.DebugDispose(typeof(EntryStream));
#endif
await base.DisposeAsync().ConfigureAwait(false);
await _stream.DisposeAsync().ConfigureAwait(false);
}
#endif
public override bool CanRead => true;
public override bool CanSeek => false;
@@ -101,6 +136,8 @@ public class EntryStream : Stream, IStreamStack
public override void Flush() { }
public override Task FlushAsync(CancellationToken cancellationToken) => Task.CompletedTask;
public override long Length => _stream.Length;
public override long Position
@@ -119,6 +156,38 @@ public class EntryStream : Stream, IStreamStack
return read;
}
public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
var read = await _stream
.ReadAsync(buffer, offset, count, cancellationToken)
.ConfigureAwait(false);
if (read <= 0)
{
_completed = true;
}
return read;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
var read = await _stream.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
if (read <= 0)
{
_completed = true;
}
return read;
}
#endif
public override int ReadByte()
{
var value = _stream.ReadByte();

View File

@@ -1,5 +1,6 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common;
@@ -9,11 +10,11 @@ internal static class ExtractionMethods
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static async Task WriteEntryToDirectoryAsync(
public static void WriteEntryToDirectory(
IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Func<string, ExtractionOptions?, Task> write
Action<string, ExtractionOptions?> write
)
{
string destinationFileName;
@@ -78,7 +79,7 @@ internal static class ExtractionMethods
"Entry is trying to write a file outside of the destination directory."
);
}
await write(destinationFileName, options);
write(destinationFileName, options);
}
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
{
@@ -86,11 +87,11 @@ internal static class ExtractionMethods
}
}
public static async Task WriteEntryToFileAsync(
public static void WriteEntryToFile(
IEntry entry,
string destinationFileName,
ExtractionOptions? options,
Func<string, FileMode, Task> openAndWrite
Action<string, FileMode> openAndWrite
)
{
if (entry.LinkTarget != null)
@@ -113,7 +114,118 @@ internal static class ExtractionMethods
fm = FileMode.CreateNew;
}
await openAndWrite(destinationFileName, fm);
openAndWrite(destinationFileName, fm);
entry.PreserveExtractionOptions(destinationFileName, options);
}
}
public static async Task WriteEntryToDirectoryAsync(
IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Func<string, ExtractionOptions?, Task> writeAsync,
CancellationToken cancellationToken = default
)
{
string destinationFileName;
var fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
//check for trailing slash.
if (
fullDestinationDirectoryPath[fullDestinationDirectoryPath.Length - 1]
!= Path.DirectorySeparatorChar
)
{
fullDestinationDirectoryPath += Path.DirectorySeparatorChar;
}
if (!Directory.Exists(fullDestinationDirectoryPath))
{
throw new ExtractionException(
$"Directory does not exist to extract to: {fullDestinationDirectoryPath}"
);
}
options ??= new ExtractionOptions() { Overwrite = true };
var file = Path.GetFileName(entry.Key.NotNull("Entry Key is null")).NotNull("File is null");
file = Utility.ReplaceInvalidFileNameChars(file);
if (options.ExtractFullPath)
{
var folder = Path.GetDirectoryName(entry.Key.NotNull("Entry Key is null"))
.NotNull("Directory is null");
var destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));
if (!Directory.Exists(destdir))
{
if (!destdir.StartsWith(fullDestinationDirectoryPath, StringComparison.Ordinal))
{
throw new ExtractionException(
"Entry is trying to create a directory outside of the destination directory."
);
}
Directory.CreateDirectory(destdir);
}
destinationFileName = Path.Combine(destdir, file);
}
else
{
destinationFileName = Path.Combine(fullDestinationDirectoryPath, file);
}
if (!entry.IsDirectory)
{
destinationFileName = Path.GetFullPath(destinationFileName);
if (
!destinationFileName.StartsWith(
fullDestinationDirectoryPath,
StringComparison.Ordinal
)
)
{
throw new ExtractionException(
"Entry is trying to write a file outside of the destination directory."
);
}
await writeAsync(destinationFileName, options).ConfigureAwait(false);
}
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
{
Directory.CreateDirectory(destinationFileName);
}
}
public static async Task WriteEntryToFileAsync(
IEntry entry,
string destinationFileName,
ExtractionOptions? options,
Func<string, FileMode, Task> openAndWriteAsync,
CancellationToken cancellationToken = default
)
{
if (entry.LinkTarget != null)
{
if (options?.WriteSymbolicLink is null)
{
throw new ExtractionException(
"Entry is a symbolic link but ExtractionOptions.WriteSymbolicLink delegate is null"
);
}
options.WriteSymbolicLink(destinationFileName, entry.LinkTarget);
}
else
{
var fm = FileMode.Create;
options ??= new ExtractionOptions() { Overwrite = true };
if (!options.Overwrite)
{
fm = FileMode.CreateNew;
}
await openAndWriteAsync(destinationFileName, fm).ConfigureAwait(false);
entry.PreserveExtractionOptions(destinationFileName, options);
}
}

View File

@@ -66,6 +66,36 @@ internal class TarReadOnlySubStream : SharpCompressStream, IStreamStack
base.Dispose(disposing);
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async System.Threading.Tasks.ValueTask DisposeAsync()
{
if (_isDisposed)
{
return;
}
_isDisposed = true;
#if DEBUG_STREAMS
this.DebugDispose(typeof(TarReadOnlySubStream));
#endif
// Ensure we read all remaining blocks for this entry.
await Stream.SkipAsync(BytesLeftToRead).ConfigureAwait(false);
_amountRead += BytesLeftToRead;
// If the last block wasn't a full 512 bytes, skip the remaining padding bytes.
var bytesInLastBlock = _amountRead % 512;
if (bytesInLastBlock != 0)
{
await Stream.SkipAsync(512 - bytesInLastBlock).ConfigureAwait(false);
}
// Call base Dispose instead of base DisposeAsync to avoid double disposal
base.Dispose(true);
GC.SuppressFinalize(this);
}
#endif
private long BytesLeftToRead { get; set; }
public override bool CanRead => true;
@@ -76,6 +106,10 @@ internal class TarReadOnlySubStream : SharpCompressStream, IStreamStack
public override void Flush() { }
public override System.Threading.Tasks.Task FlushAsync(
System.Threading.CancellationToken cancellationToken
) => System.Threading.Tasks.Task.CompletedTask;
public override long Length => throw new NotSupportedException();
public override long Position
@@ -114,6 +148,48 @@ internal class TarReadOnlySubStream : SharpCompressStream, IStreamStack
return value;
}
public override async System.Threading.Tasks.Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
System.Threading.CancellationToken cancellationToken
)
{
if (BytesLeftToRead < count)
{
count = (int)BytesLeftToRead;
}
var read = await Stream
.ReadAsync(buffer, offset, count, cancellationToken)
.ConfigureAwait(false);
if (read > 0)
{
BytesLeftToRead -= read;
_amountRead += read;
}
return read;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async System.Threading.Tasks.ValueTask<int> ReadAsync(
System.Memory<byte> buffer,
System.Threading.CancellationToken cancellationToken = default
)
{
if (BytesLeftToRead < buffer.Length)
{
buffer = buffer.Slice(0, (int)BytesLeftToRead);
}
var read = await Stream.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
if (read > 0)
{
BytesLeftToRead -= read;
_amountRead += read;
}
return read;
}
#endif
public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
public override void SetLength(long value) => throw new NotSupportedException();

View File

@@ -91,8 +91,15 @@ internal abstract class ZipFileEntry : ZipHeader
protected void LoadExtra(byte[] extra)
{
for (var i = 0; i < extra.Length - 4; )
for (var i = 0; i < extra.Length; )
{
// Ensure we have at least a header (2-byte ID + 2-byte length)
if (i + 4 > extra.Length)
{
// Incomplete header — stop parsing extras
break;
}
var type = (ExtraDataType)BinaryPrimitives.ReadUInt16LittleEndian(extra.AsSpan(i));
if (!Enum.IsDefined(typeof(ExtraDataType), type))
{
@@ -106,7 +113,17 @@ internal abstract class ZipFileEntry : ZipHeader
if (length > extra.Length)
{
// bad extras block
return;
break; // allow processing optional other blocks
}
// Some ZIP files contain vendor-specific or malformed extra fields where the declared
// data length extends beyond the remaining buffer. This adjustment ensures that
// we only read data within bounds (i + 4 + length <= extra.Length)
// The example here is: 41 43 18 00 41 52 43 30 46 EB FF FF 51 29 03 C6 03 00 00 00 00 00 00 00 00
// No existing zip utility uses 0x4341 ('AC')
if (i + 4 + length > extra.Length)
{
// incomplete or corrupt field
break; // allow processing optional other blocks
}
var data = new byte[length];

View File

@@ -28,6 +28,7 @@ using System;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Deflate;
@@ -289,6 +290,34 @@ public class DeflateStream : Stream, IStreamStack
_baseStream.Flush();
}
public override async Task FlushAsync(CancellationToken cancellationToken)
{
if (_disposed)
{
throw new ObjectDisposedException("DeflateStream");
}
await _baseStream.FlushAsync(cancellationToken).ConfigureAwait(false);
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask DisposeAsync()
{
if (_disposed)
{
return;
}
_disposed = true;
if (_baseStream != null)
{
await _baseStream.DisposeAsync().ConfigureAwait(false);
}
#if DEBUG_STREAMS
this.DebugDispose(typeof(DeflateStream));
#endif
await base.DisposeAsync().ConfigureAwait(false);
}
#endif
/// <summary>
/// Read data from the stream.
/// </summary>
@@ -325,6 +354,36 @@ public class DeflateStream : Stream, IStreamStack
return _baseStream.Read(buffer, offset, count);
}
public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
if (_disposed)
{
throw new ObjectDisposedException("DeflateStream");
}
return await _baseStream
.ReadAsync(buffer, offset, count, cancellationToken)
.ConfigureAwait(false);
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
if (_disposed)
{
throw new ObjectDisposedException("DeflateStream");
}
return await _baseStream.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
}
#endif
public override int ReadByte()
{
if (_disposed)
@@ -386,6 +445,36 @@ public class DeflateStream : Stream, IStreamStack
_baseStream.Write(buffer, offset, count);
}
public override async Task WriteAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
if (_disposed)
{
throw new ObjectDisposedException("DeflateStream");
}
await _baseStream
.WriteAsync(buffer, offset, count, cancellationToken)
.ConfigureAwait(false);
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask WriteAsync(
ReadOnlyMemory<byte> buffer,
CancellationToken cancellationToken = default
)
{
if (_disposed)
{
throw new ObjectDisposedException("DeflateStream");
}
await _baseStream.WriteAsync(buffer, cancellationToken).ConfigureAwait(false);
}
#endif
public override void WriteByte(byte value)
{
if (_disposed)

View File

@@ -30,6 +30,8 @@ using System;
using System.Buffers.Binary;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Deflate;
@@ -257,6 +259,15 @@ public class GZipStream : Stream, IStreamStack
BaseStream.Flush();
}
public override async Task FlushAsync(CancellationToken cancellationToken)
{
if (_disposed)
{
throw new ObjectDisposedException("GZipStream");
}
await BaseStream.FlushAsync(cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Read and decompress data from the source stream.
/// </summary>
@@ -309,6 +320,54 @@ public class GZipStream : Stream, IStreamStack
return n;
}
public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
if (_disposed)
{
throw new ObjectDisposedException("GZipStream");
}
var n = await BaseStream
.ReadAsync(buffer, offset, count, cancellationToken)
.ConfigureAwait(false);
if (!_firstReadDone)
{
_firstReadDone = true;
FileName = BaseStream._GzipFileName;
Comment = BaseStream._GzipComment;
LastModified = BaseStream._GzipMtime;
}
return n;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
if (_disposed)
{
throw new ObjectDisposedException("GZipStream");
}
var n = await BaseStream.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
if (!_firstReadDone)
{
_firstReadDone = true;
FileName = BaseStream._GzipFileName;
Comment = BaseStream._GzipComment;
LastModified = BaseStream._GzipMtime;
}
return n;
}
#endif
/// <summary>
/// Calling this method always throws a <see cref="NotImplementedException"/>.
/// </summary>
@@ -368,6 +427,77 @@ public class GZipStream : Stream, IStreamStack
BaseStream.Write(buffer, offset, count);
}
public override async Task WriteAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
if (_disposed)
{
throw new ObjectDisposedException("GZipStream");
}
if (BaseStream._streamMode == ZlibBaseStream.StreamMode.Undefined)
{
if (BaseStream._wantCompress)
{
// first write in compression, therefore, emit the GZIP header
_headerByteCount = EmitHeader();
}
else
{
throw new InvalidOperationException();
}
}
await BaseStream.WriteAsync(buffer, offset, count, cancellationToken).ConfigureAwait(false);
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask WriteAsync(
ReadOnlyMemory<byte> buffer,
CancellationToken cancellationToken = default
)
{
if (_disposed)
{
throw new ObjectDisposedException("GZipStream");
}
if (BaseStream._streamMode == ZlibBaseStream.StreamMode.Undefined)
{
if (BaseStream._wantCompress)
{
// first write in compression, therefore, emit the GZIP header
_headerByteCount = EmitHeader();
}
else
{
throw new InvalidOperationException();
}
}
await BaseStream.WriteAsync(buffer, cancellationToken).ConfigureAwait(false);
}
public override async ValueTask DisposeAsync()
{
if (_disposed)
{
return;
}
_disposed = true;
if (BaseStream != null)
{
await BaseStream.DisposeAsync().ConfigureAwait(false);
}
#if DEBUG_STREAMS
this.DebugDispose(typeof(GZipStream));
#endif
await base.DisposeAsync().ConfigureAwait(false);
}
#endif
#endregion Stream methods
public string? Comment

View File

@@ -31,6 +31,8 @@ using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
@@ -197,6 +199,69 @@ internal class ZlibBaseStream : Stream, IStreamStack
} while (!done);
}
public override async Task WriteAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
// workitem 7159
// calculate the CRC on the uncompressed data (before writing)
if (crc != null)
{
crc.SlurpBlock(buffer, offset, count);
}
if (_streamMode == StreamMode.Undefined)
{
_streamMode = StreamMode.Writer;
}
else if (_streamMode != StreamMode.Writer)
{
throw new ZlibException("Cannot Write after Reading.");
}
if (count == 0)
{
return;
}
// first reference of z property will initialize the private var _z
z.InputBuffer = buffer;
_z.NextIn = offset;
_z.AvailableBytesIn = count;
var done = false;
do
{
_z.OutputBuffer = workingBuffer;
_z.NextOut = 0;
_z.AvailableBytesOut = _workingBuffer.Length;
var rc = (_wantCompress) ? _z.Deflate(_flushMode) : _z.Inflate(_flushMode);
if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
{
throw new ZlibException((_wantCompress ? "de" : "in") + "flating: " + _z.Message);
}
await _stream
.WriteAsync(
_workingBuffer,
0,
_workingBuffer.Length - _z.AvailableBytesOut,
cancellationToken
)
.ConfigureAwait(false);
done = _z.AvailableBytesIn == 0 && _z.AvailableBytesOut != 0;
// If GZIP and de-compress, we're done when 8 bytes remain.
if (_flavor == ZlibStreamFlavor.GZIP && !_wantCompress)
{
done = (_z.AvailableBytesIn == 8 && _z.AvailableBytesOut != 0);
}
} while (!done);
}
private void finish()
{
if (_z is null)
@@ -335,6 +400,111 @@ internal class ZlibBaseStream : Stream, IStreamStack
}
}
private async Task finishAsync(CancellationToken cancellationToken = default)
{
if (_z is null)
{
return;
}
if (_streamMode == StreamMode.Writer)
{
var done = false;
do
{
_z.OutputBuffer = workingBuffer;
_z.NextOut = 0;
_z.AvailableBytesOut = _workingBuffer.Length;
var rc =
(_wantCompress) ? _z.Deflate(FlushType.Finish) : _z.Inflate(FlushType.Finish);
if (rc != ZlibConstants.Z_STREAM_END && rc != ZlibConstants.Z_OK)
{
var verb = (_wantCompress ? "de" : "in") + "flating";
if (_z.Message is null)
{
throw new ZlibException(String.Format("{0}: (rc = {1})", verb, rc));
}
throw new ZlibException(verb + ": " + _z.Message);
}
if (_workingBuffer.Length - _z.AvailableBytesOut > 0)
{
await _stream
.WriteAsync(
_workingBuffer,
0,
_workingBuffer.Length - _z.AvailableBytesOut,
cancellationToken
)
.ConfigureAwait(false);
}
done = _z.AvailableBytesIn == 0 && _z.AvailableBytesOut != 0;
// If GZIP and de-compress, we're done when 8 bytes remain.
if (_flavor == ZlibStreamFlavor.GZIP && !_wantCompress)
{
done = (_z.AvailableBytesIn == 8 && _z.AvailableBytesOut != 0);
}
} while (!done);
await FlushAsync(cancellationToken).ConfigureAwait(false);
// workitem 7159
if (_flavor == ZlibStreamFlavor.GZIP)
{
if (_wantCompress)
{
// Emit the GZIP trailer: CRC32 and size mod 2^32
byte[] intBuf = new byte[4];
BinaryPrimitives.WriteInt32LittleEndian(intBuf, crc.Crc32Result);
await _stream.WriteAsync(intBuf, 0, 4, cancellationToken).ConfigureAwait(false);
var c2 = (int)(crc.TotalBytesRead & 0x00000000FFFFFFFF);
BinaryPrimitives.WriteInt32LittleEndian(intBuf, c2);
await _stream.WriteAsync(intBuf, 0, 4, cancellationToken).ConfigureAwait(false);
}
else
{
throw new ZlibException("Writing with decompression is not supported.");
}
}
}
// workitem 7159
else if (_streamMode == StreamMode.Reader)
{
if (_flavor == ZlibStreamFlavor.GZIP)
{
if (!_wantCompress)
{
// workitem 8501: handle edge case (decompress empty stream)
if (_z.TotalBytesOut == 0L)
{
return;
}
// Read and potentially verify the GZIP trailer: CRC32 and size mod 2^32
byte[] trailer = new byte[8];
// workitem 8679
if (_z.AvailableBytesIn != 8)
{
// Make sure we have read to the end of the stream
_z.InputBuffer.AsSpan(_z.NextIn, _z.AvailableBytesIn).CopyTo(trailer);
var bytesNeeded = 8 - _z.AvailableBytesIn;
var bytesRead = await _stream
.ReadAsync(trailer, _z.AvailableBytesIn, bytesNeeded, cancellationToken)
.ConfigureAwait(false);
}
}
else
{
throw new ZlibException("Reading with compression is not supported.");
}
}
}
}
private void end()
{
if (z is null)
@@ -382,6 +552,38 @@ internal class ZlibBaseStream : Stream, IStreamStack
}
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask DisposeAsync()
{
if (isDisposed)
{
return;
}
isDisposed = true;
#if DEBUG_STREAMS
this.DebugDispose(typeof(ZlibBaseStream));
#endif
await base.DisposeAsync().ConfigureAwait(false);
if (_stream is null)
{
return;
}
try
{
await finishAsync().ConfigureAwait(false);
}
finally
{
end();
if (_stream != null)
{
await _stream.DisposeAsync().ConfigureAwait(false);
_stream = null;
}
}
}
#endif
public override void Flush()
{
_stream.Flush();
@@ -390,6 +592,14 @@ internal class ZlibBaseStream : Stream, IStreamStack
z.AvailableBytesIn = 0;
}
public override async Task FlushAsync(CancellationToken cancellationToken)
{
await _stream.FlushAsync(cancellationToken).ConfigureAwait(false);
//rewind the buffer
((IStreamStack)this).Rewind(z.AvailableBytesIn); //unused
z.AvailableBytesIn = 0;
}
public override Int64 Seek(Int64 offset, SeekOrigin origin) =>
throw new NotSupportedException();
@@ -436,6 +646,31 @@ internal class ZlibBaseStream : Stream, IStreamStack
return _encoding.GetString(buffer, 0, buffer.Length);
}
private async Task<string> ReadZeroTerminatedStringAsync(CancellationToken cancellationToken)
{
var list = new List<byte>();
var done = false;
do
{
// workitem 7740
var n = await _stream.ReadAsync(_buf1, 0, 1, cancellationToken).ConfigureAwait(false);
if (n != 1)
{
throw new ZlibException("Unexpected EOF reading GZIP header.");
}
if (_buf1[0] == 0)
{
done = true;
}
else
{
list.Add(_buf1[0]);
}
} while (!done);
var buffer = list.ToArray();
return _encoding.GetString(buffer, 0, buffer.Length);
}
private int _ReadAndValidateGzipHeader()
{
var totalBytesRead = 0;
@@ -494,6 +729,68 @@ internal class ZlibBaseStream : Stream, IStreamStack
return totalBytesRead;
}
private async Task<int> _ReadAndValidateGzipHeaderAsync(CancellationToken cancellationToken)
{
var totalBytesRead = 0;
// read the header on the first read
byte[] header = new byte[10];
var n = await _stream.ReadAsync(header, 0, 10, cancellationToken).ConfigureAwait(false);
// workitem 8501: handle edge case (decompress empty stream)
if (n == 0)
{
return 0;
}
if (n != 10)
{
throw new ZlibException("Not a valid GZIP stream.");
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
throw new ZlibException("Bad GZIP header.");
}
var timet = BinaryPrimitives.ReadInt32LittleEndian(header.AsSpan(4));
_GzipMtime = TarHeader.EPOCH.AddSeconds(timet);
totalBytesRead += n;
if ((header[3] & 0x04) == 0x04)
{
// read and discard extra field
n = await _stream.ReadAsync(header, 0, 2, cancellationToken).ConfigureAwait(false); // 2-byte length field
totalBytesRead += n;
var extraLength = (short)(header[0] + header[1] * 256);
var extra = new byte[extraLength];
n = await _stream
.ReadAsync(extra, 0, extra.Length, cancellationToken)
.ConfigureAwait(false);
if (n != extraLength)
{
throw new ZlibException("Unexpected end-of-file reading GZIP header.");
}
totalBytesRead += n;
}
if ((header[3] & 0x08) == 0x08)
{
_GzipFileName = await ReadZeroTerminatedStringAsync(cancellationToken)
.ConfigureAwait(false);
}
if ((header[3] & 0x10) == 0x010)
{
_GzipComment = await ReadZeroTerminatedStringAsync(cancellationToken)
.ConfigureAwait(false);
}
if ((header[3] & 0x02) == 0x02)
{
await _stream.ReadAsync(_buf1, 0, 1, cancellationToken).ConfigureAwait(false); // CRC16, ignore
}
return totalBytesRead;
}
public override Int32 Read(Byte[] buffer, Int32 offset, Int32 count)
{
// According to MS documentation, any implementation of the IO.Stream.Read function must:
@@ -678,6 +975,220 @@ internal class ZlibBaseStream : Stream, IStreamStack
return rc;
}
public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
// According to MS documentation, any implementation of the IO.Stream.Read function must:
// (a) throw an exception if offset & count reference an invalid part of the buffer,
// or if count < 0, or if buffer is null
// (b) return 0 only upon EOF, or if count = 0
// (c) if not EOF, then return at least 1 byte, up to <count> bytes
if (_streamMode == StreamMode.Undefined)
{
if (!_stream.CanRead)
{
throw new ZlibException("The stream is not readable.");
}
// for the first read, set up some controls.
_streamMode = StreamMode.Reader;
// (The first reference to _z goes through the private accessor which
// may initialize it.)
z.AvailableBytesIn = 0;
if (_flavor == ZlibStreamFlavor.GZIP)
{
_gzipHeaderByteCount = await _ReadAndValidateGzipHeaderAsync(cancellationToken)
.ConfigureAwait(false);
// workitem 8501: handle edge case (decompress empty stream)
if (_gzipHeaderByteCount == 0)
{
return 0;
}
}
}
if (_streamMode != StreamMode.Reader)
{
throw new ZlibException("Cannot Read after Writing.");
}
var rc = 0;
// set up the output of the deflate/inflate codec:
_z.OutputBuffer = buffer;
_z.NextOut = offset;
_z.AvailableBytesOut = count;
if (count == 0)
{
return 0;
}
if (nomoreinput && _wantCompress)
{
// no more input data available; therefore we flush to
// try to complete the read
rc = _z.Deflate(FlushType.Finish);
if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
{
throw new ZlibException(
String.Format("Deflating: rc={0} msg={1}", rc, _z.Message)
);
}
rc = (count - _z.AvailableBytesOut);
// calculate CRC after reading
if (crc != null)
{
crc.SlurpBlock(buffer, offset, rc);
}
return rc;
}
if (buffer is null)
{
throw new ArgumentNullException(nameof(buffer));
}
if (count < 0)
{
throw new ArgumentOutOfRangeException(nameof(count));
}
if (offset < buffer.GetLowerBound(0))
{
throw new ArgumentOutOfRangeException(nameof(offset));
}
if ((offset + count) > buffer.GetLength(0))
{
throw new ArgumentOutOfRangeException(nameof(count));
}
// This is necessary in case _workingBuffer has been resized. (new byte[])
// (The first reference to _workingBuffer goes through the private accessor which
// may initialize it.)
_z.InputBuffer = workingBuffer;
do
{
// need data in _workingBuffer in order to deflate/inflate. Here, we check if we have any.
if ((_z.AvailableBytesIn == 0) && (!nomoreinput))
{
// No data available, so try to Read data from the captive stream.
_z.NextIn = 0;
_z.AvailableBytesIn = await _stream
.ReadAsync(_workingBuffer, 0, _workingBuffer.Length, cancellationToken)
.ConfigureAwait(false);
if (_z.AvailableBytesIn == 0)
{
nomoreinput = true;
}
}
// we have data in InputBuffer; now compress or decompress as appropriate
rc = (_wantCompress) ? _z.Deflate(_flushMode) : _z.Inflate(_flushMode);
if (nomoreinput && (rc == ZlibConstants.Z_BUF_ERROR))
{
return 0;
}
if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
{
throw new ZlibException(
String.Format(
"{0}flating: rc={1} msg={2}",
(_wantCompress ? "de" : "in"),
rc,
_z.Message
)
);
}
if (
(nomoreinput || rc == ZlibConstants.Z_STREAM_END) && (_z.AvailableBytesOut == count)
)
{
break; // nothing more to read
}
} //while (_z.AvailableBytesOut == count && rc == ZlibConstants.Z_OK);
while (_z.AvailableBytesOut > 0 && !nomoreinput && rc == ZlibConstants.Z_OK);
// workitem 8557
// is there more room in output?
if (_z.AvailableBytesOut > 0)
{
if (rc == ZlibConstants.Z_OK && _z.AvailableBytesIn == 0)
{
// deferred
}
// are we completely done reading?
if (nomoreinput)
{
// and in compression?
if (_wantCompress)
{
// no more input data available; therefore we flush to
// try to complete the read
rc = _z.Deflate(FlushType.Finish);
if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
{
throw new ZlibException(
String.Format("Deflating: rc={0} msg={1}", rc, _z.Message)
);
}
}
}
}
rc = (count - _z.AvailableBytesOut);
// calculate CRC after reading
if (crc != null)
{
crc.SlurpBlock(buffer, offset, rc);
}
if (rc == ZlibConstants.Z_STREAM_END && z.AvailableBytesIn != 0 && !_wantCompress)
{
//rewind the buffer
((IStreamStack)this).Rewind(z.AvailableBytesIn); //unused
z.AvailableBytesIn = 0;
}
return rc;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
// Use ArrayPool to rent a buffer and delegate to byte[] ReadAsync
byte[] array = System.Buffers.ArrayPool<byte>.Shared.Rent(buffer.Length);
try
{
int read = await ReadAsync(array, 0, buffer.Length, cancellationToken)
.ConfigureAwait(false);
array.AsSpan(0, read).CopyTo(buffer.Span);
return read;
}
finally
{
System.Buffers.ArrayPool<byte>.Shared.Return(array);
}
}
#endif
public override Boolean CanRead => _stream.CanRead;
public override Boolean CanSeek => _stream.CanSeek;

View File

@@ -28,6 +28,8 @@
using System;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Deflate;
@@ -266,6 +268,34 @@ public class ZlibStream : Stream, IStreamStack
_baseStream.Flush();
}
public override async Task FlushAsync(CancellationToken cancellationToken)
{
if (_disposed)
{
throw new ObjectDisposedException("ZlibStream");
}
await _baseStream.FlushAsync(cancellationToken).ConfigureAwait(false);
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask DisposeAsync()
{
if (_disposed)
{
return;
}
_disposed = true;
if (_baseStream != null)
{
await _baseStream.DisposeAsync().ConfigureAwait(false);
}
#if DEBUG_STREAMS
this.DebugDispose(typeof(ZlibStream));
#endif
await base.DisposeAsync().ConfigureAwait(false);
}
#endif
/// <summary>
/// Read data from the stream.
/// </summary>
@@ -301,6 +331,36 @@ public class ZlibStream : Stream, IStreamStack
return _baseStream.Read(buffer, offset, count);
}
public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
if (_disposed)
{
throw new ObjectDisposedException("ZlibStream");
}
return await _baseStream
.ReadAsync(buffer, offset, count, cancellationToken)
.ConfigureAwait(false);
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
if (_disposed)
{
throw new ObjectDisposedException("ZlibStream");
}
return await _baseStream.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
}
#endif
public override int ReadByte()
{
if (_disposed)
@@ -355,6 +415,36 @@ public class ZlibStream : Stream, IStreamStack
_baseStream.Write(buffer, offset, count);
}
public override async Task WriteAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
if (_disposed)
{
throw new ObjectDisposedException("ZlibStream");
}
await _baseStream
.WriteAsync(buffer, offset, count, cancellationToken)
.ConfigureAwait(false);
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask WriteAsync(
ReadOnlyMemory<byte> buffer,
CancellationToken cancellationToken = default
)
{
if (_disposed)
{
throw new ObjectDisposedException("ZlibStream");
}
await _baseStream.WriteAsync(buffer, cancellationToken).ConfigureAwait(false);
}
#endif
public override void WriteByte(byte value)
{
if (_disposed)

View File

@@ -1,18 +1,12 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Compressors.Rar;
internal interface IRarUnpack
{
#if NETSTANDARD2_0 || NETFRAMEWORK
void DoUnpack(FileHeader fileHeader, Stream readStream, Stream writeStream);
void DoUnpack();
#else
ValueTask DoUnpackAsync(FileHeader fileHeader, Stream readStream, Stream writeStream);
ValueTask DoUnpackAsync();
#endif
// eg u/i pause/resume button
bool Suspended { get; set; }

View File

@@ -1,7 +1,6 @@
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
@@ -10,17 +9,6 @@ namespace SharpCompress.Compressors.Rar;
internal class RarBLAKE2spStream : RarStream, IStreamStack
{
public static async ValueTask<RarBLAKE2spStream> Create(
IRarUnpack unpack,
FileHeader fileHeader,
MultiVolumeReadOnlyStream readStream
)
{
var rs = new RarBLAKE2spStream(unpack, fileHeader, readStream);
await RarStream.Initialize(rs, unpack, fileHeader, readStream);
return rs;
}
#if DEBUG_STREAMS
long IStreamStack.InstanceId { get; set; }
#endif
@@ -115,7 +103,7 @@ internal class RarBLAKE2spStream : RarStream, IStreamStack
byte[] _hash = { };
protected RarBLAKE2spStream(
public RarBLAKE2spStream(
IRarUnpack unpack,
FileHeader fileHeader,
MultiVolumeReadOnlyStream readStream

View File

@@ -1,8 +1,8 @@
#nullable disable
using System;
using System.Buffers;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
@@ -36,47 +36,18 @@ internal class RarStream : Stream, IStreamStack
private bool fetch;
private byte[]? tmpBuffer = ArrayPool<byte>.Shared.Rent(65536);
private byte[] tmpBuffer = ArrayPool<byte>.Shared.Rent(65536);
private int tmpOffset;
private int tmpCount;
private byte[]? outBuffer;
private byte[] outBuffer;
private int outOffset;
private int outCount;
private int outTotal;
private bool isDisposed;
private long _position;
public static async ValueTask<RarStream> Create(
IRarUnpack unpack,
FileHeader fileHeader,
Stream readStream
)
{
var rs = new RarStream(unpack, fileHeader, readStream);
await Initialize(rs, unpack, fileHeader, readStream);
return rs;
}
internal static async ValueTask Initialize(
RarStream rs,
IRarUnpack unpack,
FileHeader fileHeader,
Stream readStream
)
{
rs.fetch = true;
#if !NETSTANDARD2_0 && !NETFRAMEWORK
await unpack.DoUnpackAsync(fileHeader, readStream, rs);
#else
unpack.DoUnpack(fileHeader, readStream, rs);
await Task.CompletedTask;
#endif
rs.fetch = false;
rs._position = 0;
}
protected RarStream(IRarUnpack unpack, FileHeader fileHeader, Stream readStream)
public RarStream(IRarUnpack unpack, FileHeader fileHeader, Stream readStream)
{
this.unpack = unpack;
this.fileHeader = fileHeader;
@@ -85,6 +56,11 @@ internal class RarStream : Stream, IStreamStack
#if DEBUG_STREAMS
this.DebugConstruct(typeof(RarStream));
#endif
fetch = true;
unpack.DoUnpack(fileHeader, readStream, this);
fetch = false;
_position = 0;
}
protected override void Dispose(bool disposing)
@@ -96,11 +72,8 @@ internal class RarStream : Stream, IStreamStack
#if DEBUG_STREAMS
this.DebugDispose(typeof(RarStream));
#endif
if (tmpBuffer != null)
{
ArrayPool<byte>.Shared.Return(this.tmpBuffer);
this.tmpBuffer = null;
}
ArrayPool<byte>.Shared.Return(this.tmpBuffer);
this.tmpBuffer = null;
}
isDisposed = true;
base.Dispose(disposing);
@@ -108,26 +81,6 @@ internal class RarStream : Stream, IStreamStack
}
}
#if !NETSTANDARD2_0 && !NETFRAMEWORK
public override async ValueTask DisposeAsync()
{
if (!isDisposed)
{
#if DEBUG_STREAMS
this.DebugDispose(typeof(RarStream));
#endif
if (tmpBuffer != null)
{
ArrayPool<byte>.Shared.Return(this.tmpBuffer);
this.tmpBuffer = null;
}
isDisposed = true;
await readStream.DisposeAsync().ConfigureAwait(false);
}
await base.DisposeAsync().ConfigureAwait(false);
}
#endif
public override bool CanRead => true;
public override bool CanSeek => false;
@@ -136,8 +89,6 @@ internal class RarStream : Stream, IStreamStack
public override void Flush() { }
public override Task FlushAsync(CancellationToken cancellationToken) => Task.CompletedTask;
public override long Length => fileHeader.UncompressedSize;
//commented out code always returned the length of the file
@@ -147,96 +98,8 @@ internal class RarStream : Stream, IStreamStack
set => throw new NotSupportedException();
}
#if !NETSTANDARD2_0 && !NETFRAMEWORK
public override Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
var bytesRead = Read(buffer, offset, count);
return Task.FromResult(bytesRead);
}
catch (Exception ex)
{
return Task.FromException<int>(ex);
}
}
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
outTotal = 0;
var count = buffer.Length;
var offset = 0;
if (tmpCount > 0)
{
var toCopy = tmpCount < count ? tmpCount : count;
tmpBuffer.AsSpan(tmpOffset, toCopy).CopyTo(buffer.Span.Slice(offset, toCopy));
tmpOffset += toCopy;
tmpCount -= toCopy;
offset += toCopy;
count -= toCopy;
outTotal += toCopy;
}
if (count > 0 && unpack.DestSize > 0)
{
// Create a temporary array for the unpack operation
var tempArray = ArrayPool<byte>.Shared.Rent(count);
try
{
outBuffer = tempArray;
outOffset = 0;
outCount = count;
fetch = true;
await unpack.DoUnpackAsync();
fetch = false;
// Copy the unpacked data to the memory buffer
var unpacked = outTotal - (tmpCount > 0 ? offset : 0);
if (unpacked > 0)
{
tempArray.AsSpan(0, unpacked).CopyTo(buffer.Span.Slice(offset, unpacked));
}
}
finally
{
ArrayPool<byte>.Shared.Return(tempArray);
outBuffer = null;
}
}
_position += outTotal;
if (count > 0 && outTotal == 0 && _position != Length)
{
// sanity check, eg if we try to decompress a redir entry
throw new InvalidOperationException(
$"unpacked file size does not match header: expected {Length} found {_position}"
);
}
return outTotal;
}
public override int Read(byte[] buffer, int offset, int count) =>
throw new NotSupportedException("Use ReadAsync or ReadAsync(Memory<byte>) instead.");
#else
public override int Read(byte[] buffer, int offset, int count)
{
if (tmpBuffer == null)
{
throw new ObjectDisposedException(nameof(RarStream));
}
outTotal = 0;
if (tmpCount > 0)
{
@@ -267,7 +130,6 @@ internal class RarStream : Stream, IStreamStack
}
return outTotal;
}
#endif
public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
@@ -275,14 +137,6 @@ internal class RarStream : Stream, IStreamStack
public override void Write(byte[] buffer, int offset, int count)
{
if (tmpBuffer == null)
{
throw new ObjectDisposedException(nameof(RarStream));
}
if (outBuffer == null)
{
throw new ObjectDisposedException(nameof(RarStream));
}
if (!fetch)
{
throw new NotSupportedException();
@@ -311,81 +165,8 @@ internal class RarStream : Stream, IStreamStack
}
}
public override Task WriteAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
Write(buffer, offset, count);
return Task.CompletedTask;
}
catch (Exception ex)
{
return Task.FromException(ex);
}
}
#if !NETSTANDARD2_0 && !NETFRAMEWORK
public override ValueTask WriteAsync(
ReadOnlyMemory<byte> buffer,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
if (!fetch)
{
throw new NotSupportedException();
}
var count = buffer.Length;
var offset = 0;
if (outCount > 0)
{
var toCopy = outCount < count ? outCount : count;
buffer.Span.Slice(offset, toCopy).CopyTo(outBuffer.AsSpan(outOffset, toCopy));
outOffset += toCopy;
outCount -= toCopy;
offset += toCopy;
count -= toCopy;
outTotal += toCopy;
}
if (count > 0)
{
EnsureBufferCapacity(count);
buffer.Span.Slice(offset, count).CopyTo(tmpBuffer.AsSpan(tmpCount, count));
tmpCount += count;
tmpOffset = 0;
unpack.Suspended = true;
}
else
{
unpack.Suspended = false;
}
return ValueTask.CompletedTask;
}
catch (Exception ex)
{
return new ValueTask(Task.FromException(ex));
}
}
#endif
private void EnsureBufferCapacity(int count)
{
if (tmpBuffer == null)
{
throw new ObjectDisposedException(nameof(RarStream));
}
if (this.tmpBuffer.Length < this.tmpCount + count)
{
var newLength =

View File

@@ -4,7 +4,6 @@ using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressors.PPMd.H;
@@ -156,20 +155,6 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
DoUnpack();
}
#if !NETSTANDARD2_0 && !NETFRAMEWORK
public ValueTask DoUnpackAsync()
{
DoUnpack();
return ValueTask.CompletedTask;
}
public ValueTask DoUnpackAsync(FileHeader fileHeader, Stream readStream, Stream writeStream)
{
DoUnpack(fileHeader, readStream, writeStream);
return ValueTask.CompletedTask;
}
#endif
public void DoUnpack()
{
if (fileHeader.CompressionMethod == 0)

View File

@@ -1,4 +1,3 @@
#if NETSTANDARD2_0 || NETFRAMEWORK
using System;
using System.IO;
using SharpCompress.Common.Rar.Headers;
@@ -107,4 +106,3 @@ internal partial class Unpack : IRarUnpack
public static byte[] EnsureCapacity(byte[] array, int length) =>
array.Length < length ? new byte[length] : array;
}
#endif

View File

@@ -1,4 +1,3 @@
#if NETSTANDARD2_0 || NETFRAMEWORK
#nullable disable
using System;
@@ -30,12 +29,12 @@ internal sealed partial class Unpack : BitInput
Suspended = false;
UnpAllBuf = false;
UnpSomeRead = false;
// #if RarV2017_RAR_SMP
// MaxUserThreads = 1;
// UnpThreadPool = CreateThreadPool();
// ReadBufMT = null;
// UnpThreadData = null;
// #endif
/*#if RarV2017_RAR_SMP
MaxUserThreads = 1;
UnpThreadPool = CreateThreadPool();
ReadBufMT = null;
UnpThreadData = null;
#endif*/
MaxWinSize = 0;
MaxWinMask = 0;
@@ -198,21 +197,21 @@ internal sealed partial class Unpack : BitInput
break;
#endif
case 50: // RAR 5.0 compression algorithm.
// #if RarV2017_RAR_SMP
// if (MaxUserThreads > 1)
// {
// // We do not use the multithreaded unpack routine to repack RAR archives
// // in 'suspended' mode, because unlike the single threaded code it can
// // write more than one dictionary for same loop pass. So we would need
// // larger buffers of unknown size. Also we do not support multithreading
// // in fragmented window mode.
// if (!Fragmented)
// {
// Unpack5MT(Solid);
// break;
// }
// }
// #endif
/*#if RarV2017_RAR_SMP
if (MaxUserThreads > 1)
{
// We do not use the multithreaded unpack routine to repack RAR archives
// in 'suspended' mode, because unlike the single threaded code it can
// write more than one dictionary for same loop pass. So we would need
// larger buffers of unknown size. Also we do not support multithreading
// in fragmented window mode.
if (!Fragmented)
{
Unpack5MT(Solid);
break;
}
}
#endif*/
Unpack5(Solid);
break;
#if !Rar2017_NOSTRICT
@@ -408,4 +407,3 @@ internal sealed partial class Unpack : BitInput
}
}
}
#endif

View File

@@ -1,411 +0,0 @@
#if !NETSTANDARD2_0 && !NETFRAMEWORK
#nullable disable
using System;
using SharpCompress.Common;
using static SharpCompress.Compressors.Rar.UnpackV2017.PackDef;
using static SharpCompress.Compressors.Rar.UnpackV2017.UnpackGlobal;
#if !Rar2017_64bit
using size_t = System.UInt32;
#else
using nint = System.Int64;
using nuint = System.UInt64;
using size_t = System.UInt64;
#endif
namespace SharpCompress.Compressors.Rar.UnpackV2017;
internal sealed partial class Unpack : BitInput
{
public Unpack( /* ComprDataIO *DataIO */
)
//:Inp(true),VMCodeInp(true)
: base(true)
{
_UnpackCtor();
//UnpIO=DataIO;
Window = null;
Fragmented = false;
Suspended = false;
UnpAllBuf = false;
UnpSomeRead = false;
// #if RarV2017_RAR_SMP
// MaxUserThreads = 1;
// UnpThreadPool = CreateThreadPool();
// ReadBufMT = null;
// UnpThreadData = null;
// #endif
MaxWinSize = 0;
MaxWinMask = 0;
// Perform initialization, which should be done only once for all files.
// It prevents crash if first DoUnpack call is later made with wrong
// (true) 'Solid' value.
UnpInitData(false);
#if !RarV2017_SFX_MODULE
// RAR 1.5 decompression initialization
UnpInitData15(false);
InitHuff();
#endif
}
// later: may need Dispose() if we support thread pool
//Unpack::~Unpack()
//{
// InitFilters30(false);
//
// if (Window!=null)
// free(Window);
//#if RarV2017_RAR_SMP
// DestroyThreadPool(UnpThreadPool);
// delete[] ReadBufMT;
// delete[] UnpThreadData;
//#endif
//}
private void Init(size_t WinSize, bool Solid)
{
// If 32-bit RAR unpacks an archive with 4 GB dictionary, the window size
// will be 0 because of size_t overflow. Let's issue the memory error.
if (WinSize == 0)
//ErrHandler.MemoryError();
{
throw new InvalidFormatException(
"invalid window size (possibly due to a rar file with a 4GB being unpacked on a 32-bit platform)"
);
}
// Minimum window size must be at least twice the maximum possible
// size of filter block, which is 0x10000 in RAR now. If window size is
// smaller, we can have a block with never cleared flt->NextWindow flag
// in UnpWriteBuf(). Minimum window size 0x20000 would be enough, but let's
// use 0x40000 for extra safety and possible filter area size expansion.
const size_t MinAllocSize = 0x40000;
if (WinSize < MinAllocSize)
{
WinSize = MinAllocSize;
}
if (WinSize <= MaxWinSize) // Use the already allocated window.
{
return;
}
if ((WinSize >> 16) > 0x10000) // Window size must not exceed 4 GB.
{
return;
}
// Archiving code guarantees that window size does not grow in the same
// solid stream. So if we are here, we are either creating a new window
// or increasing the size of non-solid window. So we could safely reject
// current window data without copying them to a new window, though being
// extra cautious, we still handle the solid window grow case below.
var Grow = Solid && (Window != null || Fragmented);
// We do not handle growth for existing fragmented window.
if (Grow && Fragmented)
//throw std::bad_alloc();
{
throw new InvalidFormatException("Grow && Fragmented");
}
var NewWindow = Fragmented ? null : new byte[WinSize];
if (NewWindow == null)
{
if (Grow || WinSize < 0x1000000)
{
// We do not support growth for new fragmented window.
// Also exclude RAR4 and small dictionaries.
//throw std::bad_alloc();
throw new InvalidFormatException("Grow || WinSize<0x1000000");
}
else
{
if (Window != null) // If allocated by preceding files.
{
//free(Window);
Window = null;
}
FragWindow.Init(WinSize);
Fragmented = true;
}
}
if (!Fragmented)
{
// Clean the window to generate the same output when unpacking corrupt
// RAR files, which may access unused areas of sliding dictionary.
// sharpcompress: don't need this, freshly allocated above
//memset(NewWindow,0,WinSize);
// If Window is not NULL, it means that window size has grown.
// In solid streams we need to copy data to a new window in such case.
// RAR archiving code does not allow it in solid streams now,
// but let's implement it anyway just in case we'll change it sometimes.
if (Grow)
{
for (size_t I = 1; I <= MaxWinSize; I++)
{
NewWindow[(UnpPtr - I) & (WinSize - 1)] = Window[
(UnpPtr - I) & (MaxWinSize - 1)
];
}
}
//if (Window!=null)
// free(Window);
Window = NewWindow;
}
MaxWinSize = WinSize;
MaxWinMask = MaxWinSize - 1;
}
private void DoUnpack(uint Method, bool Solid)
{
// Methods <50 will crash in Fragmented mode when accessing NULL Window.
// They cannot be called in such mode now, but we check it below anyway
// just for extra safety.
switch (Method)
{
#if !RarV2017_SFX_MODULE
case 15: // rar 1.5 compression
if (!Fragmented)
{
Unpack15(Solid);
}
break;
case 20: // rar 2.x compression
case 26: // files larger than 2GB
if (!Fragmented)
{
Unpack20(Solid);
}
break;
#endif
#if !RarV2017_RAR5ONLY
case 29: // rar 3.x compression
if (!Fragmented)
{
throw new NotImplementedException();
}
break;
#endif
case 50: // RAR 5.0 compression algorithm.
// #if RarV2017_RAR_SMP
// if (MaxUserThreads > 1)
// {
// // We do not use the multithreaded unpack routine to repack RAR archives
// // in 'suspended' mode, because unlike the single threaded code it can
// // write more than one dictionary for same loop pass. So we would need
// // larger buffers of unknown size. Also we do not support multithreading
// // in fragmented window mode.
// if (!Fragmented)
// {
// Unpack5MT(Solid);
// break;
// }
// }
// #endif
Unpack5(Solid);
break;
#if !Rar2017_NOSTRICT
default:
throw new InvalidFormatException("unknown compression method " + Method);
#endif
}
}
private void UnpInitData(bool Solid)
{
if (!Solid)
{
new Span<uint>(OldDist).Clear();
OldDistPtr = 0;
LastDist = LastLength = 0;
// memset(Window,0,MaxWinSize);
//memset(&BlockTables,0,sizeof(BlockTables));
BlockTables = new UnpackBlockTables();
// sharpcompress: no default ctor for struct
BlockTables.Init();
UnpPtr = WrPtr = 0;
WriteBorder = Math.Min(MaxWinSize, UNPACK_MAX_WRITE) & MaxWinMask;
}
// Filters never share several solid files, so we can safely reset them
// even in solid archive.
InitFilters();
Inp.InitBitInput();
WrittenFileSize = 0;
ReadTop = 0;
ReadBorder = 0;
//memset(&BlockHeader,0,sizeof(BlockHeader));
BlockHeader = new UnpackBlockHeader();
BlockHeader.BlockSize = -1; // '-1' means not defined yet.
#if !RarV2017_SFX_MODULE
UnpInitData20(Solid);
#endif
//UnpInitData30(Solid);
UnpInitData50(Solid);
}
// LengthTable contains the length in bits for every element of alphabet.
// Dec is the structure to decode Huffman code.
// Size is the size of the length table and of the DecodeNum field in the Dec structure.
private void MakeDecodeTables(Span<byte> LengthTable, int offset, DecodeTable Dec, uint Size)
{
// Size of alphabet and DecodePos array.
Dec.MaxNum = Size;
// Calculate how many entries for every bit length in LengthTable we have.
var LengthCount = new uint[16];
//memset(LengthCount,0,sizeof(LengthCount));
for (size_t I = 0; I < Size; I++)
{
LengthCount[LengthTable[checked((int)(offset + I))] & 0xf]++;
}
// We must not calculate the number of zero length codes.
LengthCount[0] = 0;
// Set the entire DecodeNum to zero.
//memset(Dec->DecodeNum,0,Size*sizeof(*Dec->DecodeNum));
new Span<ushort>(Dec.DecodeNum).Clear();
// Initialize not really used entry for zero length code.
Dec.DecodePos[0] = 0;
// Start code for bit length 1 is 0.
Dec.DecodeLen[0] = 0;
// Right aligned upper limit code for current bit length.
uint UpperLimit = 0;
for (var I = 1; I < 16; I++)
{
// Adjust the upper limit code.
UpperLimit += LengthCount[I];
// Left aligned upper limit code.
var LeftAligned = UpperLimit << (16 - I);
// Prepare the upper limit code for next bit length.
UpperLimit *= 2;
// Store the left aligned upper limit code.
Dec.DecodeLen[I] = LeftAligned;
// Every item of this array contains the sum of all preceding items.
// So it contains the start position in code list for every bit length.
Dec.DecodePos[I] = Dec.DecodePos[I - 1] + LengthCount[I - 1];
}
// Prepare the copy of DecodePos. We'll modify this copy below,
// so we cannot use the original DecodePos.
var CopyDecodePos = new uint[Dec.DecodePos.Length];
//memcpy(CopyDecodePos,Dec->DecodePos,sizeof(CopyDecodePos));
Array.Copy(Dec.DecodePos, CopyDecodePos, CopyDecodePos.Length);
// For every bit length in the bit length table and so for every item
// of alphabet.
for (uint I = 0; I < Size; I++)
{
// Get the current bit length.
var _CurBitLength = (byte)(LengthTable[checked((int)(offset + I))] & 0xf);
if (_CurBitLength != 0)
{
// Last position in code list for current bit length.
var LastPos = CopyDecodePos[_CurBitLength];
// Prepare the decode table, so this position in code list will be
// decoded to current alphabet item number.
Dec.DecodeNum[LastPos] = (ushort)I;
// We'll use next position number for this bit length next time.
// So we pass through the entire range of positions available
// for every bit length.
CopyDecodePos[_CurBitLength]++;
}
}
// Define the number of bits to process in quick mode. We use more bits
// for larger alphabets. More bits means that more codes will be processed
// in quick mode, but also that more time will be spent on preparing
// the tables for quick decode.
switch (Size)
{
case NC:
case NC20:
case NC30:
Dec.QuickBits = MAX_QUICK_DECODE_BITS;
break;
default:
Dec.QuickBits = MAX_QUICK_DECODE_BITS - 3;
break;
}
// Size of tables for quick mode.
var QuickDataSize = 1U << (int)Dec.QuickBits;
// Bit length for current code, start from 1 bit codes. It is important
// to use 1 bit instead of 0 for minimum code length, so we are moving
// forward even when processing a corrupt archive.
//uint CurBitLength=1;
byte CurBitLength = 1;
// For every right aligned bit string which supports the quick decoding.
for (uint Code = 0; Code < QuickDataSize; Code++)
{
// Left align the current code, so it will be in usual bit field format.
var BitField = Code << (int)(16 - Dec.QuickBits);
// Prepare the table for quick decoding of bit lengths.
// Find the upper limit for current bit field and adjust the bit length
// accordingly if necessary.
while (CurBitLength < Dec.DecodeLen.Length && BitField >= Dec.DecodeLen[CurBitLength])
{
CurBitLength++;
}
// Translation of right aligned bit string to bit length.
Dec.QuickLen[Code] = CurBitLength;
// Prepare the table for quick translation of position in code list
// to position in alphabet.
// Calculate the distance from the start code for current bit length.
var Dist = BitField - Dec.DecodeLen[CurBitLength - 1];
// Right align the distance.
Dist >>= (16 - CurBitLength);
// Now we can calculate the position in the code list. It is the sum
// of first position for current bit length and right aligned distance
// between our bit field and start code for current bit length.
uint Pos;
if (
CurBitLength < Dec.DecodePos.Length
&& (Pos = Dec.DecodePos[CurBitLength] + Dist) < Size
)
{
// Define the code to alphabet number translation.
Dec.QuickNum[Code] = Dec.DecodeNum[Pos];
}
else
{
// Can be here for length table filled with zeroes only (empty).
Dec.QuickNum[Code] = 0;
}
}
}
}
#endif
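
For orientation, here is a small worked instance of the canonical-Huffman construction performed by MakeDecodeTables above (an illustration, not taken from the source). With bit lengths {1, 2, 3, 3}, LengthCount holds 1, 1, 2 for lengths 1..3, so the left-aligned upper limits come out as DecodeLen[1] = 0x8000, DecodeLen[2] = 0xC000, DecodeLen[3] = 0x10000, and the start positions as DecodePos[1] = 0, DecodePos[2] = 1, DecodePos[3] = 2. A left-aligned 16-bit field below 0x8000 therefore decodes as the 1-bit code '0', a field below 0xC000 as the 2-bit code '10', and anything else as one of the 3-bit codes '110' or '111'.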

View File

@@ -1,115 +0,0 @@
#if !NETSTANDARD2_0 && !NETFRAMEWORK
using System;
using System.Buffers;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common.Rar.Headers;
#if !Rar2017_64bit
using size_t = System.UInt32;
#else
using nint = System.Int64;
using nuint = System.UInt64;
using size_t = System.UInt64;
#endif
namespace SharpCompress.Compressors.Rar.UnpackV2017;
internal partial class Unpack : IRarUnpack
{
private FileHeader fileHeader;
private Stream readStream;
private Stream writeStream;
private void _UnpackCtor()
{
for (var i = 0; i < AudV.Length; i++)
{
AudV[i] = new AudioVariables();
}
}
private int UnpIO_UnpRead(byte[] buf, int offset, int count) =>
// NOTE: the caller has logic that checks -1 for error; we throw instead.
readStream.Read(buf, offset, count);
private void UnpIO_UnpWrite(byte[] buf, size_t offset, uint count) =>
writeStream.Write(buf, checked((int)offset), checked((int)count));
public ValueTask DoUnpackAsync(FileHeader fileHeader, Stream readStream, Stream writeStream)
{
// as of 12/2017 .NET limits array indexing to using a signed integer
// MaxWinSize causes unpack to use a fragmented window when the file
// window size exceeds MaxWinSize
// uggh, that's not how this variable is used, it's the size of the currently allocated window buffer
//x MaxWinSize = ((uint)int.MaxValue) + 1;
// may be long.MaxValue which could indicate unknown size (not present in header)
DestUnpSize = fileHeader.UncompressedSize;
this.fileHeader = fileHeader;
this.readStream = readStream;
this.writeStream = writeStream;
if (!fileHeader.IsStored)
{
Init(fileHeader.WindowSize, fileHeader.IsSolid);
}
Suspended = false;
return DoUnpackAsync();
}
public ValueTask DoUnpackAsync()
{
if (fileHeader.IsStored)
{
return UnstoreFileAsync();
}
else
{
DoUnpack(fileHeader.CompressionAlgorithm, fileHeader.IsSolid);
return new ValueTask();
}
}
private async ValueTask UnstoreFileAsync()
{
var length = (int)Math.Min(0x10000, DestUnpSize);
var buffer = ArrayPool<byte>.Shared.Rent(length);
do
{
var memory = new Memory<byte>(buffer, 0, length);
var n = await readStream.ReadAsync(memory);
if (n == 0)
{
break;
}
await writeStream.WriteAsync(memory.Slice(0, n));
DestUnpSize -= n;
} while (!Suspended);
}
public bool Suspended { get; set; }
public long DestSize => DestUnpSize;
public int Char
{
get
{
// TODO: coderb: not sure where the "MAXSIZE-30" comes from, ported from V1 code
if (InAddr > MAX_SIZE - 30)
{
UnpReadBuf();
}
return InBuf[InAddr++];
}
}
public int PpmEscChar
{
get => PPMEscChar;
set => PPMEscChar = value;
}
public static byte[] EnsureCapacity(byte[] array, int length) =>
array.Length < length ? new byte[length] : array;
}
#endif

View File

@@ -4,6 +4,7 @@ using System.Diagnostics;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.IO;
@@ -326,20 +327,146 @@ public class SharpCompressStream : Stream, IStreamStack
_internalPosition += count;
}
public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
if (count == 0)
return 0;
if (_bufferingEnabled)
{
ValidateBufferState();
// Fill buffer if needed
if (_bufferedLength == 0)
{
_bufferedLength = await Stream
.ReadAsync(_buffer!, 0, _bufferSize, cancellationToken)
.ConfigureAwait(false);
_bufferPosition = 0;
}
int available = _bufferedLength - _bufferPosition;
int toRead = Math.Min(count, available);
if (toRead > 0)
{
Array.Copy(_buffer!, _bufferPosition, buffer, offset, toRead);
_bufferPosition += toRead;
_internalPosition += toRead;
return toRead;
}
// If buffer exhausted, refill
int r = await Stream
.ReadAsync(_buffer!, 0, _bufferSize, cancellationToken)
.ConfigureAwait(false);
if (r == 0)
return 0;
_bufferedLength = r;
_bufferPosition = 0;
if (_bufferedLength == 0)
{
return 0;
}
toRead = Math.Min(count, _bufferedLength);
Array.Copy(_buffer!, 0, buffer, offset, toRead);
_bufferPosition = toRead;
_internalPosition += toRead;
return toRead;
}
else
{
int read = await Stream
.ReadAsync(buffer, offset, count, cancellationToken)
.ConfigureAwait(false);
_internalPosition += read;
return read;
}
}
public override async Task WriteAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
await Stream.WriteAsync(buffer, offset, count, cancellationToken).ConfigureAwait(false);
_internalPosition += count;
}
public override async Task FlushAsync(CancellationToken cancellationToken)
{
await Stream.FlushAsync(cancellationToken).ConfigureAwait(false);
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
//public override int Read(Span<byte> buffer)
//{
// int bytesRead = Stream.Read(buffer);
// _internalPosition += bytesRead;
// return bytesRead;
//}
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
if (buffer.Length == 0)
return 0;
if (_bufferingEnabled)
{
ValidateBufferState();
// Fill buffer if needed
if (_bufferedLength == 0)
{
_bufferedLength = await Stream
.ReadAsync(_buffer.AsMemory(0, _bufferSize), cancellationToken)
.ConfigureAwait(false);
_bufferPosition = 0;
}
int available = _bufferedLength - _bufferPosition;
int toRead = Math.Min(buffer.Length, available);
if (toRead > 0)
{
_buffer.AsSpan(_bufferPosition, toRead).CopyTo(buffer.Span);
_bufferPosition += toRead;
_internalPosition += toRead;
return toRead;
}
// If buffer exhausted, refill
int r = await Stream
.ReadAsync(_buffer.AsMemory(0, _bufferSize), cancellationToken)
.ConfigureAwait(false);
if (r == 0)
return 0;
_bufferedLength = r;
_bufferPosition = 0;
if (_bufferedLength == 0)
{
return 0;
}
toRead = Math.Min(buffer.Length, _bufferedLength);
_buffer.AsSpan(0, toRead).CopyTo(buffer.Span);
_bufferPosition = toRead;
_internalPosition += toRead;
return toRead;
}
else
{
int read = await Stream.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
_internalPosition += read;
return read;
}
}
// public override void Write(ReadOnlySpan<byte> buffer)
// {
// Stream.Write(buffer);
// _internalPosition += buffer.Length;
// }
public override async ValueTask WriteAsync(
ReadOnlyMemory<byte> buffer,
CancellationToken cancellationToken = default
)
{
await Stream.WriteAsync(buffer, cancellationToken).ConfigureAwait(false);
_internalPosition += buffer.Length;
}
#endif
}
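
The buffered ReadAsync paths above drain whatever is left in the internal buffer first and refill from the wrapped stream only when it runs dry. A condensed, self-contained restatement of that fill/drain pattern (a simplified model, not SharpCompressStream's actual implementation):

using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

internal sealed class RefillBuffer
{
    private readonly Stream _inner;
    private readonly byte[] _buffer;
    private int _pos;
    private int _len;

    public RefillBuffer(Stream inner, int size = 81920)
    {
        _inner = inner;
        _buffer = new byte[size];
    }

    public async Task<int> ReadAsync(byte[] dest, int offset, int count, CancellationToken ct)
    {
        if (_len - _pos == 0)
        {
            // Buffer exhausted: refill once from the wrapped stream.
            _len = await _inner.ReadAsync(_buffer, 0, _buffer.Length, ct).ConfigureAwait(false);
            _pos = 0;
            if (_len == 0)
            {
                return 0; // end of stream
            }
        }
        // Drain at most 'count' bytes of what is currently buffered.
        var toRead = Math.Min(count, _len - _pos);
        Array.Copy(_buffer, _pos, dest, offset, toRead);
        _pos += toRead;
        return toRead;
    }
}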

View File

@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Readers;
namespace SharpCompress.IO;
@@ -238,6 +240,105 @@ public class SourceStream : Stream, IStreamStack
public override void Write(byte[] buffer, int offset, int count) =>
throw new NotImplementedException();
public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
if (count <= 0)
{
return 0;
}
var total = count;
var r = -1;
while (count != 0 && r != 0)
{
r = await Current
.ReadAsync(
buffer,
offset,
(int)Math.Min(count, Current.Length - Current.Position),
cancellationToken
)
.ConfigureAwait(false);
count -= r;
offset += r;
if (!IsVolumes && count != 0 && Current.Position == Current.Length)
{
var length = Current.Length;
// Load next file if present
if (!SetStream(_stream + 1))
{
break;
}
// Current stream switched
// Add length of previous stream
_prevSize += length;
Current.Seek(0, SeekOrigin.Begin);
r = -1; //BugFix: reset to allow loop if count is still not 0 - was breaking split zipx (lzma xz etc)
}
}
return total - count;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
if (buffer.Length <= 0)
{
return 0;
}
var total = buffer.Length;
var count = buffer.Length;
var offset = 0;
var r = -1;
while (count != 0 && r != 0)
{
r = await Current
.ReadAsync(
buffer.Slice(offset, (int)Math.Min(count, Current.Length - Current.Position)),
cancellationToken
)
.ConfigureAwait(false);
count -= r;
offset += r;
if (!IsVolumes && count != 0 && Current.Position == Current.Length)
{
var length = Current.Length;
// Load next file if present
if (!SetStream(_stream + 1))
{
break;
}
// Current stream switched
// Add length of previous stream
_prevSize += length;
Current.Seek(0, SeekOrigin.Begin);
r = -1;
}
}
return total - count;
}
#endif
public override void Close()
{
if (IsFileMode || !ReaderOptions.LeaveStreamOpen) //close if file mode or options specify it

View File

@@ -2,6 +2,7 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
@@ -68,7 +69,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
}
}
public async Task<bool> MoveToNextEntryAsync()
public bool MoveToNextEntry()
{
if (_completed)
{
@@ -84,7 +85,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
}
if (!_wroteCurrentEntry)
{
await SkipEntryAsync();
SkipEntry();
}
_wroteCurrentEntry = false;
if (NextEntryForCurrentStream())
@@ -120,15 +121,15 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
#region Entry Skip/Write
private async Task SkipEntryAsync()
private void SkipEntry()
{
if (!Entry.IsDirectory)
{
await SkipAsync();
Skip();
}
}
private async Task SkipAsync()
private void Skip()
{
var part = Entry.Parts.First();
@@ -146,11 +147,11 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
}
}
//don't know the size so we have to try to decompress to skip
using var s = await OpenEntryStreamAsync();
await s.SkipEntryAsync();
using var s = OpenEntryStream();
s.SkipEntry();
}
public async Task WriteEntryToAsync(Stream writableStream)
public void WriteEntryTo(Stream writableStream)
{
if (_wroteCurrentEntry)
{
@@ -168,24 +169,65 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
);
}
await WriteAsync(writableStream);
Write(writableStream);
_wroteCurrentEntry = true;
}
internal async Task WriteAsync(Stream writeStream)
public async Task WriteEntryToAsync(
Stream writableStream,
CancellationToken cancellationToken = default
)
{
if (_wroteCurrentEntry)
{
throw new ArgumentException(
"WriteEntryToAsync or OpenEntryStream can only be called once."
);
}
if (writableStream is null)
{
throw new ArgumentNullException(nameof(writableStream));
}
if (!writableStream.CanWrite)
{
throw new ArgumentException(
"A writable Stream was required. Use Cancel if that was intended."
);
}
await WriteAsync(writableStream, cancellationToken).ConfigureAwait(false);
_wroteCurrentEntry = true;
}
internal void Write(Stream writeStream)
{
var streamListener = this as IReaderExtractionListener;
using Stream s = await OpenEntryStreamAsync();
using Stream s = OpenEntryStream();
s.TransferTo(writeStream, Entry, streamListener);
}
public async Task<EntryStream> OpenEntryStreamAsync()
internal async Task WriteAsync(Stream writeStream, CancellationToken cancellationToken)
{
var streamListener = this as IReaderExtractionListener;
#if NETFRAMEWORK || NETSTANDARD2_0
using Stream s = OpenEntryStream();
await s.TransferToAsync(writeStream, Entry, streamListener, cancellationToken)
.ConfigureAwait(false);
#else
await using Stream s = OpenEntryStream();
await s.TransferToAsync(writeStream, Entry, streamListener, cancellationToken)
.ConfigureAwait(false);
#endif
}
public EntryStream OpenEntryStream()
{
if (_wroteCurrentEntry)
{
throw new ArgumentException("WriteEntryTo or OpenEntryStream can only be called once.");
}
var stream = await GetEntryStreamAsync();
var stream = GetEntryStream();
_wroteCurrentEntry = true;
return stream;
}
@@ -193,11 +235,11 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
/// <summary>
/// Retains a reference to the entry stream, so we can check whether it completed later.
/// </summary>
protected Task<EntryStream> CreateEntryStreamAsync(Stream? decompressed) =>
Task.FromResult(new EntryStream(this, decompressed.NotNull()));
protected EntryStream CreateEntryStream(Stream? decompressed) =>
new(this, decompressed.NotNull());
protected virtual Task<EntryStream> GetEntryStreamAsync() =>
CreateEntryStreamAsync(Entry.Parts.First().GetCompressedStream());
protected virtual EntryStream GetEntryStream() =>
CreateEntryStream(Entry.Parts.First().GetCompressedStream());
#endregion

View File

@@ -1,5 +1,6 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
@@ -20,7 +21,14 @@ public interface IReader : IDisposable
/// Decompresses the current entry to the stream. This cannot be called twice for the current entry.
/// </summary>
/// <param name="writableStream"></param>
Task WriteEntryToAsync(Stream writableStream);
void WriteEntryTo(Stream writableStream);
/// <summary>
/// Decompresses the current entry to the stream asynchronously. This cannot be called twice for the current entry.
/// </summary>
/// <param name="writableStream"></param>
/// <param name="cancellationToken"></param>
Task WriteEntryToAsync(Stream writableStream, CancellationToken cancellationToken = default);
bool Cancelled { get; }
void Cancel();
@@ -29,11 +37,11 @@ public interface IReader : IDisposable
/// Moves to the next entry by reading more data from the underlying stream. This skips if data has not been read.
/// </summary>
/// <returns></returns>
Task<bool> MoveToNextEntryAsync();
bool MoveToNextEntry();
/// <summary>
/// Opens the current entry as a stream that will decompress as it is read.
/// Read the entire stream or use SkipEntry on EntryStream.
/// </summary>
Task<EntryStream> OpenEntryStreamAsync();
EntryStream OpenEntryStream();
}

View File

@@ -1,4 +1,5 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
@@ -6,64 +7,124 @@ namespace SharpCompress.Readers;
public static class IReaderExtensions
{
public static async Task WriteEntryToAsync(this IReader reader, string filePath)
public static void WriteEntryTo(this IReader reader, string filePath)
{
using Stream stream = File.Open(filePath, FileMode.Create, FileAccess.Write);
await reader.WriteEntryToAsync(stream);
reader.WriteEntryTo(stream);
}
public static async Task WriteEntryToAsync(this IReader reader, FileInfo filePath)
public static void WriteEntryTo(this IReader reader, FileInfo filePath)
{
using Stream stream = filePath.Open(FileMode.Create);
await reader.WriteEntryToAsync(stream);
reader.WriteEntryTo(stream);
}
/// <summary>
/// Extract all remaining unread entries to specific directory, retaining filename
/// </summary>
public static async Task WriteAllToDirectoryAsync(
public static void WriteAllToDirectory(
this IReader reader,
string destinationDirectory,
ExtractionOptions? options = null
)
{
while (await reader.MoveToNextEntryAsync())
while (reader.MoveToNextEntry())
{
await reader.WriteEntryToDirectoryAsync(destinationDirectory, options);
reader.WriteEntryToDirectory(destinationDirectory, options);
}
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static async Task WriteEntryToDirectoryAsync(
public static void WriteEntryToDirectory(
this IReader reader,
string destinationDirectory,
ExtractionOptions? options = null
) =>
await ExtractionMethods.WriteEntryToDirectoryAsync(
ExtractionMethods.WriteEntryToDirectory(
reader.Entry,
destinationDirectory,
options,
reader.WriteEntryToFileAsync
reader.WriteEntryToFile
);
/// <summary>
/// Extract to specific file
/// </summary>
public static async Task WriteEntryToFileAsync(
public static void WriteEntryToFile(
this IReader reader,
string destinationFileName,
ExtractionOptions? options = null
) =>
await ExtractionMethods.WriteEntryToFileAsync(
ExtractionMethods.WriteEntryToFile(
reader.Entry,
destinationFileName,
options,
async (x, fm) =>
(x, fm) =>
{
using var fs = File.Open(destinationFileName, fm);
await reader.WriteEntryToAsync(fs);
reader.WriteEntryTo(fs);
}
);
/// <summary>
/// Extract to specific directory asynchronously, retaining filename
/// </summary>
public static async Task WriteEntryToDirectoryAsync(
this IReader reader,
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
) =>
await ExtractionMethods
.WriteEntryToDirectoryAsync(
reader.Entry,
destinationDirectory,
options,
(fileName, opts) => reader.WriteEntryToFileAsync(fileName, opts, cancellationToken),
cancellationToken
)
.ConfigureAwait(false);
/// <summary>
/// Extract to specific file asynchronously
/// </summary>
public static async Task WriteEntryToFileAsync(
this IReader reader,
string destinationFileName,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
) =>
await ExtractionMethods
.WriteEntryToFileAsync(
reader.Entry,
destinationFileName,
options,
async (x, fm) =>
{
using var fs = File.Open(destinationFileName, fm);
await reader.WriteEntryToAsync(fs, cancellationToken).ConfigureAwait(false);
},
cancellationToken
)
.ConfigureAwait(false);
/// <summary>
/// Extract all remaining unread entries to specific directory asynchronously, retaining filename
/// </summary>
public static async Task WriteAllToDirectoryAsync(
this IReader reader,
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
)
{
while (reader.MoveToNextEntry())
{
await reader
.WriteEntryToDirectoryAsync(destinationDirectory, options, cancellationToken)
.ConfigureAwait(false);
}
}
}
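
A typical call site for the async reader extensions above; the archive path, destination directory, and timeout are illustrative:

using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;

internal static class ReaderAsyncSample
{
    public static async Task ExtractAllAsync()
    {
        using var stream = File.OpenRead("archive.tar.gz"); // illustrative path
        using var reader = ReaderFactory.Open(stream);
        using var cts = new CancellationTokenSource(TimeSpan.FromMinutes(5));
        await reader.WriteAllToDirectoryAsync(
            "output",
            new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
            cts.Token
        );
    }
}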

View File

@@ -2,7 +2,6 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Compressors.Rar;
@@ -101,7 +100,7 @@ public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
protected virtual IEnumerable<FilePart> CreateFilePartEnumerableForCurrentEntry() =>
Entry.Parts;
protected override async Task<EntryStream> GetEntryStreamAsync()
protected override EntryStream GetEntryStream()
{
if (Entry.IsRedir)
{
@@ -114,19 +113,16 @@ public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
);
if (Entry.IsRarV3)
{
return await CreateEntryStreamAsync(
new RarCrcStream(UnpackV1.Value, Entry.FileHeader, stream)
);
return CreateEntryStream(new RarCrcStream(UnpackV1.Value, Entry.FileHeader, stream));
}
if (Entry.FileHeader.FileCrc?.Length > 5)
{
var s = await RarBLAKE2spStream.Create(UnpackV2017.Value, Entry.FileHeader, stream);
return await CreateEntryStreamAsync(s);
return CreateEntryStream(
new RarBLAKE2spStream(UnpackV2017.Value, Entry.FileHeader, stream)
);
}
return await CreateEntryStreamAsync(
new RarCrcStream(UnpackV2017.Value, Entry.FileHeader, stream)
);
return CreateEntryStream(new RarCrcStream(UnpackV2017.Value, Entry.FileHeader, stream));
}
}

View File

@@ -4,6 +4,7 @@ using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Readers;
@@ -86,27 +87,32 @@ internal static class Utility
public static void Skip(this Stream source)
{
var buffer = ArrayPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
try
{
do { } while (source.Read(buffer, 0, buffer.Length) == buffer.Length);
}
finally
{
ArrayPool<byte>.Shared.Return(buffer);
}
using var buffer = MemoryPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
while (source.Read(buffer.Memory.Span) > 0) { }
}
public static async Task SkipAsync(this Stream source)
public static async Task SkipAsync(
this Stream source,
CancellationToken cancellationToken = default
)
{
var buffer = ArrayPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
var array = ArrayPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
try
{
do { } while (await source.ReadAsync(buffer, 0, buffer.Length) == buffer.Length);
while (true)
{
var read = await source
.ReadAsync(array, 0, array.Length, cancellationToken)
.ConfigureAwait(false);
if (read <= 0)
{
break;
}
}
}
finally
{
ArrayPool<byte>.Shared.Return(buffer);
ArrayPool<byte>.Shared.Return(array);
}
}
@@ -238,6 +244,89 @@ internal static class Utility
}
}
public static async Task<long> TransferToAsync(
this Stream source,
Stream destination,
long maxLength,
CancellationToken cancellationToken = default
)
{
var array = ArrayPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
try
{
var maxReadSize = array.Length;
long total = 0;
var remaining = maxLength;
if (remaining < maxReadSize)
{
maxReadSize = (int)remaining;
}
while (
await ReadTransferBlockAsync(source, array, maxReadSize, cancellationToken)
.ConfigureAwait(false)
is var (success, count)
&& success
)
{
await destination
.WriteAsync(array, 0, count, cancellationToken)
.ConfigureAwait(false);
total += count;
if (remaining - count < 0)
{
break;
}
remaining -= count;
if (remaining < maxReadSize)
{
maxReadSize = (int)remaining;
}
}
return total;
}
finally
{
ArrayPool<byte>.Shared.Return(array);
}
}
public static async Task<long> TransferToAsync(
this Stream source,
Stream destination,
Common.Entry entry,
IReaderExtractionListener readerExtractionListener,
CancellationToken cancellationToken = default
)
{
var array = ArrayPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
try
{
var iterations = 0;
long total = 0;
int count;
while (
(
count = await source
.ReadAsync(array, 0, array.Length, cancellationToken)
.ConfigureAwait(false)
) != 0
)
{
total += count;
await destination
.WriteAsync(array, 0, count, cancellationToken)
.ConfigureAwait(false);
iterations++;
readerExtractionListener.FireEntryExtractionProgress(entry, total, iterations);
}
return total;
}
finally
{
ArrayPool<byte>.Shared.Return(array);
}
}
private static bool ReadTransferBlock(Stream source, byte[] array, int maxSize, out int count)
{
var size = maxSize;
@@ -249,6 +338,56 @@ internal static class Utility
return count != 0;
}
private static async Task<(bool success, int count)> ReadTransferBlockAsync(
Stream source,
byte[] array,
int maxSize,
CancellationToken cancellationToken
)
{
var size = maxSize;
if (maxSize > array.Length)
{
size = array.Length;
}
var count = await source.ReadAsync(array, 0, size, cancellationToken).ConfigureAwait(false);
return (count != 0, count);
}
public static async Task SkipAsync(
this Stream source,
long advanceAmount,
CancellationToken cancellationToken = default
)
{
if (source.CanSeek)
{
source.Position += advanceAmount;
return;
}
var array = ArrayPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
try
{
while (advanceAmount > 0)
{
var toRead = (int)Math.Min(array.Length, advanceAmount);
var read = await source
.ReadAsync(array, 0, toRead, cancellationToken)
.ConfigureAwait(false);
if (read <= 0)
{
break;
}
advanceAmount -= read;
}
}
finally
{
ArrayPool<byte>.Shared.Return(array);
}
}
#if NET60_OR_GREATER
public static bool ReadFully(this Stream stream, byte[] buffer)
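
ReadTransferBlockAsync above returns a (success, count) tuple because async methods cannot declare out parameters the way the synchronous ReadTransferBlock does; the pattern in isolation, with names shortened for clarity:

using System.IO;
using System.Threading;
using System.Threading.Tasks;

internal static class TupleReadSample
{
    // Async stand-in for an out-parameter read: both results travel in a tuple.
    private static async Task<(bool success, int count)> ReadBlockAsync(
        Stream source,
        byte[] array,
        CancellationToken ct
    )
    {
        var count = await source.ReadAsync(array, 0, array.Length, ct).ConfigureAwait(false);
        return (count != 0, count);
    }

    // The tuple deconstruction drives the loop condition directly.
    public static async Task DrainAsync(Stream source, CancellationToken ct)
    {
        var array = new byte[81920];
        while (await ReadBlockAsync(source, array, ct) is var (ok, n) && ok)
        {
            // consume n bytes of array here
        }
    }
}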

View File

@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Writers;
@@ -22,6 +24,19 @@ public abstract class AbstractWriter(ArchiveType type, WriterOptions writerOptio
public abstract void Write(string filename, Stream source, DateTime? modificationTime);
public virtual async Task WriteAsync(
string filename,
Stream source,
DateTime? modificationTime,
CancellationToken cancellationToken = default
)
{
// Default implementation calls synchronous version
// Derived classes should override for true async behavior
Write(filename, source, modificationTime);
await Task.CompletedTask.ConfigureAwait(false);
}
protected virtual void Dispose(bool isDisposing)
{
if (isDisposing)
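
The base WriteAsync above deliberately falls back to the synchronous Write so the contract stays implementable everywhere. A hypothetical derived writer that wants genuinely asynchronous I/O would override it along these lines (a fragment only; OpenEntryStream is an invented placeholder, not a SharpCompress API):

// Sketch of an override in a hypothetical derived writer.
public override async Task WriteAsync(
    string filename,
    Stream source,
    DateTime? modificationTime,
    CancellationToken cancellationToken = default
)
{
    // Placeholder for however the concrete writer opens its destination entry.
    using var entry = OpenEntryStream(filename, modificationTime);
    await source.CopyToAsync(entry, 81920, cancellationToken).ConfigureAwait(false);
}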

View File

@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Writers;
@@ -8,4 +10,10 @@ public interface IWriter : IDisposable
{
ArchiveType WriterType { get; }
void Write(string filename, Stream source, DateTime? modificationTime);
Task WriteAsync(
string filename,
Stream source,
DateTime? modificationTime,
CancellationToken cancellationToken = default
);
}

View File

@@ -1,6 +1,8 @@
using System;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Writers;
@@ -52,4 +54,71 @@ public static class IWriterExtensions
writer.Write(file.Substring(directory.Length), file);
}
}
// Async extensions
public static Task WriteAsync(
this IWriter writer,
string entryPath,
Stream source,
CancellationToken cancellationToken = default
) => writer.WriteAsync(entryPath, source, null, cancellationToken);
public static async Task WriteAsync(
this IWriter writer,
string entryPath,
FileInfo source,
CancellationToken cancellationToken = default
)
{
if (!source.Exists)
{
throw new ArgumentException("Source does not exist: " + source.FullName);
}
using var stream = source.OpenRead();
await writer
.WriteAsync(entryPath, stream, source.LastWriteTime, cancellationToken)
.ConfigureAwait(false);
}
public static Task WriteAsync(
this IWriter writer,
string entryPath,
string source,
CancellationToken cancellationToken = default
) => writer.WriteAsync(entryPath, new FileInfo(source), cancellationToken);
public static Task WriteAllAsync(
this IWriter writer,
string directory,
string searchPattern = "*",
SearchOption option = SearchOption.TopDirectoryOnly,
CancellationToken cancellationToken = default
) => writer.WriteAllAsync(directory, searchPattern, null, option, cancellationToken);
public static async Task WriteAllAsync(
this IWriter writer,
string directory,
string searchPattern = "*",
Func<string, bool>? fileSearchFunc = null,
SearchOption option = SearchOption.TopDirectoryOnly,
CancellationToken cancellationToken = default
)
{
if (!Directory.Exists(directory))
{
throw new ArgumentException("Directory does not exist: " + directory);
}
fileSearchFunc ??= n => true;
foreach (
var file in Directory
.EnumerateFiles(directory, searchPattern, option)
.Where(fileSearchFunc)
)
{
await writer
.WriteAsync(file.Substring(directory.Length), file, cancellationToken)
.ConfigureAwait(false);
}
}
}
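
End to end, the new writer extensions compose like this (the WriterFactory.Open shape matches the test usage later in this diff; the output path and source directory are illustrative):

using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Writers;

internal static class WriterAsyncSample
{
    public static async Task CreateArchiveAsync(CancellationToken ct)
    {
        using var output = File.Create("backup.zip"); // illustrative path
        using var writer = WriterFactory.Open(output, ArchiveType.Zip, CompressionType.Deflate);
        await writer.WriteAllAsync("data", "*", SearchOption.AllDirectories, cancellationToken: ct);
    }
}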

View File

@@ -335,9 +335,9 @@
"net8.0": {
"Microsoft.NET.ILLink.Tasks": {
"type": "Direct",
"requested": "[8.0.17, )",
"resolved": "8.0.17",
"contentHash": "x5/y4l8AtshpBOrCZdlE4txw8K3e3s9meBFeZeR3l8hbbku2V7kK6ojhXvrbjg1rk3G+JqL1BI26gtgc1ZrdUw=="
"requested": "[8.0.20, )",
"resolved": "8.0.20",
"contentHash": "Rhcto2AjGvTO62+/VTmBpumBOmqIGp7nYEbTbmEXkCq4yPGxV8whju3/HsIA/bKyo2+DggaYk5+/8sxb1AbPTw=="
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",

View File

@@ -1,4 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
@@ -16,32 +20,31 @@ namespace SharpCompress.Test.Arc
}
[Fact]
public Task Arc_Uncompressed_Read() =>
ReadAsync("Arc.uncompressed.arc", CompressionType.None);
public void Arc_Uncompressed_Read() => Read("Arc.uncompressed.arc", CompressionType.None);
[Fact]
public async Task Arc_Squeezed_Read()
public void Arc_Squeezed_Read()
{
await ProcessArchive("Arc.squeezed.arc");
ProcessArchive("Arc.squeezed.arc");
}
[Fact]
public async Task Arc_Crunched_Read()
public void Arc_Crunched_Read()
{
await ProcessArchive("Arc.crunched.arc");
ProcessArchive("Arc.crunched.arc");
}
private async Task ProcessArchive(string archiveName)
private void ProcessArchive(string archiveName)
{
// Process a given archive by its name
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, archiveName)))
using (IReader reader = ArcReader.Open(stream))
{
while (await reader.MoveToNextEntryAsync())
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
await reader.WriteEntryToDirectoryAsync(
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);

View File

@@ -2,7 +2,6 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.Compressors.Xz;
@@ -28,16 +27,13 @@ public class ArchiveTests : ReaderTests
}
}
protected async Task ArchiveStreamReadExtractAllAsync(
string testArchive,
CompressionType compression
)
protected void ArchiveStreamReadExtractAll(string testArchive, CompressionType compression)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
await ArchiveStreamReadExtractAllAsync(new[] { testArchive }, compression);
ArchiveStreamReadExtractAll(new[] { testArchive }, compression);
}
protected async Task ArchiveStreamReadExtractAllAsync(
protected void ArchiveStreamReadExtractAll(
IEnumerable<string> testArchives,
CompressionType compression
)
@@ -58,7 +54,7 @@ public class ArchiveTests : ReaderTests
Assert.True(archive.IsSolid);
using (var reader = archive.ExtractAllEntries())
{
await UseReaderAsync(reader, compression);
UseReader(reader, compression);
}
VerifyFiles();
@@ -69,7 +65,7 @@ public class ArchiveTests : ReaderTests
}
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteEntryToDirectoryAsync(
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -87,38 +83,36 @@ public class ArchiveTests : ReaderTests
}
}
protected Task ArchiveStreamReadAsync(
string testArchive,
ReaderOptions? readerOptions = null
) => ArchiveStreamReadAsync(ArchiveFactory.AutoFactory, testArchive, readerOptions);
protected void ArchiveStreamRead(string testArchive, ReaderOptions? readerOptions = null) =>
ArchiveStreamRead(ArchiveFactory.AutoFactory, testArchive, readerOptions);
protected Task ArchiveStreamReadAsync(
protected void ArchiveStreamRead(
IArchiveFactory archiveFactory,
string testArchive,
ReaderOptions? readerOptions = null
)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
return ArchiveStreamReadAsync(archiveFactory, readerOptions, testArchive);
ArchiveStreamRead(archiveFactory, readerOptions, testArchive);
}
protected Task ArchiveStreamReadAsync(
protected void ArchiveStreamRead(
ReaderOptions? readerOptions = null,
params string[] testArchives
) => ArchiveStreamReadAsync(ArchiveFactory.AutoFactory, readerOptions, testArchives);
) => ArchiveStreamRead(ArchiveFactory.AutoFactory, readerOptions, testArchives);
protected Task ArchiveStreamReadAsync(
protected void ArchiveStreamRead(
IArchiveFactory archiveFactory,
ReaderOptions? readerOptions = null,
params string[] testArchives
) =>
ArchiveStreamReadAsync(
ArchiveStreamRead(
archiveFactory,
readerOptions,
testArchives.Select(x => Path.Combine(TEST_ARCHIVES_PATH, x))
);
protected async Task ArchiveStreamReadAsync(
protected void ArchiveStreamRead(
IArchiveFactory archiveFactory,
ReaderOptions? readerOptions,
IEnumerable<string> testArchives
@@ -139,7 +133,7 @@ public class ArchiveTests : ReaderTests
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteEntryToDirectoryAsync(
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -157,16 +151,16 @@ public class ArchiveTests : ReaderTests
}
}
protected Task ArchiveStreamMultiReadAsync(
protected void ArchiveStreamMultiRead(
ReaderOptions? readerOptions = null,
params string[] testArchives
) =>
ArchiveStreamMultiReadAsync(
ArchiveStreamMultiRead(
readerOptions,
testArchives.Select(x => Path.Combine(TEST_ARCHIVES_PATH, x))
);
protected async Task ArchiveStreamMultiReadAsync(
protected void ArchiveStreamMultiRead(
ReaderOptions? readerOptions,
IEnumerable<string> testArchives
)
@@ -180,7 +174,7 @@ public class ArchiveTests : ReaderTests
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteEntryToDirectoryAsync(
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -189,16 +183,16 @@ public class ArchiveTests : ReaderTests
VerifyFiles();
}
protected Task ArchiveOpenStreamReadAsync(
protected void ArchiveOpenStreamRead(
ReaderOptions? readerOptions = null,
params string[] testArchives
) =>
ArchiveOpenStreamReadAsync(
ArchiveOpenStreamRead(
readerOptions,
testArchives.Select(x => Path.Combine(TEST_ARCHIVES_PATH, x))
);
protected async Task ArchiveOpenStreamReadAsync(
protected void ArchiveOpenStreamRead(
ReaderOptions? readerOptions,
IEnumerable<string> testArchives
)
@@ -212,7 +206,7 @@ public class ArchiveTests : ReaderTests
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteEntryToDirectoryAsync(
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -258,7 +252,7 @@ public class ArchiveTests : ReaderTests
}
}
protected async Task ArchiveExtractToDirectoryAsync(
protected void ArchiveExtractToDirectory(
string testArchive,
ReaderOptions? readerOptions = null
)
@@ -266,12 +260,12 @@ public class ArchiveTests : ReaderTests
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using (var archive = ArchiveFactory.Open(new FileInfo(testArchive), readerOptions))
{
await archive.ExtractToDirectoryAsync(SCRATCH_FILES_PATH);
archive.ExtractToDirectory(SCRATCH_FILES_PATH);
}
VerifyFiles();
}
protected async Task ArchiveFileReadAsync(
protected void ArchiveFileRead(
IArchiveFactory archiveFactory,
string testArchive,
ReaderOptions? readerOptions = null
@@ -282,7 +276,7 @@ public class ArchiveTests : ReaderTests
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteEntryToDirectoryAsync(
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -291,8 +285,8 @@ public class ArchiveTests : ReaderTests
VerifyFiles();
}
protected Task ArchiveFileReadAsync(string testArchive, ReaderOptions? readerOptions = null) =>
ArchiveFileReadAsync(ArchiveFactory.AutoFactory, testArchive, readerOptions);
protected void ArchiveFileRead(string testArchive, ReaderOptions? readerOptions = null) =>
ArchiveFileRead(ArchiveFactory.AutoFactory, testArchive, readerOptions);
protected void ArchiveFileSkip(
string testArchive,
@@ -316,14 +310,14 @@ public class ArchiveTests : ReaderTests
/// <summary>
/// Demonstrate the ExtractionOptions.PreserveFileTime and ExtractionOptions.PreserveAttributes extract options
/// </summary>
protected async Task ArchiveFileReadExAsync(string testArchive)
protected void ArchiveFileReadEx(string testArchive)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using (var archive = ArchiveFactory.Open(testArchive))
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteEntryToDirectoryAsync(
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions
{
@@ -338,7 +332,7 @@ public class ArchiveTests : ReaderTests
VerifyFilesEx();
}
protected async Task ArchiveDeltaDistanceReadAsync(string testArchive)
protected void ArchiveDeltaDistanceRead(string testArchive)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using var archive = ArchiveFactory.Open(testArchive);
@@ -347,7 +341,7 @@ public class ArchiveTests : ReaderTests
if (!entry.IsDirectory)
{
var memory = new MemoryStream();
await entry.WriteToAsync(memory);
entry.WriteTo(memory);
memory.Position = 0;

View File

@@ -0,0 +1,238 @@
using System;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Writers;
using Xunit;
namespace SharpCompress.Test.GZip;
public class AsyncTests : TestBase
{
[Fact]
public async Task Reader_Async_Extract_All()
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz");
#if NETFRAMEWORK
using var stream = File.OpenRead(testArchive);
#else
await using var stream = File.OpenRead(testArchive);
#endif
using var reader = ReaderFactory.Open(stream);
await reader.WriteAllToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
// Just verify some files were extracted
var extractedFiles = Directory.GetFiles(
SCRATCH_FILES_PATH,
"*",
SearchOption.AllDirectories
);
Assert.True(extractedFiles.Length > 0, "No files were extracted");
}
[Fact]
public async Task Reader_Async_Extract_Single_Entry()
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz");
#if NETFRAMEWORK
using var stream = File.OpenRead(testArchive);
#else
await using var stream = File.OpenRead(testArchive);
#endif
using var reader = ReaderFactory.Open(stream);
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
var outputPath = Path.Combine(SCRATCH_FILES_PATH, reader.Entry.Key!);
Directory.CreateDirectory(Path.GetDirectoryName(outputPath)!);
#if NETFRAMEWORK
using var outputStream = File.Create(outputPath);
#else
await using var outputStream = File.Create(outputPath);
#endif
await reader.WriteEntryToAsync(outputStream);
break; // Just test one entry
}
}
}
[Fact]
public async Task Archive_Entry_Async_Open_Stream()
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz");
using var archive = ArchiveFactory.Open(testArchive);
foreach (var entry in archive.Entries.Where(e => !e.IsDirectory).Take(1))
{
#if NETFRAMEWORK
using var entryStream = await entry.OpenEntryStreamAsync();
#else
await using var entryStream = await entry.OpenEntryStreamAsync();
#endif
Assert.NotNull(entryStream);
Assert.True(entryStream.CanRead);
// Read some data to verify it works
var buffer = new byte[1024];
var read = await entryStream.ReadAsync(buffer, 0, buffer.Length);
Assert.True(read > 0);
}
}
[Fact]
public async Task Writer_Async_Write_Single_File()
{
var outputPath = Path.Combine(SCRATCH_FILES_PATH, "async_test.zip");
using (var stream = File.Create(outputPath))
using (var writer = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
{
var testFile = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz");
using var fileStream = File.OpenRead(testFile);
await writer.WriteAsync("test_entry.bin", fileStream, new DateTime(2023, 1, 1));
}
// Verify the archive was created and contains the entry
Assert.True(File.Exists(outputPath));
using var archive = ZipArchive.Open(outputPath);
Assert.Single(archive.Entries.Where(e => !e.IsDirectory));
}
[Fact]
public async Task Async_With_Cancellation_Token()
{
using var cts = new CancellationTokenSource();
cts.CancelAfter(10000); // 10 seconds should be plenty
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz");
using var stream = File.OpenRead(testArchive);
using var reader = ReaderFactory.Open(stream);
await reader.WriteAllToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
cts.Token
);
// Just verify some files were extracted
var extractedFiles = Directory.GetFiles(
SCRATCH_FILES_PATH,
"*",
SearchOption.AllDirectories
);
Assert.True(extractedFiles.Length > 0, "No files were extracted");
}
[Fact]
public async Task Stream_Extensions_Async()
{
var testFile = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz");
using var inputStream = File.OpenRead(testFile);
var outputPath = Path.Combine(SCRATCH_FILES_PATH, "async_copy.bin");
using var outputStream = File.Create(outputPath);
// Test the async extension method
var buffer = new byte[8192];
int bytesRead;
while ((bytesRead = await inputStream.ReadAsync(buffer, 0, buffer.Length)) > 0)
{
await outputStream.WriteAsync(buffer, 0, bytesRead);
}
Assert.True(File.Exists(outputPath));
Assert.True(new FileInfo(outputPath).Length > 0);
}
[Fact]
public async Task EntryStream_ReadAsync_Works()
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz");
using var stream = File.OpenRead(testArchive);
using var reader = ReaderFactory.Open(stream);
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
using var entryStream = reader.OpenEntryStream();
var buffer = new byte[4096];
var totalRead = 0;
int bytesRead;
// Test ReadAsync on EntryStream
while ((bytesRead = await entryStream.ReadAsync(buffer, 0, buffer.Length)) > 0)
{
totalRead += bytesRead;
}
Assert.True(totalRead > 0, "Should have read some data from entry stream");
break; // Test just one entry
}
}
}
[Fact]
public async Task CompressionStream_Async_ReadWrite()
{
var testData = new byte[1024];
new Random(42).NextBytes(testData);
var compressedPath = Path.Combine(SCRATCH_FILES_PATH, "async_compressed.gz");
// Test async write with GZipStream
using (var fileStream = File.Create(compressedPath))
using (
var gzipStream = new Compressors.Deflate.GZipStream(
fileStream,
Compressors.CompressionMode.Compress
)
)
{
await gzipStream.WriteAsync(testData, 0, testData.Length);
await gzipStream.FlushAsync();
}
Assert.True(File.Exists(compressedPath));
Assert.True(new FileInfo(compressedPath).Length > 0);
// Test async read with GZipStream
using (var fileStream = File.OpenRead(compressedPath))
using (
var gzipStream = new Compressors.Deflate.GZipStream(
fileStream,
Compressors.CompressionMode.Decompress
)
)
{
var decompressed = new byte[testData.Length];
var totalRead = 0;
int bytesRead;
while (
totalRead < decompressed.Length
&& (
bytesRead = await gzipStream.ReadAsync(
decompressed,
totalRead,
decompressed.Length - totalRead
)
) > 0
)
{
totalRead += bytesRead;
}
Assert.Equal(testData.Length, totalRead);
Assert.Equal(testData, decompressed);
}
}
}
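
A note on the #if NETFRAMEWORK guards that recur throughout this file: Stream only implements IAsyncDisposable on .NET Core 3.0+/.NET 5+, so the await using form has to be compiled out when targeting .NET Framework. A minimal sketch of the pattern, for illustration only (CopyFileAsync and the paths are placeholders, not part of the test suite):

using System.IO;
using System.Threading.Tasks;

public static class DisposalSketch
{
    public static async Task CopyFileAsync(string source, string destination)
    {
#if NETFRAMEWORK
        // .NET Framework: Stream has no DisposeAsync, fall back to sync Dispose
        using var input = File.OpenRead(source);
        using var output = File.Create(destination);
#else
        // Modern .NET: DisposeAsync flushes and releases without blocking
        await using var input = File.OpenRead(source);
        await using var output = File.Create(destination);
#endif
        await input.CopyToAsync(output);
    }
}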

View File

@@ -1,6 +1,5 @@
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Tar;
@@ -14,13 +13,13 @@ public class GZipArchiveTests : ArchiveTests
public GZipArchiveTests() => UseExtensionInsteadOfNameToVerify = true;
[Fact]
public async Task GZip_Archive_Generic()
public void GZip_Archive_Generic()
{
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")))
using (var archive = ArchiveFactory.Open(stream))
{
var entry = archive.Entries.First();
await entry.WriteToFileAsync(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
var size = entry.Size;
var scratch = new FileInfo(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar"));
@@ -29,20 +28,20 @@ public class GZipArchiveTests : ArchiveTests
Assert.Equal(size, scratch.Length);
Assert.Equal(size, test.Length);
}
await CompareArchivesByPathAsync(
CompareArchivesByPath(
Path.Combine(SCRATCH_FILES_PATH, "Tar.tar"),
Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar")
);
}
[Fact]
public async Task GZip_Archive()
public void GZip_Archive()
{
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")))
using (var archive = GZipArchive.Open(stream))
{
var entry = archive.Entries.First();
await entry.WriteToFileAsync(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
var size = entry.Size;
var scratch = new FileInfo(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar"));
@@ -51,7 +50,7 @@ public class GZipArchiveTests : ArchiveTests
Assert.Equal(size, scratch.Length);
Assert.Equal(size, test.Length);
}
await CompareArchivesByPathAsync(
CompareArchivesByPath(
Path.Combine(SCRATCH_FILES_PATH, "Tar.tar"),
Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar")
);

View File

@@ -0,0 +1,99 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
using SharpCompress.Test.Mocks;
using Xunit;
namespace SharpCompress.Test.GZip;
public class GZipReaderAsyncTests : ReaderTests
{
public GZipReaderAsyncTests() => UseExtensionInsteadOfNameToVerify = true;
[Fact]
public async Task GZip_Reader_Generic_Async() =>
await ReadAsync("Tar.tar.gz", CompressionType.GZip);
[Fact]
public async Task GZip_Reader_Generic2_Async()
{
//read only as GZip item
using Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"));
using var reader = GZipReader.Open(new SharpCompressStream(stream));
while (reader.MoveToNextEntry())
{
Assert.NotEqual(0, reader.Entry.Size);
Assert.NotEqual(0, reader.Entry.Crc);
// Use async overload for reading the entry
if (!reader.Entry.IsDirectory)
{
using var entryStream = reader.OpenEntryStream();
using var ms = new MemoryStream();
await entryStream.CopyToAsync(ms);
}
}
}
protected async Task ReadAsync(
string testArchive,
CompressionType expectedCompression,
ReaderOptions? options = null
)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
options ??= new ReaderOptions() { BufferSize = 0x20000 };
options.LeaveStreamOpen = true;
await ReadImplAsync(testArchive, expectedCompression, options);
options.LeaveStreamOpen = false;
await ReadImplAsync(testArchive, expectedCompression, options);
VerifyFiles();
}
private async Task ReadImplAsync(
string testArchive,
CompressionType expectedCompression,
ReaderOptions options
)
{
using var file = File.OpenRead(testArchive);
using var protectedStream = SharpCompressStream.Create(
new ForwardOnlyStream(file, options.BufferSize),
leaveOpen: true,
throwOnDispose: true,
bufferSize: options.BufferSize
);
using var testStream = new TestStream(protectedStream);
using (var reader = ReaderFactory.Open(testStream, options))
{
await UseReaderAsync(reader, expectedCompression);
protectedStream.ThrowOnDispose = false;
Assert.False(testStream.IsDisposed, $"{nameof(testStream)} prematurely closed");
}
var message =
$"{nameof(options.LeaveStreamOpen)} is set to '{options.LeaveStreamOpen}', so {nameof(testStream.IsDisposed)} should be set to '{!testStream.IsDisposed}', but is set to {testStream.IsDisposed}";
Assert.True(options.LeaveStreamOpen != testStream.IsDisposed, message);
}
private async Task UseReaderAsync(IReader reader, CompressionType expectedCompression)
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(expectedCompression, reader.Entry.CompressionType);
await reader.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
}
}
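
Worth noting why ReadImplAsync wraps the file in a ForwardOnlyStream: hiding seekability forces ReaderFactory down the pure streaming code path instead of letting it seek. A hypothetical minimal version of such a wrapper, sketched here for illustration (the real helper lives in SharpCompress.Test.Mocks and may differ):

using System;
using System.IO;

public sealed class ForwardOnlyStreamSketch : Stream
{
    private readonly Stream _inner;
    public ForwardOnlyStreamSketch(Stream inner) => _inner = inner;
    public override bool CanRead => true;
    public override bool CanSeek => false; // the key property: no seeking allowed
    public override bool CanWrite => false;
    public override long Length => throw new NotSupportedException();
    public override long Position
    {
        get => throw new NotSupportedException();
        set => throw new NotSupportedException();
    }
    public override int Read(byte[] buffer, int offset, int count) =>
        _inner.Read(buffer, offset, count);
    public override long Seek(long offset, SeekOrigin origin) =>
        throw new NotSupportedException();
    public override void SetLength(long value) => throw new NotSupportedException();
    public override void Write(byte[] buffer, int offset, int count) =>
        throw new NotSupportedException();
    public override void Flush() { }
}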

View File

@@ -1,5 +1,4 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers.GZip;
@@ -12,15 +11,15 @@ public class GZipReaderTests : ReaderTests
public GZipReaderTests() => UseExtensionInsteadOfNameToVerify = true;
[Fact]
public Task GZip_Reader_Generic() => ReadAsync("Tar.tar.gz", CompressionType.GZip);
public void GZip_Reader_Generic() => Read("Tar.tar.gz", CompressionType.GZip);
[Fact]
public async Task GZip_Reader_Generic2()
public void GZip_Reader_Generic2()
{
//read only as GZip item
using Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"));
using var reader = GZipReader.Open(new SharpCompressStream(stream));
while (await reader.MoveToNextEntryAsync()) // Crash here
while (reader.MoveToNextEntry()) // Crash here
{
Assert.NotEqual(0, reader.Entry.Size);
Assert.NotEqual(0, reader.Entry.Crc);

View File

@@ -1,5 +1,4 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Writers;
using SharpCompress.Writers.GZip;
@@ -13,7 +12,7 @@ public class GZipWriterTests : WriterTests
: base(ArchiveType.GZip) => UseExtensionInsteadOfNameToVerify = true;
[Fact]
public async Task GZip_Writer_Generic()
public void GZip_Writer_Generic()
{
using (
Stream stream = File.Open(
@@ -26,14 +25,14 @@ public class GZipWriterTests : WriterTests
{
writer.Write("Tar.tar", Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar"));
}
await CompareArchivesByPathAsync(
CompareArchivesByPath(
Path.Combine(SCRATCH_FILES_PATH, "Tar.tar.gz"),
Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")
);
}
[Fact]
public async Task GZip_Writer()
public void GZip_Writer()
{
using (
Stream stream = File.Open(
@@ -46,7 +45,7 @@ public class GZipWriterTests : WriterTests
{
writer.Write("Tar.tar", Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar"));
}
await CompareArchivesByPathAsync(
CompareArchivesByPath(
Path.Combine(SCRATCH_FILES_PATH, "Tar.tar.gz"),
Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")
);
@@ -61,7 +60,7 @@ public class GZipWriterTests : WriterTests
});
[Fact]
public async Task GZip_Writer_Entry_Path_With_Dir()
public void GZip_Writer_Entry_Path_With_Dir()
{
using (
Stream stream = File.Open(
@@ -75,7 +74,7 @@ public class GZipWriterTests : WriterTests
var path = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar");
writer.Write(path, path); //covers issue #532
}
await CompareArchivesByPathAsync(
CompareArchivesByPath(
Path.Combine(SCRATCH_FILES_PATH, "Tar.tar.gz"),
Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")
);

View File

@@ -1,6 +1,5 @@
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Rar;
using SharpCompress.Common;
@@ -13,57 +12,57 @@ namespace SharpCompress.Test.Rar;
public class RarArchiveTests : ArchiveTests
{
[Fact]
public Task Rar_EncryptedFileAndHeader_Archive() =>
ReadRarPasswordAsync("Rar.encrypted_filesAndHeader.rar", "test");
public void Rar_EncryptedFileAndHeader_Archive() =>
ReadRarPassword("Rar.encrypted_filesAndHeader.rar", "test");
[Fact]
public Task Rar_EncryptedFileAndHeader_NoPasswordExceptionTest() =>
Assert.ThrowsAsync(
public void Rar_EncryptedFileAndHeader_NoPasswordExceptionTest() =>
Assert.Throws(
typeof(CryptographicException),
async () => await ReadRarPasswordAsync("Rar.encrypted_filesAndHeader.rar", null)
() => ReadRarPassword("Rar.encrypted_filesAndHeader.rar", null)
);
[Fact]
public Task Rar5_EncryptedFileAndHeader_Archive() =>
ReadRarPasswordAsync("Rar5.encrypted_filesAndHeader.rar", "test");
public void Rar5_EncryptedFileAndHeader_Archive() =>
ReadRarPassword("Rar5.encrypted_filesAndHeader.rar", "test");
[Fact]
public Task Rar5_EncryptedFileAndHeader_Archive_Err() =>
Assert.ThrowsAsync(
public void Rar5_EncryptedFileAndHeader_Archive_Err() =>
Assert.Throws(
typeof(CryptographicException),
async () => await ReadRarPasswordAsync("Rar5.encrypted_filesAndHeader.rar", "failed")
() => ReadRarPassword("Rar5.encrypted_filesAndHeader.rar", "failed")
);
[Fact]
public Task Rar5_EncryptedFileAndHeader_NoPasswordExceptionTest() =>
Assert.ThrowsAsync(
public void Rar5_EncryptedFileAndHeader_NoPasswordExceptionTest() =>
Assert.Throws(
typeof(CryptographicException),
async () => await ReadRarPasswordAsync("Rar5.encrypted_filesAndHeader.rar", null)
() => ReadRarPassword("Rar5.encrypted_filesAndHeader.rar", null)
);
[Fact]
public Task Rar_EncryptedFileOnly_Archive() =>
ReadRarPasswordAsync("Rar.encrypted_filesOnly.rar", "test");
public void Rar_EncryptedFileOnly_Archive() =>
ReadRarPassword("Rar.encrypted_filesOnly.rar", "test");
[Fact]
public Task Rar_EncryptedFileOnly_Archive_Err() =>
Assert.ThrowsAsync(
public void Rar_EncryptedFileOnly_Archive_Err() =>
Assert.Throws(
typeof(CryptographicException),
async () => await ReadRarPasswordAsync("Rar5.encrypted_filesOnly.rar", "failed")
() => ReadRarPassword("Rar5.encrypted_filesOnly.rar", "failed")
);
[Fact]
public Task Rar5_EncryptedFileOnly_Archive() =>
ReadRarPasswordAsync("Rar5.encrypted_filesOnly.rar", "test");
public void Rar5_EncryptedFileOnly_Archive() =>
ReadRarPassword("Rar5.encrypted_filesOnly.rar", "test");
[Fact]
public Task Rar_Encrypted_Archive() => ReadRarPasswordAsync("Rar.Encrypted.rar", "test");
public void Rar_Encrypted_Archive() => ReadRarPassword("Rar.Encrypted.rar", "test");
[Fact]
public Task Rar5_Encrypted_Archive() =>
ReadRarPasswordAsync("Rar5.encrypted_filesAndHeader.rar", "test");
public void Rar5_Encrypted_Archive() =>
ReadRarPassword("Rar5.encrypted_filesAndHeader.rar", "test");
private async Task ReadRarPasswordAsync(string testArchive, string? password)
private void ReadRarPassword(string testArchive, string? password)
{
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, testArchive)))
using (
@@ -78,7 +77,7 @@ public class RarArchiveTests : ArchiveTests
if (!entry.IsDirectory)
{
Assert.Equal(CompressionType.Rar, entry.CompressionType);
await entry.WriteEntryToDirectoryAsync(
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -89,12 +88,12 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public Task Rar_Multi_Archive_Encrypted() =>
Assert.ThrowsAsync<InvalidFormatException>(async () =>
await ArchiveFileReadPasswordAsync("Rar.EncryptedParts.part01.rar", "test")
public void Rar_Multi_Archive_Encrypted() =>
Assert.Throws<InvalidFormatException>(() =>
ArchiveFileReadPassword("Rar.EncryptedParts.part01.rar", "test")
);
protected async Task ArchiveFileReadPasswordAsync(string archiveName, string password)
protected void ArchiveFileReadPassword(string archiveName, string password)
{
using (
var archive = RarArchive.Open(
@@ -105,7 +104,7 @@ public class RarArchiveTests : ArchiveTests
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteEntryToDirectoryAsync(
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -115,28 +114,28 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public Task Rar_None_ArchiveStreamRead() => ArchiveStreamReadAsync("Rar.none.rar");
public void Rar_None_ArchiveStreamRead() => ArchiveStreamRead("Rar.none.rar");
[Fact]
public Task Rar5_None_ArchiveStreamRead() => ArchiveStreamReadAsync("Rar5.none.rar");
public void Rar5_None_ArchiveStreamRead() => ArchiveStreamRead("Rar5.none.rar");
[Fact]
public Task Rar_ArchiveStreamRead() => ArchiveStreamReadAsync("Rar.rar");
public void Rar_ArchiveStreamRead() => ArchiveStreamRead("Rar.rar");
[Fact]
public Task Rar5_ArchiveStreamRead() => ArchiveStreamReadAsync("Rar5.rar");
public void Rar5_ArchiveStreamRead() => ArchiveStreamRead("Rar5.rar");
[Fact]
public Task Rar_test_invalid_exttime_ArchiveStreamRead() =>
public void Rar_test_invalid_exttime_ArchiveStreamRead() =>
DoRar_test_invalid_exttime_ArchiveStreamRead("Rar.test_invalid_exttime.rar");
private async Task DoRar_test_invalid_exttime_ArchiveStreamRead(string filename)
private void DoRar_test_invalid_exttime_ArchiveStreamRead(string filename)
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename));
using var archive = ArchiveFactory.Open(stream);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteEntryToDirectoryAsync(
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -144,14 +143,14 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public async Task Rar_Jpg_ArchiveStreamRead()
public void Rar_Jpg_ArchiveStreamRead()
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Rar.jpeg.jpg"));
using (var archive = RarArchive.Open(stream, new ReaderOptions { LookForHeader = true }))
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteEntryToDirectoryAsync(
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -161,12 +160,12 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public Task Rar_IsSolidArchiveCheck() => DoRar_IsSolidArchiveCheck("Rar.rar");
public void Rar_IsSolidArchiveCheck() => DoRar_IsSolidArchiveCheck("Rar.rar");
[Fact]
public Task Rar5_IsSolidArchiveCheck() => DoRar_IsSolidArchiveCheck("Rar5.rar");
public void Rar5_IsSolidArchiveCheck() => DoRar_IsSolidArchiveCheck("Rar5.rar");
private async Task DoRar_IsSolidArchiveCheck(string filename)
private void DoRar_IsSolidArchiveCheck(string filename)
{
using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename)))
{
@@ -174,7 +173,7 @@ public class RarArchiveTests : ArchiveTests
Assert.False(archive.IsSolid);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteEntryToDirectoryAsync(
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -214,22 +213,22 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public Task Rar_Solid_ArchiveStreamRead() => ArchiveStreamReadAsync("Rar.solid.rar");
public void Rar_Solid_ArchiveStreamRead() => ArchiveStreamRead("Rar.solid.rar");
[Fact]
public Task Rar5_Solid_ArchiveStreamRead() => ArchiveStreamReadAsync("Rar5.solid.rar");
public void Rar5_Solid_ArchiveStreamRead() => ArchiveStreamRead("Rar5.solid.rar");
[Fact]
public Task Rar_Solid_StreamRead_Extract_All() =>
ArchiveStreamReadExtractAllAsync("Rar.solid.rar", CompressionType.Rar);
public void Rar_Solid_StreamRead_Extract_All() =>
ArchiveStreamReadExtractAll("Rar.solid.rar", CompressionType.Rar);
[Fact]
public Task Rar5_Solid_StreamRead_Extract_All() =>
ArchiveStreamReadExtractAllAsync("Rar5.solid.rar", CompressionType.Rar);
public void Rar5_Solid_StreamRead_Extract_All() =>
ArchiveStreamReadExtractAll("Rar5.solid.rar", CompressionType.Rar);
[Fact]
public Task Rar_Multi_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamReadAsync(
public void Rar_Multi_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamRead(
[
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
@@ -242,8 +241,8 @@ public class RarArchiveTests : ArchiveTests
);
[Fact]
public Task Rar5_Multi_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamReadAsync(
public void Rar5_Multi_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamRead(
[
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
@@ -255,7 +254,7 @@ public class RarArchiveTests : ArchiveTests
false
);
private async Task DoRar_Multi_ArchiveStreamReadAsync(string[] archives, bool isSolid)
private void DoRar_Multi_ArchiveStreamRead(string[] archives, bool isSolid)
{
using var archive = RarArchive.Open(
archives.Select(s => Path.Combine(TEST_ARCHIVES_PATH, s)).Select(File.OpenRead)
@@ -263,7 +262,7 @@ public class RarArchiveTests : ArchiveTests
Assert.Equal(archive.IsSolid, isSolid);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteEntryToDirectoryAsync(
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -271,8 +270,8 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public Task Rar5_MultiSolid_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamReadAsync(
public void Rar5_MultiSolid_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamRead(
[
"Rar.multi.solid.part01.rar",
"Rar.multi.solid.part02.rar",
@@ -285,16 +284,16 @@ public class RarArchiveTests : ArchiveTests
);
[Fact]
public Task RarNoneArchiveFileRead() => ArchiveFileReadAsync("Rar.none.rar");
public void RarNoneArchiveFileRead() => ArchiveFileRead("Rar.none.rar");
[Fact]
public Task Rar5NoneArchiveFileRead() => ArchiveFileReadAsync("Rar5.none.rar");
public void Rar5NoneArchiveFileRead() => ArchiveFileRead("Rar5.none.rar");
[Fact]
public Task Rar_ArchiveFileRead() => ArchiveFileReadAsync("Rar.rar");
public void Rar_ArchiveFileRead() => ArchiveFileRead("Rar.rar");
[Fact]
public Task Rar5_ArchiveFileRead() => ArchiveFileReadAsync("Rar5.rar");
public void Rar5_ArchiveFileRead() => ArchiveFileRead("Rar5.rar");
[Fact]
public void Rar_ArchiveFileRead_HasDirectories() =>
@@ -313,7 +312,7 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public async Task Rar_Jpg_ArchiveFileRead()
public void Rar_Jpg_ArchiveFileRead()
{
using (
var archive = RarArchive.Open(
@@ -324,7 +323,7 @@ public class RarArchiveTests : ArchiveTests
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteEntryToDirectoryAsync(
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -334,14 +333,14 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public Task Rar_Solid_ArchiveFileRead() => ArchiveFileReadAsync("Rar.solid.rar");
public void Rar_Solid_ArchiveFileRead() => ArchiveFileRead("Rar.solid.rar");
[Fact]
public Task Rar5_Solid_ArchiveFileRead() => ArchiveFileReadAsync("Rar5.solid.rar");
public void Rar5_Solid_ArchiveFileRead() => ArchiveFileRead("Rar5.solid.rar");
[Fact]
public Task Rar2_Multi_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamReadAsync(
public void Rar2_Multi_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamRead(
[
"Rar2.multi.rar",
"Rar2.multi.r00",
@@ -355,17 +354,17 @@ public class RarArchiveTests : ArchiveTests
);
[Fact]
public Task Rar2_Multi_ArchiveFileRead() => ArchiveFileReadAsync("Rar2.multi.rar"); //r00, r01...
public void Rar2_Multi_ArchiveFileRead() => ArchiveFileRead("Rar2.multi.rar"); //r00, r01...
[Fact]
public Task Rar2_ArchiveFileRead() => ArchiveFileReadAsync("Rar2.rar");
public void Rar2_ArchiveFileRead() => ArchiveFileRead("Rar2.rar");
[Fact]
public async Task Rar15_ArchiveFileRead()
public void Rar15_ArchiveFileRead()
{
UseExtensionInsteadOfNameToVerify = true;
UseCaseInsensitiveToVerify = true;
await ArchiveFileReadAsync("Rar15.rar");
ArchiveFileRead("Rar15.rar");
}
[Fact]
@@ -409,10 +408,10 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public Task Rar4_Multi_ArchiveFileRead() => ArchiveFileReadAsync("Rar4.multi.part01.rar");
public void Rar4_Multi_ArchiveFileRead() => ArchiveFileRead("Rar4.multi.part01.rar");
[Fact]
public Task Rar4_ArchiveFileRead() => ArchiveFileReadAsync("Rar4.rar");
public void Rar4_ArchiveFileRead() => ArchiveFileRead("Rar4.rar");
[Fact]
public void Rar_GetPartsSplit() =>
@@ -462,8 +461,8 @@ public class RarArchiveTests : ArchiveTests
);
[Fact]
public Task Rar4_Multi_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamReadAsync(
public void Rar4_Multi_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamRead(
[
"Rar4.multi.part01.rar",
"Rar4.multi.part02.rar",
@@ -478,8 +477,8 @@ public class RarArchiveTests : ArchiveTests
//no extension, to test that the lib identifies the archive by content, not extension
[Fact]
public Task Rar4_Split_ArchiveStreamRead() =>
ArchiveStreamMultiReadAsync(
public void Rar4_Split_ArchiveStreamRead() =>
ArchiveStreamMultiRead(
null,
[
"Rar4.split.001",
@@ -493,7 +492,7 @@ public class RarArchiveTests : ArchiveTests
//will detect and load other files
[Fact]
public Task Rar4_Multi_ArchiveFirstFileRead() => ArchiveFileReadAsync("Rar4.multi.part01.rar");
public void Rar4_Multi_ArchiveFirstFileRead() => ArchiveFileRead("Rar4.multi.part01.rar");
//"Rar4.multi.part02.rar",
//"Rar4.multi.part03.rar",
@@ -503,7 +502,7 @@ public class RarArchiveTests : ArchiveTests
//"Rar4.multi.part07.rar"
//will detect and load other files
[Fact]
public Task Rar4_Split_ArchiveFirstFileRead() => ArchiveFileReadAsync("Rar4.split.001");
public void Rar4_Split_ArchiveFirstFileRead() => ArchiveFileRead("Rar4.split.001");
//"Rar4.split.002",
//"Rar4.split.003",
@@ -512,8 +511,8 @@ public class RarArchiveTests : ArchiveTests
//"Rar4.split.006"
//will detect and load other files
[Fact]
public Task Rar4_Split_ArchiveStreamFirstFileRead() =>
ArchiveStreamMultiReadAsync(
public void Rar4_Split_ArchiveStreamFirstFileRead() =>
ArchiveStreamMultiRead(
null,
[
"Rar4.split.001",
@@ -527,8 +526,8 @@ public class RarArchiveTests : ArchiveTests
//open with ArchiveFactory.Open and stream
[Fact]
public Task Rar4_Split_ArchiveOpen() =>
ArchiveOpenStreamReadAsync(
public void Rar4_Split_ArchiveOpen() =>
ArchiveOpenStreamRead(
null,
"Rar4.split.001",
"Rar4.split.002",
@@ -540,8 +539,8 @@ public class RarArchiveTests : ArchiveTests
//open with ArchiveFactory.Open and stream
[Fact]
public Task Rar4_Multi_ArchiveOpen() =>
ArchiveOpenStreamReadAsync(
public void Rar4_Multi_ArchiveOpen() =>
ArchiveOpenStreamRead(
null,
"Rar4.multi.part01.rar",
"Rar4.multi.part02.rar",
@@ -571,10 +570,10 @@ public class RarArchiveTests : ArchiveTests
);
[Fact]
public Task Rar_Multi_ArchiveFileRead() => ArchiveFileReadAsync("Rar.multi.part01.rar");
public void Rar_Multi_ArchiveFileRead() => ArchiveFileRead("Rar.multi.part01.rar");
[Fact]
public Task Rar5_Multi_ArchiveFileRead() => ArchiveFileReadAsync("Rar5.multi.part01.rar");
public void Rar5_Multi_ArchiveFileRead() => ArchiveFileRead("Rar5.multi.part01.rar");
[Fact]
public void Rar_IsFirstVolume_True() => DoRar_IsFirstVolume_True("Rar.multi.part01.rar");
@@ -603,7 +602,7 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public Task Rar5_CRC_Blake2_Archive() => ArchiveFileReadAsync("Rar5.crc_blake2.rar");
public void Rar5_CRC_Blake2_Archive() => ArchiveFileRead("Rar5.crc_blake2.rar");
[Fact]
void Rar_Iterate_Archive() =>
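
The exception tests above all follow the same mechanical conversion from xUnit's async assertion to its sync counterpart. A minimal sketch of the swap (archive name and password are placeholders):

// before: async test body, assertion returns a Task that must be awaited
await Assert.ThrowsAsync<CryptographicException>(async () =>
    await ReadRarPasswordAsync("Rar.encrypted.rar", null));

// after: plain delegate, no Task plumbing
Assert.Throws<CryptographicException>(() =>
    ReadRarPassword("Rar.encrypted.rar", null));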

View File

@@ -1,4 +1,4 @@
/*using System;
using System;
using System.Collections;
using System.IO;
using System.Linq;
@@ -422,4 +422,4 @@ public class RarReaderTests : ReaderTests
Assert.Equal(expectedOrder.Pop(), reader.Entry.Key);
}
}
}*/
}

View File

@@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -12,7 +11,7 @@ namespace SharpCompress.Test;
public abstract class ReaderTests : TestBase
{
protected async Task ReadAsync(
protected void Read(
string testArchive,
CompressionType expectedCompression,
ReaderOptions? options = null
@@ -23,14 +22,14 @@ public abstract class ReaderTests : TestBase
options ??= new ReaderOptions() { BufferSize = 0x20000 }; //test larger buffer size (need test rather than eyeballing debug logs :P)
options.LeaveStreamOpen = true;
await ReadAsyncImpl(testArchive, expectedCompression, options);
ReadImpl(testArchive, expectedCompression, options);
options.LeaveStreamOpen = false;
await ReadAsyncImpl(testArchive, expectedCompression, options);
ReadImpl(testArchive, expectedCompression, options);
VerifyFiles();
}
private async Task ReadAsyncImpl(
private void ReadImpl(
string testArchive,
CompressionType expectedCompression,
ReaderOptions options
@@ -46,7 +45,7 @@ public abstract class ReaderTests : TestBase
using var testStream = new TestStream(protectedStream);
using (var reader = ReaderFactory.Open(testStream, options))
{
await UseReaderAsync(reader, expectedCompression);
UseReader(reader, expectedCompression);
protectedStream.ThrowOnDispose = false;
Assert.False(testStream.IsDisposed, $"{nameof(testStream)} prematurely closed");
}
@@ -58,18 +57,42 @@ public abstract class ReaderTests : TestBase
Assert.True(options.LeaveStreamOpen != testStream.IsDisposed, message);
}
public async Task UseReaderAsync(IReader reader, CompressionType expectedCompression)
public void UseReader(IReader reader, CompressionType expectedCompression)
{
while (await reader.MoveToNextEntryAsync())
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(expectedCompression, reader.Entry.CompressionType);
await reader.WriteEntryToDirectoryAsync(
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
}
protected void Iterate(
string testArchive,
string fileOrder,
CompressionType expectedCompression,
ReaderOptions? options = null
)
{
if (!Environment.OSVersion.IsWindows())
{
fileOrder = fileOrder.Replace('\\', '/');
}
var expected = new Stack<string>(fileOrder.Split(' '));
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using var file = File.OpenRead(testArchive);
using var forward = new ForwardOnlyStream(file);
using var reader = ReaderFactory.Open(forward, options);
while (reader.MoveToNextEntry())
{
Assert.Equal(expectedCompression, reader.Entry.CompressionType);
Assert.Equal(expected.Pop(), reader.Entry.Key);
}
}
}
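
One subtlety in the new Iterate helper: Stack<string>(IEnumerable<string>) pushes elements in enumeration order, so Pop() yields them reversed — the fileOrder string therefore lists entries in the opposite order to how the reader returns them (an inference from the code, not documented behavior). A short illustration:

using System;
using System.Collections.Generic;

var expected = new Stack<string>("first.txt second.txt third.txt".Split(' '));
Console.WriteLine(expected.Pop()); // "third.txt"
Console.WriteLine(expected.Pop()); // "second.txt"
Console.WriteLine(expected.Pop()); // "first.txt"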

View File

@@ -1,7 +1,6 @@
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.SevenZip;
using SharpCompress.Common;
@@ -14,93 +13,78 @@ namespace SharpCompress.Test.SevenZip;
public class SevenZipArchiveTests : ArchiveTests
{
[Fact]
public Task SevenZipArchive_Solid_StreamRead() => ArchiveStreamReadAsync("7Zip.solid.7z");
public void SevenZipArchive_Solid_StreamRead() => ArchiveStreamRead("7Zip.solid.7z");
[Fact]
public Task SevenZipArchive_NonSolid_StreamRead() => ArchiveStreamReadAsync("7Zip.nonsolid.7z");
public void SevenZipArchive_NonSolid_StreamRead() => ArchiveStreamRead("7Zip.nonsolid.7z");
[Fact]
public Task SevenZipArchive_LZMA_StreamRead() => ArchiveStreamReadAsync("7Zip.LZMA.7z");
public void SevenZipArchive_LZMA_StreamRead() => ArchiveStreamRead("7Zip.LZMA.7z");
[Fact]
public Task SevenZipArchive_LZMA_PathRead() => ArchiveFileReadAsync("7Zip.LZMA.7z");
public void SevenZipArchive_LZMA_PathRead() => ArchiveFileRead("7Zip.LZMA.7z");
[Fact]
public Task SevenZipArchive_LZMAAES_StreamRead() =>
ArchiveStreamReadAsync("7Zip.LZMA.Aes.7z", new ReaderOptions { Password = "testpassword" });
public void SevenZipArchive_LZMAAES_StreamRead() =>
ArchiveStreamRead("7Zip.LZMA.Aes.7z", new ReaderOptions { Password = "testpassword" });
[Fact]
public Task SevenZipArchive_LZMAAES_PathRead() =>
ArchiveFileReadAsync("7Zip.LZMA.Aes.7z", new ReaderOptions { Password = "testpassword" });
public void SevenZipArchive_LZMAAES_PathRead() =>
ArchiveFileRead("7Zip.LZMA.Aes.7z", new ReaderOptions { Password = "testpassword" });
[Fact]
public Task SevenZipArchive_LZMAAES_NoPasswordExceptionTest() =>
Assert.ThrowsAsync(
public void SevenZipArchive_LZMAAES_NoPasswordExceptionTest() =>
Assert.Throws(
typeof(CryptographicException),
async () =>
await ArchiveFileReadAsync(
"7Zip.LZMA.Aes.7z",
new ReaderOptions { Password = null }
)
() => ArchiveFileRead("7Zip.LZMA.Aes.7z", new ReaderOptions { Password = null })
); //was failing with ArgumentNullException not CryptographicException like rar
[Fact]
public Task SevenZipArchive_PPMd_StreamRead() => ArchiveStreamReadAsync("7Zip.PPMd.7z");
public void SevenZipArchive_PPMd_StreamRead() => ArchiveStreamRead("7Zip.PPMd.7z");
[Fact]
public Task SevenZipArchive_PPMd_StreamRead_Extract_All() =>
ArchiveStreamReadExtractAllAsync("7Zip.PPMd.7z", CompressionType.PPMd);
public void SevenZipArchive_PPMd_StreamRead_Extract_All() =>
ArchiveStreamReadExtractAll("7Zip.PPMd.7z", CompressionType.PPMd);
[Fact]
public Task SevenZipArchive_PPMd_PathRead() => ArchiveFileReadAsync("7Zip.PPMd.7z");
public void SevenZipArchive_PPMd_PathRead() => ArchiveFileRead("7Zip.PPMd.7z");
[Fact]
public Task SevenZipArchive_LZMA2_StreamRead() => ArchiveStreamReadAsync("7Zip.LZMA2.7z");
public void SevenZipArchive_LZMA2_StreamRead() => ArchiveStreamRead("7Zip.LZMA2.7z");
[Fact]
public Task SevenZipArchive_LZMA2_PathRead() => ArchiveFileReadAsync("7Zip.LZMA2.7z");
public void SevenZipArchive_LZMA2_PathRead() => ArchiveFileRead("7Zip.LZMA2.7z");
[Fact]
public Task SevenZipArchive_LZMA2_EXE_StreamRead() =>
ArchiveStreamReadAsync(
new SevenZipFactory(),
"7Zip.LZMA2.exe",
new() { LookForHeader = true }
);
public void SevenZipArchive_LZMA2_EXE_StreamRead() =>
ArchiveStreamRead(new SevenZipFactory(), "7Zip.LZMA2.exe", new() { LookForHeader = true });
[Fact]
public Task SevenZipArchive_LZMA2_EXE_PathRead() =>
ArchiveFileReadAsync(
new SevenZipFactory(),
"7Zip.LZMA2.exe",
new() { LookForHeader = true }
);
public void SevenZipArchive_LZMA2_EXE_PathRead() =>
ArchiveFileRead(new SevenZipFactory(), "7Zip.LZMA2.exe", new() { LookForHeader = true });
[Fact]
public Task SevenZipArchive_LZMA2AES_StreamRead() =>
ArchiveStreamReadAsync(
"7Zip.LZMA2.Aes.7z",
new ReaderOptions { Password = "testpassword" }
);
public void SevenZipArchive_LZMA2AES_StreamRead() =>
ArchiveStreamRead("7Zip.LZMA2.Aes.7z", new ReaderOptions { Password = "testpassword" });
[Fact]
public Task SevenZipArchive_LZMA2AES_PathRead() =>
ArchiveFileReadAsync("7Zip.LZMA2.Aes.7z", new ReaderOptions { Password = "testpassword" });
public void SevenZipArchive_LZMA2AES_PathRead() =>
ArchiveFileRead("7Zip.LZMA2.Aes.7z", new ReaderOptions { Password = "testpassword" });
[Fact]
public Task SevenZipArchive_BZip2_StreamRead() => ArchiveStreamReadAsync("7Zip.BZip2.7z");
public void SevenZipArchive_BZip2_StreamRead() => ArchiveStreamRead("7Zip.BZip2.7z");
[Fact]
public Task SevenZipArchive_BZip2_PathRead() => ArchiveFileReadAsync("7Zip.BZip2.7z");
public void SevenZipArchive_BZip2_PathRead() => ArchiveFileRead("7Zip.BZip2.7z");
[Fact]
public Task SevenZipArchive_LZMA_Time_Attributes_PathRead() =>
ArchiveFileReadExAsync("7Zip.LZMA.7z");
public void SevenZipArchive_LZMA_Time_Attributes_PathRead() =>
ArchiveFileReadEx("7Zip.LZMA.7z");
[Fact]
public Task SevenZipArchive_BZip2_Split() =>
Assert.ThrowsAsync<InvalidOperationException>(async () =>
await ArchiveStreamReadAsync(
public void SevenZipArchive_BZip2_Split() =>
Assert.Throws<InvalidOperationException>(() =>
ArchiveStreamRead(
null,
"Original.7z.001",
"Original.7z.002",
@@ -114,8 +98,8 @@ public class SevenZipArchiveTests : ArchiveTests
//Same archive as Original.7z.001 ... 007 but without the root directory 'Original\' in the archive - this caused the verify to fail
[Fact]
public Task SevenZipArchive_BZip2_Split_Working() =>
ArchiveStreamMultiReadAsync(
public void SevenZipArchive_BZip2_Split_Working() =>
ArchiveStreamMultiRead(
null,
"7Zip.BZip2.split.001",
"7Zip.BZip2.split.002",
@@ -128,8 +112,8 @@ public class SevenZipArchiveTests : ArchiveTests
//will detect and load other files
[Fact]
public Task SevenZipArchive_BZip2_Split_FirstFileRead() =>
ArchiveFileReadAsync("7Zip.BZip2.split.001");
public void SevenZipArchive_BZip2_Split_FirstFileRead() =>
ArchiveFileRead("7Zip.BZip2.split.001");
//"7Zip.BZip2.split.002",
//"7Zip.BZip2.split.003",
@@ -139,15 +123,15 @@ public class SevenZipArchiveTests : ArchiveTests
//"7Zip.BZip2.split.007"
[Fact]
public Task SevenZipArchive_ZSTD_StreamRead() => ArchiveStreamReadAsync("7Zip.ZSTD.7z");
public void SevenZipArchive_ZSTD_StreamRead() => ArchiveStreamRead("7Zip.ZSTD.7z");
[Fact]
public Task SevenZipArchive_ZSTD_PathRead() => ArchiveFileReadAsync("7Zip.ZSTD.7z");
public void SevenZipArchive_ZSTD_PathRead() => ArchiveFileRead("7Zip.ZSTD.7z");
[Fact]
public Task SevenZipArchive_ZSTD_Split() =>
Assert.ThrowsAsync<InvalidOperationException>(async () =>
await ArchiveStreamReadAsync(
public void SevenZipArchive_ZSTD_Split() =>
Assert.Throws<InvalidOperationException>(() =>
ArchiveStreamRead(
null,
"7Zip.ZSTD.Split.7z.001",
"7Zip.ZSTD.Split.7z.002",
@@ -159,53 +143,53 @@ public class SevenZipArchiveTests : ArchiveTests
);
[Fact]
public Task SevenZipArchive_EOS_FileRead() => ArchiveFileReadAsync("7Zip.eos.7z");
public void SevenZipArchive_EOS_FileRead() => ArchiveFileRead("7Zip.eos.7z");
[Fact]
public Task SevenZipArchive_Delta_FileRead() => ArchiveFileReadAsync("7Zip.delta.7z");
public void SevenZipArchive_Delta_FileRead() => ArchiveFileRead("7Zip.delta.7z");
[Fact]
public Task SevenZipArchive_ARM_FileRead() => ArchiveFileReadAsync("7Zip.ARM.7z");
public void SevenZipArchive_ARM_FileRead() => ArchiveFileRead("7Zip.ARM.7z");
[Fact]
public Task SevenZipArchive_ARMT_FileRead() => ArchiveFileReadAsync("7Zip.ARMT.7z");
public void SevenZipArchive_ARMT_FileRead() => ArchiveFileRead("7Zip.ARMT.7z");
[Fact]
public Task SevenZipArchive_BCJ_FileRead() => ArchiveFileReadAsync("7Zip.BCJ.7z");
public void SevenZipArchive_BCJ_FileRead() => ArchiveFileRead("7Zip.BCJ.7z");
[Fact]
public Task SevenZipArchive_BCJ2_FileRead() => ArchiveFileReadAsync("7Zip.BCJ2.7z");
public void SevenZipArchive_BCJ2_FileRead() => ArchiveFileRead("7Zip.BCJ2.7z");
[Fact]
public Task SevenZipArchive_IA64_FileRead() => ArchiveFileReadAsync("7Zip.IA64.7z");
public void SevenZipArchive_IA64_FileRead() => ArchiveFileRead("7Zip.IA64.7z");
[Fact]
public Task SevenZipArchive_PPC_FileRead() => ArchiveFileReadAsync("7Zip.PPC.7z");
public void SevenZipArchive_PPC_FileRead() => ArchiveFileRead("7Zip.PPC.7z");
[Fact]
public Task SevenZipArchive_SPARC_FileRead() => ArchiveFileReadAsync("7Zip.SPARC.7z");
public void SevenZipArchive_SPARC_FileRead() => ArchiveFileRead("7Zip.SPARC.7z");
[Fact]
public Task SevenZipArchive_ARM64_FileRead() => ArchiveFileReadAsync("7Zip.ARM64.7z");
public void SevenZipArchive_ARM64_FileRead() => ArchiveFileRead("7Zip.ARM64.7z");
[Fact]
public Task SevenZipArchive_RISCV_FileRead() => ArchiveFileReadAsync("7Zip.RISCV.7z");
public void SevenZipArchive_RISCV_FileRead() => ArchiveFileRead("7Zip.RISCV.7z");
[Fact]
public Task SevenZipArchive_Filters_FileRead() => ArchiveFileReadAsync("7Zip.Filters.7z");
public void SevenZipArchive_Filters_FileRead() => ArchiveFileRead("7Zip.Filters.7z");
[Fact]
public Task SevenZipArchive_Delta_Distance() =>
ArchiveDeltaDistanceReadAsync("7Zip.delta.distance.7z");
public void SevenZipArchive_Delta_Distance() =>
ArchiveDeltaDistanceRead("7Zip.delta.distance.7z");
[Fact]
public async Task SevenZipArchive_Tar_PathRead()
public void SevenZipArchive_Tar_PathRead()
{
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "7Zip.Tar.tar.7z")))
using (var archive = SevenZipArchive.Open(stream))
{
var entry = archive.Entries.First();
await entry.WriteToFileAsync(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
var size = entry.Size;
var scratch = new FileInfo(Path.Combine(SCRATCH_FILES_PATH, "7Zip.Tar.tar"));
@@ -215,7 +199,7 @@ public class SevenZipArchiveTests : ArchiveTests
Assert.Equal(size, test.Length);
}
await CompareArchivesByPathAsync(
CompareArchivesByPath(
Path.Combine(SCRATCH_FILES_PATH, "7Zip.Tar.tar"),
Path.Combine(TEST_ARCHIVES_PATH, "7Zip.Tar.tar")
);

View File

@@ -16,283 +16,283 @@ namespace SharpCompress.Test.Tar;
public class TarArchiveTests : ArchiveTests
{
public TarArchiveTests() => UseExtensionInsteadOfNameToVerify = true;
/*
[Fact]
public void TarArchiveStreamRead() => ArchiveStreamRead("Tar.tar");
[Fact]
public void TarArchivePathRead() => ArchiveFileRead("Tar.tar");
[Fact]
public void Tar_FileName_Exactly_100_Characters()
[Fact]
public void TarArchiveStreamRead() => ArchiveStreamRead("Tar.tar");
[Fact]
public void TarArchivePathRead() => ArchiveFileRead("Tar.tar");
[Fact]
public void Tar_FileName_Exactly_100_Characters()
{
var archive = "Tar_FileName_Exactly_100_Characters.tar";
// create the 100 char filename
var filename =
"filename_with_exactly_100_characters_______________________________________________________________X";
// Step 1: create a tar file containing a file with the test name
using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH2_FILES_PATH, archive)))
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, CompressionType.None))
using (Stream inputStream = new MemoryStream())
{
var archive = "Tar_FileName_Exactly_100_Characters.tar";
// create the 100 char filename
var filename =
"filename_with_exactly_100_characters_______________________________________________________________X";
// Step 1: create a tar file containing a file with the test name
using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH2_FILES_PATH, archive)))
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, CompressionType.None))
using (Stream inputStream = new MemoryStream())
{
var sw = new StreamWriter(inputStream);
sw.Write("dummy filecontent");
sw.Flush();
inputStream.Position = 0;
writer.Write(filename, inputStream, null);
}
// Step 2: check if the written tar file can be read correctly
var unmodified = Path.Combine(SCRATCH2_FILES_PATH, archive);
using (var archive2 = TarArchive.Open(unmodified))
{
Assert.Equal(1, archive2.Entries.Count);
Assert.Contains(filename, archive2.Entries.Select(entry => entry.Key));
foreach (var entry in archive2.Entries)
{
Assert.Equal(
"dummy filecontent",
new StreamReader(entry.OpenEntryStream()).ReadLine()
);
}
}
var sw = new StreamWriter(inputStream);
sw.Write("dummy filecontent");
sw.Flush();
inputStream.Position = 0;
writer.Write(filename, inputStream, null);
}
[Fact]
public void Tar_NonUstarArchiveWithLongNameDoesNotSkipEntriesAfterTheLongOne()
// Step 2: check if the written tar file can be read correctly
var unmodified = Path.Combine(SCRATCH2_FILES_PATH, archive);
using (var archive2 = TarArchive.Open(unmodified))
{
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "very long filename.tar");
using var archive = TarArchive.Open(unmodified);
Assert.Equal(5, archive.Entries.Count);
Assert.Contains("very long filename/", archive.Entries.Select(entry => entry.Key));
Assert.Contains(
"very long filename/very long filename very long filename very long filename very long filename very long filename very long filename very long filename very long filename very long filename very long filename.jpg",
archive.Entries.Select(entry => entry.Key)
);
Assert.Contains("z_file 1.txt", archive.Entries.Select(entry => entry.Key));
Assert.Contains("z_file 2.txt", archive.Entries.Select(entry => entry.Key));
Assert.Contains("z_file 3.txt", archive.Entries.Select(entry => entry.Key));
}
[Fact]
public void Tar_VeryLongFilepathReadback()
{
var archive = "Tar_VeryLongFilepathReadback.tar";
// create a very long filename
var longFilename = "";
for (var i = 0; i < 600; i = longFilename.Length)
Assert.Equal(1, archive2.Entries.Count);
Assert.Contains(filename, archive2.Entries.Select(entry => entry.Key));
foreach (var entry in archive2.Entries)
{
longFilename += i.ToString("D10") + "-";
}
longFilename += ".txt";
// Step 1: create a tar file containing a file with a long name
using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH2_FILES_PATH, archive)))
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, CompressionType.None))
using (Stream inputStream = new MemoryStream())
{
var sw = new StreamWriter(inputStream);
sw.Write("dummy filecontent");
sw.Flush();
inputStream.Position = 0;
writer.Write(longFilename, inputStream, null);
}
// Step 2: check if the written tar file can be read correctly
var unmodified = Path.Combine(SCRATCH2_FILES_PATH, archive);
using (var archive2 = TarArchive.Open(unmodified))
{
Assert.Equal(1, archive2.Entries.Count);
Assert.Contains(longFilename, archive2.Entries.Select(entry => entry.Key));
foreach (var entry in archive2.Entries)
{
Assert.Equal(
"dummy filecontent",
new StreamReader(entry.OpenEntryStream()).ReadLine()
);
}
}
}
[Fact]
public void Tar_UstarArchivePathReadLongName()
{
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "ustar with long names.tar");
using var archive = TarArchive.Open(unmodified);
Assert.Equal(6, archive.Entries.Count);
Assert.Contains("Directory/", archive.Entries.Select(entry => entry.Key));
Assert.Contains(
"Directory/Some file with veeeeeeeeeery loooooooooong name",
archive.Entries.Select(entry => entry.Key)
);
Assert.Contains(
"Directory/Directory with veeeeeeeeeery loooooooooong name/",
archive.Entries.Select(entry => entry.Key)
);
Assert.Contains(
"Directory/Directory with veeeeeeeeeery loooooooooong name/Some file with veeeeeeeeeery loooooooooong name",
archive.Entries.Select(entry => entry.Key)
);
Assert.Contains(
"Directory/Directory with veeeeeeeeeery loooooooooong name/Directory with veeeeeeeeeery loooooooooong name/",
archive.Entries.Select(entry => entry.Key)
);
Assert.Contains(
"Directory/Directory with veeeeeeeeeery loooooooooong name/Directory with veeeeeeeeeery loooooooooong name/Some file with veeeeeeeeeery loooooooooong name",
archive.Entries.Select(entry => entry.Key)
);
}
[Fact]
public void Tar_Create_New()
{
var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.tar");
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
// var aropt = new Ar
using (var archive = TarArchive.Create())
{
archive.AddAllFromDirectory(ORIGINAL_FILES_PATH);
var twopt = new TarWriterOptions(CompressionType.None, true);
twopt.ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(866) };
archive.SaveTo(scratchPath, twopt);
}
CompareArchivesByPath(unmodified, scratchPath);
}
[Fact]
public void Tar_Random_Write_Add()
{
var jpg = Path.Combine(ORIGINAL_FILES_PATH, "jpg", "test.jpg");
var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.mod.tar");
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.mod.tar");
var modified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
using (var archive = TarArchive.Open(unmodified))
{
archive.AddEntry("jpg\\test.jpg", jpg);
archive.SaveTo(scratchPath, CompressionType.None);
}
CompareArchivesByPath(modified, scratchPath);
}
[Fact]
public void Tar_Random_Write_Remove()
{
var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.mod.tar");
var modified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.mod.tar");
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
using (var archive = TarArchive.Open(unmodified))
{
var entry = archive.Entries.Single(x =>
x.Key.NotNull().EndsWith("jpg", StringComparison.OrdinalIgnoreCase)
Assert.Equal(
"dummy filecontent",
new StreamReader(entry.OpenEntryStream()).ReadLine()
);
archive.RemoveEntry(entry);
archive.SaveTo(scratchPath, CompressionType.None);
}
CompareArchivesByPath(modified, scratchPath);
}
[Fact]
public void Tar_Containing_Rar_Archive()
}
[Fact]
public void Tar_NonUstarArchiveWithLongNameDoesNotSkipEntriesAfterTheLongOne()
{
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "very long filename.tar");
using var archive = TarArchive.Open(unmodified);
Assert.Equal(5, archive.Entries.Count);
Assert.Contains("very long filename/", archive.Entries.Select(entry => entry.Key));
Assert.Contains(
"very long filename/very long filename very long filename very long filename very long filename very long filename very long filename very long filename very long filename very long filename very long filename.jpg",
archive.Entries.Select(entry => entry.Key)
);
Assert.Contains("z_file 1.txt", archive.Entries.Select(entry => entry.Key));
Assert.Contains("z_file 2.txt", archive.Entries.Select(entry => entry.Key));
Assert.Contains("z_file 3.txt", archive.Entries.Select(entry => entry.Key));
}
[Fact]
public void Tar_VeryLongFilepathReadback()
{
var archive = "Tar_VeryLongFilepathReadback.tar";
// create a very long filename
var longFilename = "";
for (var i = 0; i < 600; i = longFilename.Length)
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.ContainsRar.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var archive = ArchiveFactory.Open(stream);
Assert.True(archive.Type == ArchiveType.Tar);
longFilename += i.ToString("D10") + "-";
}
[Fact]
public void Tar_Empty_Archive()
longFilename += ".txt";
// Step 1: create a tar file containing a file with a long name
using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH2_FILES_PATH, archive)))
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, CompressionType.None))
using (Stream inputStream = new MemoryStream())
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.Empty.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var archive = ArchiveFactory.Open(stream);
Assert.True(archive.Type == ArchiveType.Tar);
var sw = new StreamWriter(inputStream);
sw.Write("dummy filecontent");
sw.Flush();
inputStream.Position = 0;
writer.Write(longFilename, inputStream, null);
}
[Theory]
[InlineData(10)]
[InlineData(128)]
public void Tar_Japanese_Name(int length)
// Step 2: check if the written tar file can be read correctly
var unmodified = Path.Combine(SCRATCH2_FILES_PATH, archive);
using (var archive2 = TarArchive.Open(unmodified))
{
using var mstm = new MemoryStream();
var enc = new ArchiveEncoding { Default = Encoding.UTF8 };
Assert.Equal(1, archive2.Entries.Count);
Assert.Contains(longFilename, archive2.Entries.Select(entry => entry.Key));
foreach (var entry in archive2.Entries)
{
Assert.Equal(
"dummy filecontent",
new StreamReader(entry.OpenEntryStream()).ReadLine()
);
}
}
}
[Fact]
public void Tar_UstarArchivePathReadLongName()
{
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "ustar with long names.tar");
using var archive = TarArchive.Open(unmodified);
Assert.Equal(6, archive.Entries.Count);
Assert.Contains("Directory/", archive.Entries.Select(entry => entry.Key));
Assert.Contains(
"Directory/Some file with veeeeeeeeeery loooooooooong name",
archive.Entries.Select(entry => entry.Key)
);
Assert.Contains(
"Directory/Directory with veeeeeeeeeery loooooooooong name/",
archive.Entries.Select(entry => entry.Key)
);
Assert.Contains(
"Directory/Directory with veeeeeeeeeery loooooooooong name/Some file with veeeeeeeeeery loooooooooong name",
archive.Entries.Select(entry => entry.Key)
);
Assert.Contains(
"Directory/Directory with veeeeeeeeeery loooooooooong name/Directory with veeeeeeeeeery loooooooooong name/",
archive.Entries.Select(entry => entry.Key)
);
Assert.Contains(
"Directory/Directory with veeeeeeeeeery loooooooooong name/Directory with veeeeeeeeeery loooooooooong name/Some file with veeeeeeeeeery loooooooooong name",
archive.Entries.Select(entry => entry.Key)
);
}
[Fact]
public void Tar_Create_New()
{
var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.tar");
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
// var aropt = new Ar
using (var archive = TarArchive.Create())
{
archive.AddAllFromDirectory(ORIGINAL_FILES_PATH);
var twopt = new TarWriterOptions(CompressionType.None, true);
twopt.ArchiveEncoding = enc;
var fname = new string((char)0x3042, length);
using (var tw = new TarWriter(mstm, twopt))
using (var input = new MemoryStream(new byte[32]))
twopt.ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(866) };
archive.SaveTo(scratchPath, twopt);
}
CompareArchivesByPath(unmodified, scratchPath);
}
[Fact]
public void Tar_Random_Write_Add()
{
var jpg = Path.Combine(ORIGINAL_FILES_PATH, "jpg", "test.jpg");
var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.mod.tar");
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.mod.tar");
var modified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
using (var archive = TarArchive.Open(unmodified))
{
archive.AddEntry("jpg\\test.jpg", jpg);
archive.SaveTo(scratchPath, CompressionType.None);
}
CompareArchivesByPath(modified, scratchPath);
}
[Fact]
public void Tar_Random_Write_Remove()
{
var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.mod.tar");
var modified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.mod.tar");
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
using (var archive = TarArchive.Open(unmodified))
{
var entry = archive.Entries.Single(x =>
x.Key.NotNull().EndsWith("jpg", StringComparison.OrdinalIgnoreCase)
);
archive.RemoveEntry(entry);
archive.SaveTo(scratchPath, CompressionType.None);
}
CompareArchivesByPath(modified, scratchPath);
}
[Fact]
public void Tar_Containing_Rar_Archive()
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.ContainsRar.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var archive = ArchiveFactory.Open(stream);
Assert.True(archive.Type == ArchiveType.Tar);
}
[Fact]
public void Tar_Empty_Archive()
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.Empty.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var archive = ArchiveFactory.Open(stream);
Assert.True(archive.Type == ArchiveType.Tar);
}
[Theory]
[InlineData(10)]
[InlineData(128)]
public void Tar_Japanese_Name(int length)
{
using var mstm = new MemoryStream();
var enc = new ArchiveEncoding { Default = Encoding.UTF8 };
var twopt = new TarWriterOptions(CompressionType.None, true);
twopt.ArchiveEncoding = enc;
var fname = new string((char)0x3042, length);
using (var tw = new TarWriter(mstm, twopt))
using (var input = new MemoryStream(new byte[32]))
{
tw.Write(fname, input, null);
}
using (var inputMemory = new MemoryStream(mstm.ToArray()))
{
var tropt = new ReaderOptions { ArchiveEncoding = enc };
using (var tr = TarReader.Open(inputMemory, tropt))
{
tw.Write(fname, input, null);
}
using (var inputMemory = new MemoryStream(mstm.ToArray()))
{
var tropt = new ReaderOptions { ArchiveEncoding = enc };
using (var tr = TarReader.Open(inputMemory, tropt))
while (tr.MoveToNextEntry())
{
while (tr.MoveToNextEntry())
{
Assert.Equal(fname, tr.Entry.Key);
}
Assert.Equal(fname, tr.Entry.Key);
}
}
}
[Fact]
public void Tar_Read_One_At_A_Time()
}
[Fact]
public void Tar_Read_One_At_A_Time()
{
var archiveEncoding = new ArchiveEncoding { Default = Encoding.UTF8 };
var tarWriterOptions = new TarWriterOptions(CompressionType.None, true)
{
var archiveEncoding = new ArchiveEncoding { Default = Encoding.UTF8 };
var tarWriterOptions = new TarWriterOptions(CompressionType.None, true)
{
ArchiveEncoding = archiveEncoding,
};
var testBytes = Encoding.UTF8.GetBytes("This is a test.");
using var memoryStream = new MemoryStream();
using (var tarWriter = new TarWriter(memoryStream, tarWriterOptions))
using (var testFileStream = new MemoryStream(testBytes))
{
tarWriter.Write("test1.txt", testFileStream);
testFileStream.Position = 0;
tarWriter.Write("test2.txt", testFileStream);
}
memoryStream.Position = 0;
var numberOfEntries = 0;
using (var archiveFactory = TarArchive.Open(memoryStream))
{
foreach (var entry in archiveFactory.Entries)
{
++numberOfEntries;
using var tarEntryStream = entry.OpenEntryStream();
using var testFileStream = new MemoryStream();
tarEntryStream.CopyTo(testFileStream);
Assert.Equal(testBytes.Length, testFileStream.Length);
}
}
Assert.Equal(2, numberOfEntries);
}
[Fact]
public void Tar_Detect_Test()
{
var isTar = TarArchive.IsTarFile(Path.Combine(TEST_ARCHIVES_PATH, "false.positive.tar"));
Assert.False(isTar);
}
}
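For reference, the encoding round-trip that Tar_Japanese_Name exercises can be reduced to a standalone sketch using the same SharpCompress APIs as above; the entry name and buffer contents here are illustrative only:

using System;
using System.IO;
using System.Text;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Readers.Tar;
using SharpCompress.Writers.Tar;

// Round-trip a non-ASCII entry name: the same ArchiveEncoding must be
// supplied to both the writer and the reader, otherwise the stored name
// bytes are reinterpreted under a different encoding and the keys differ.
var enc = new ArchiveEncoding { Default = Encoding.UTF8 };
var options = new TarWriterOptions(CompressionType.None, true) { ArchiveEncoding = enc };
using var buffer = new MemoryStream();
using (var writer = new TarWriter(buffer, options))
using (var content = new MemoryStream(new byte[32]))
{
    writer.Write("\u3042\u3042\u3042.bin", content, null); // hypothetical name
}
using var readBack = new MemoryStream(buffer.ToArray());
using var reader = TarReader.Open(readBack, new ReaderOptions { ArchiveEncoding = enc });
while (reader.MoveToNextEntry())
{
    Console.WriteLine(reader.Entry.Key); // prints the original name
}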

View File

@@ -12,255 +12,255 @@ namespace SharpCompress.Test.Tar;
public class TarReaderTests : ReaderTests
{
public TarReaderTests() => UseExtensionInsteadOfNameToVerify = true;
[Fact]
public void Tar_Reader() => Read("Tar.tar", CompressionType.None);
[Fact]
public void Tar_Skip()
{
using Stream stream = new ForwardOnlyStream(
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar"))
);
using var reader = ReaderFactory.Open(stream);
var x = 0;
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
x++;
if (x % 2 == 0)
{
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
}
}
[Fact]
public void Tar_Z_Reader() => Read("Tar.tar.Z", CompressionType.Lzw);
[Fact]
public void Tar_BZip2_Reader() => Read("Tar.tar.bz2", CompressionType.BZip2);
[Fact]
public void Tar_GZip_Reader() => Read("Tar.tar.gz", CompressionType.GZip);
[Fact]
public void Tar_ZStandard_Reader() => Read("Tar.tar.zst", CompressionType.ZStandard);
[Fact]
public void Tar_LZip_Reader() => Read("Tar.tar.lz", CompressionType.LZip);
[Fact]
public void Tar_Xz_Reader() => Read("Tar.tar.xz", CompressionType.Xz);
[Fact]
public void Tar_GZip_OldGnu_Reader() => Read("Tar.oldgnu.tar.gz", CompressionType.GZip);
[Fact]
public void Tar_BZip2_Entry_Stream()
{
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2")))
using (var reader = TarReader.Open(stream))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(CompressionType.BZip2, reader.Entry.CompressionType);
using var entryStream = reader.OpenEntryStream();
var file = Path.GetFileName(reader.Entry.Key);
var folder =
Path.GetDirectoryName(reader.Entry.Key)
?? throw new ArgumentNullException();
var destdir = Path.Combine(SCRATCH_FILES_PATH, folder);
if (!Directory.Exists(destdir))
{
Directory.CreateDirectory(destdir);
}
var destinationFileName = Path.Combine(destdir, file.NotNull());
using var fs = File.OpenWrite(destinationFileName);
entryStream.CopyTo(fs);
}
}
}
VerifyFiles();
}
[Fact]
public void Tar_LongNamesWithLongNameExtension()
{
var filePaths = new List<string>();
using (
Stream stream = File.OpenRead(
Path.Combine(TEST_ARCHIVES_PATH, "Tar.LongPathsWithLongNameExtension.tar")
)
)
using (var reader = TarReader.Open(stream))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
filePaths.Add(reader.Entry.Key.NotNull("Entry Key is null"));
}
}
}
Assert.Equal(3, filePaths.Count);
Assert.Contains("a.txt", filePaths);
Assert.Contains(
"wp-content/plugins/gravityformsextend/lib/Aws/Symfony/Component/ClassLoader/Tests/Fixtures/Apc/beta/Apc/ApcPrefixCollision/A/B/Bar.php",
filePaths
);
Assert.Contains(
"wp-content/plugins/gravityformsextend/lib/Aws/Symfony/Component/ClassLoader/Tests/Fixtures/Apc/beta/Apc/ApcPrefixCollision/A/B/Foo.php",
filePaths
);
}
[Fact]
public void Tar_BZip2_Skip_Entry_Stream()
{
using Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2"));
using var reader = TarReader.Open(stream);
var names = new List<string>();
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(CompressionType.BZip2, reader.Entry.CompressionType);
using var entryStream = reader.OpenEntryStream();
entryStream.SkipEntry();
names.Add(reader.Entry.Key.NotNull());
}
}
Assert.Equal(3, names.Count);
}
[Fact]
public void Tar_Containing_Rar_Reader()
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.ContainsRar.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var reader = ReaderFactory.Open(stream);
Assert.True(reader.ArchiveType == ArchiveType.Tar);
}
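SkipEntry, as used in Tar_BZip2_Skip_Entry_Stream above, consumes an entry without copying its bytes anywhere; a minimal sketch of the pattern (the path is a placeholder):

using System.Collections.Generic;
using System.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Tar;

// Record entry names while discarding their contents. SkipEntry marks the
// entry stream as fully consumed so MoveToNextEntry can proceed cleanly
// even though nothing was copied out.
using Stream stream = File.OpenRead("archive.tar.bz2"); // placeholder
using var reader = TarReader.Open(stream);
var names = new List<string>();
while (reader.MoveToNextEntry())
{
    if (!reader.Entry.IsDirectory)
    {
        using var entryStream = reader.OpenEntryStream();
        entryStream.SkipEntry();
        names.Add(reader.Entry.Key ?? "");
    }
}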
[Fact]
public void Tar_With_TarGz_With_Flushed_EntryStream()
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.ContainsTarGz.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var reader = ReaderFactory.Open(stream);
Assert.True(reader.MoveToNextEntry());
Assert.Equal("inner.tar.gz", reader.Entry.Key);
using var entryStream = reader.OpenEntryStream();
using var flushingStream = new FlushOnDisposeStream(entryStream);
// Extract inner.tar.gz
using var innerReader = ReaderFactory.Open(flushingStream);
Assert.True(innerReader.MoveToNextEntry());
Assert.Equal("test", innerReader.Entry.Key);
}
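The Tar_Containing_* tests and the flushed-entry-stream test above all rely on the same composition trick: a reader can be opened directly over another reader's entry stream, so nested archives never touch disk. Schematically (file and entry names are placeholders):

using System.IO;
using SharpCompress.Readers;

// Walk the outer tar; when the inner archive entry is reached, feed its
// entry stream straight into a second reader - no temp file needed.
using var outer = ReaderFactory.Open(File.OpenRead("outer.tar")); // placeholder
while (outer.MoveToNextEntry())
{
    if (outer.Entry.Key == "inner.tar.gz") // placeholder entry name
    {
        using var inner = ReaderFactory.Open(outer.OpenEntryStream());
        while (inner.MoveToNextEntry())
        {
            // process nested entries here
        }
    }
}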
[Fact]
public void Tar_Broken_Stream()
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var reader = ReaderFactory.Open(stream);
var memoryStream = new MemoryStream();
Assert.True(reader.MoveToNextEntry());
Assert.True(reader.MoveToNextEntry());
reader.WriteEntryTo(memoryStream);
stream.Close();
Assert.Throws<IncompleteArchiveException>(() => reader.MoveToNextEntry());
}
[Fact]
public void Tar_Corrupted()
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "TarCorrupted.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var reader = ReaderFactory.Open(stream);
var memoryStream = new MemoryStream();
Assert.True(reader.MoveToNextEntry());
Assert.True(reader.MoveToNextEntry());
reader.WriteEntryTo(memoryStream);
stream.Close();
Assert.Throws<IncompleteArchiveException>(() => reader.MoveToNextEntry());
}
#if !NETFRAMEWORK
[Fact]
public void Tar_GZip_With_Symlink_Entries()
{
var isWindows = System.Runtime.InteropServices.RuntimeInformation.IsOSPlatform(
System.Runtime.InteropServices.OSPlatform.Windows
);
using Stream stream = File.OpenRead(
Path.Combine(TEST_ARCHIVES_PATH, "TarWithSymlink.tar.gz")
);
using var reader = TarReader.Open(stream);
while (reader.MoveToNextEntry())
{
if (reader.Entry.IsDirectory)
{
continue;
}
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions
{
ExtractFullPath = true,
Overwrite = true,
WriteSymbolicLink = (sourcePath, targetPath) =>
{
if (!isWindows)
{
var link = new Mono.Unix.UnixSymbolicLinkInfo(sourcePath);
if (File.Exists(sourcePath))
{
link.Delete(); // equivalent to ln -s -f
}
link.CreateSymbolicLinkTo(targetPath);
}
},
}
);
if (!isWindows)
{
if (reader.Entry.LinkTarget != null)
{
var path = Path.Combine(SCRATCH_FILES_PATH, reader.Entry.Key.NotNull());
var link = new Mono.Unix.UnixSymbolicLinkInfo(path);
if (link.HasContents)
{
// need to convert the link to an absolute path for comparison
var target = reader.Entry.LinkTarget;
var realTarget = Path.GetFullPath(
Path.Combine($"{Path.GetDirectoryName(path)}", target)
);
Assert.Equal(realTarget, link.GetContents().ToString());
}
else
{
Assert.True(false, "Symlink has no target");
}
}
}
}
}
#endif
}
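Tar_Broken_Stream and Tar_Corrupted assert the same contract from two different causes: once the source stream is gone or the data is truncated, the next advance must surface IncompleteArchiveException rather than report a clean end of archive. Distilled (placeholder path; namespaces as used elsewhere in these tests):

using System.IO;
using SharpCompress.Common;
using SharpCompress.Readers;
using Xunit;

// Closing the source stream mid-read simulates a broken transfer; the
// reader must fail loudly on the next advance instead of returning false.
using Stream stream = File.OpenRead("archive.tar"); // placeholder
using var reader = ReaderFactory.Open(stream);
Assert.True(reader.MoveToNextEntry());
stream.Close();
Assert.Throws<IncompleteArchiveException>(() => reader.MoveToNextEntry());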

View File

@@ -1,6 +1,5 @@
using System.IO;
using System.Text;
using SharpCompress.Common;
using SharpCompress.Writers.Tar;
using Xunit;
@@ -21,8 +20,8 @@ public class TarWriterTests : WriterTests
: base(ArchiveType.Tar) => UseExtensionInsteadOfNameToVerify = true;
[Fact]
public void Tar_Writer() =>
Write(
CompressionType.None,
"Tar.noEmptyDirs.tar",
"Tar.noEmptyDirs.tar",
@@ -30,8 +29,8 @@ public class TarWriterTests : WriterTests
);
[Fact]
public void Tar_BZip2_Writer() =>
Write(
CompressionType.BZip2,
"Tar.noEmptyDirs.tar.bz2",
"Tar.noEmptyDirs.tar.bz2",
@@ -39,8 +38,8 @@ public class TarWriterTests : WriterTests
);
[Fact]
public void Tar_LZip_Writer() =>
Write(
CompressionType.LZip,
"Tar.noEmptyDirs.tar.lz",
"Tar.noEmptyDirs.tar.lz",
@@ -48,13 +47,9 @@ public class TarWriterTests : WriterTests
);
[Fact]
public void Tar_Rar_Write() =>
Assert.Throws<InvalidFormatException>(() =>
Write(CompressionType.Rar, "Zip.ppmd.noEmptyDirs.zip", "Zip.ppmd.noEmptyDirs.zip")
);
[Theory]

View File

@@ -3,7 +3,6 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using SharpCompress.Readers;
using Xunit;
@@ -203,11 +202,7 @@ public class TestBase : IDisposable
Assert.Equal(fi1.Attributes, fi2.Attributes);
}
protected void CompareArchivesByPath(string file1, string file2, Encoding? encoding = null)
{
var readerOptions = new ReaderOptions { LeaveStreamOpen = false };
readerOptions.ArchiveEncoding.Default = encoding ?? Encoding.Default;
@@ -218,13 +213,13 @@ public class TestBase : IDisposable
using (var archive1 = ReaderFactory.Open(File.OpenRead(file1), readerOptions))
using (var archive2 = ReaderFactory.Open(File.OpenRead(file2), readerOptions))
{
while (archive1.MoveToNextEntry())
{
Assert.True(archive2.MoveToNextEntry());
archive1Entries.Add(archive1.Entry.Key.NotNull());
archive2Entries.Add(archive2.Entry.Key.NotNull());
}
Assert.False(archive2.MoveToNextEntry());
}
archive1Entries.Sort();
archive2Entries.Sort();
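CompareArchivesByPath reduces both archives to entry-key lists walked in lock-step. The hunk cuts off after the sorts, but the shape of the check, with the final list comparison as an assumption and the helper name hypothetical, is:

using System.Collections.Generic;
using System.IO;
using SharpCompress.Readers;
using Xunit;

// Walk both archives in lock-step, collect entry keys, then compare the
// sorted lists; contents are deliberately not compared - this checks
// structure, not data.
static void AssertSameEntryKeys(string file1, string file2) // hypothetical helper
{
    var keys1 = new List<string>();
    var keys2 = new List<string>();
    using (var a1 = ReaderFactory.Open(File.OpenRead(file1)))
    using (var a2 = ReaderFactory.Open(File.OpenRead(file2)))
    {
        while (a1.MoveToNextEntry())
        {
            Assert.True(a2.MoveToNextEntry());
            keys1.Add(a1.Entry.Key!);
            keys2.Add(a2.Entry.Key!);
        }
        Assert.False(a2.MoveToNextEntry());
    }
    keys1.Sort();
    keys2.Sort();
    Assert.Equal(keys1, keys2); // assumption: the truncated hunk ends in an equality check
}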

View File

@@ -1,6 +1,5 @@
using System.IO;
using System.Text;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -14,7 +13,7 @@ public class WriterTests : TestBase
protected WriterTests(ArchiveType type) => _type = type;
protected void Write(
CompressionType compressionType,
string archive,
string archiveToVerifyAgainst,
@@ -30,8 +29,7 @@ public class WriterTests : TestBase
using var writer = WriterFactory.Open(stream, _type, writerOptions);
writer.WriteAll(ORIGINAL_FILES_PATH, "*", SearchOption.AllDirectories);
}
CompareArchivesByPath(
Path.Combine(SCRATCH2_FILES_PATH, archive),
Path.Combine(TEST_ARCHIVES_PATH, archiveToVerifyAgainst)
);
@@ -46,7 +44,7 @@ public class WriterTests : TestBase
SharpCompressStream.Create(stream, leaveOpen: true),
readerOptions
);
reader.WriteAllToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true }
);
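Condensed, the first half of the Write helper - pack a directory tree, then compare and re-extract - looks like this sketch (archive type, options, and paths are illustrative):

using System.IO;
using SharpCompress.Common;
using SharpCompress.Writers;

// WriteAll enumerates every file under the source root and stores each
// one under its relative path; the scratch archive is then compared
// against a known-good archive and re-extracted for verification.
using (var stream = File.Create("scratch/out.tar")) // placeholder path
{
    var options = new WriterOptions(CompressionType.None);
    using var writer = WriterFactory.Open(stream, ArchiveType.Tar, options);
    writer.WriteAll("original-files", "*", SearchOption.AllDirectories); // placeholder root
}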

View File

@@ -1,7 +1,6 @@
using System;
using System.IO;
using System.Linq;
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.Common.Zip;
@@ -24,67 +23,67 @@ public class Zip64Tests : WriterTests
private const long FOUR_GB_LIMIT = ((long)uint.MaxValue) + 1;
[Trait("format", "zip64")]
public void Zip64_Single_Large_File() =>
// One single file, requires zip64
RunSingleTest(1, FOUR_GB_LIMIT, setZip64: true, forwardOnly: false);
[Trait("format", "zip64")]
public void Zip64_Two_Large_Files() =>
// One single file, requires zip64
RunSingleTest(2, FOUR_GB_LIMIT, setZip64: true, forwardOnly: false);
[Trait("format", "zip64")]
public void Zip64_Two_Small_files() =>
// Multiple files, does not require zip64
RunSingleTest(2, FOUR_GB_LIMIT / 2, setZip64: false, forwardOnly: false);
[Trait("format", "zip64")]
public void Zip64_Two_Small_files_stream() =>
// Multiple files, does not require zip64, and works with streams
RunSingleTest(2, FOUR_GB_LIMIT / 2, setZip64: false, forwardOnly: true);
[Trait("format", "zip64")]
public void Zip64_Two_Small_Files_Zip64() =>
// Multiple files, use zip64 even though it is not required
RunSingleTest(2, FOUR_GB_LIMIT / 2, setZip64: true, forwardOnly: false);
[Trait("format", "zip64")]
public void Zip64_Single_Large_File_Fail()
{
try
{
// One single file, should fail
RunSingleTest(1, FOUR_GB_LIMIT, setZip64: false, forwardOnly: false);
throw new InvalidOperationException("Test did not fail?");
}
catch (NotSupportedException) { }
}
[Trait("zip64", "true")]
public void Zip64_Single_Large_File_Zip64_Streaming_Fail()
{
try
{
// One single file, should fail (fast) with zip64
RunSingleTest(1, FOUR_GB_LIMIT, setZip64: true, forwardOnly: true);
throw new InvalidOperationException("Test did not fail?");
}
catch (NotSupportedException) { }
}
[Trait("zip64", "true")]
public void Zip64_Single_Large_File_Streaming_Fail()
{
try
{
// One single file, should fail once the write discovers the problem
RunSingleTest(1, FOUR_GB_LIMIT, setZip64: false, forwardOnly: true);
throw new InvalidOperationException("Test did not fail?");
}
catch (NotSupportedException) { }
}
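All of the pass/fail pairs above pivot on the classic zip size limit; the threshold the suite encodes in FOUR_GB_LIMIT reduces to a one-line predicate (RequiresZip64 is a hypothetical helper name, for illustration only):

// Legacy zip headers store sizes in 32-bit fields, so any entry of
// uint.MaxValue + 1 bytes or more needs the zip64 extensions. In practice
// 0xFFFFFFFF itself is reserved as the zip64 escape value, so real
// writers switch slightly earlier.
const long FOUR_GB_LIMIT = ((long)uint.MaxValue) + 1;

static bool RequiresZip64(long uncompressedSize) => uncompressedSize >= FOUR_GB_LIMIT;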
public void RunSingleTest(
long files,
long filesize,
bool setZip64,
@@ -105,7 +104,7 @@ public class Zip64Tests : WriterTests
CreateZipArchive(filename, files, filesize, writeChunkSize, setZip64, forwardOnly);
}
var resForward = ReadForwardOnly(filename);
if (resForward.Item1 != files)
{
throw new InvalidOperationException(
@@ -169,7 +168,7 @@ public class Zip64Tests : WriterTests
}
}
public Tuple<long, long> ReadForwardOnly(string filename)
{
long count = 0;
long size = 0;
@@ -177,9 +176,9 @@ public class Zip64Tests : WriterTests
using (var fs = File.OpenRead(filename))
using (var rd = ZipReader.Open(fs, new ReaderOptions { LookForHeader = false }))
{
while (rd.MoveToNextEntry())
{
using (rd.OpenEntryStream()) { }
count++;
if (prev != null)

File diff suppressed because it is too large

View File

@@ -17,219 +17,219 @@ namespace SharpCompress.Test.Zip;
public class ZipTypesLevelsWithCrcRatioTests : ArchiveTests
{
public ZipTypesLevelsWithCrcRatioTests() => UseExtensionInsteadOfNameToVerify = true;
[Theory]
[InlineData(CompressionType.Deflate, 1, 1, 0.11f)] // was 0.8f, actual 0.104
[InlineData(CompressionType.Deflate, 3, 1, 0.08f)] // was 0.8f, actual 0.078
[InlineData(CompressionType.Deflate, 6, 1, 0.05f)] // was 0.8f, actual ~0.042
[InlineData(CompressionType.Deflate, 9, 1, 0.04f)] // was 0.7f, actual 0.038
[InlineData(CompressionType.ZStandard, 1, 1, 0.025f)] // was 0.8f, actual 0.023
[InlineData(CompressionType.ZStandard, 3, 1, 0.015f)] // was 0.7f, actual 0.013
[InlineData(CompressionType.ZStandard, 9, 1, 0.006f)] // was 0.7f, actual 0.005
[InlineData(CompressionType.ZStandard, 22, 1, 0.005f)] // was 0.7f, actual 0.004
[InlineData(CompressionType.BZip2, 0, 1, 0.035f)] // was 0.8f, actual 0.033
[InlineData(CompressionType.LZMA, 0, 1, 0.005f)] // was 0.8f, actual 0.004
[InlineData(CompressionType.None, 0, 1, 1.001f)] // was 1.1f, actual 1.000
[InlineData(CompressionType.Deflate, 6, 2, 0.045f)] // was 0.8f, actual 0.042
[InlineData(CompressionType.ZStandard, 3, 2, 0.012f)] // was 0.7f, actual 0.010
[InlineData(CompressionType.BZip2, 0, 2, 0.035f)] // was 0.8f, actual 0.032
[InlineData(CompressionType.Deflate, 9, 3, 0.04f)] // was 0.7f, actual 0.038
[InlineData(CompressionType.ZStandard, 9, 3, 0.003f)] // was 0.7f, actual 0.002
public void Zip_Create_Archive_With_3_Files_Crc32_Test(
CompressionType compressionType,
int compressionLevel,
int sizeMb,
float expectedRatio
)
{
const int OneMiB = 1024 * 1024;
var baseSize = sizeMb * OneMiB;
// Generate test content for files with sizes based on the sizeMb parameter
var file1Data = TestPseudoTextStream.Create(baseSize);
var file2Data = TestPseudoTextStream.Create(baseSize * 2);
var file3Data = TestPseudoTextStream.Create(baseSize * 3);
var expectedFiles = new Dictionary<string, (byte[] data, uint crc)>
{
[$"file1_{sizeMb}MiB.txt"] = (file1Data, CalculateCrc32(file1Data)),
[$"data/file2_{sizeMb * 2}MiB.txt"] = (file2Data, CalculateCrc32(file2Data)),
[$"deep/nested/file3_{sizeMb * 3}MiB.txt"] = (file3Data, CalculateCrc32(file3Data)),
};
// Create zip archive in memory
using var zipStream = new MemoryStream();
using (var writer = CreateWriterWithLevel(zipStream, compressionType, compressionLevel))
{
writer.Write($"file1_{sizeMb}MiB.txt", new MemoryStream(file1Data));
writer.Write($"data/file2_{sizeMb * 2}MiB.txt", new MemoryStream(file2Data));
writer.Write($"deep/nested/file3_{sizeMb * 3}MiB.txt", new MemoryStream(file3Data));
}
// Calculate and output actual compression ratio
var originalSize = file1Data.Length + file2Data.Length + file3Data.Length;
var actualRatio = (double)zipStream.Length / originalSize;
//Debug.WriteLine($"Zip_Create_Archive_With_3_Files_Crc32_Test: {compressionType} Level={compressionLevel} Size={sizeMb}MB Expected={expectedRatio:F3} Actual={actualRatio:F3}");
// Verify compression occurred (except for None compression type)
if (compressionType != CompressionType.None)
{
Assert.True(
zipStream.Length < originalSize,
$"Compression failed: compressed={zipStream.Length}, original={originalSize}"
);
}
// Verify compression ratio
VerifyCompressionRatio(
originalSize,
zipStream.Length,
expectedRatio,
$"{compressionType} level {compressionLevel}"
);
// Verify archive content and CRC32
VerifyArchiveContent(zipStream, expectedFiles);
// Verify compression type is correctly set
VerifyCompressionType(zipStream, compressionType);
}
[Theory]
[InlineData(CompressionType.Deflate, 1, 4, 0.11f)] // was 0.8, actual 0.105
[InlineData(CompressionType.Deflate, 3, 4, 0.08f)] // was 0.8, actual 0.077
[InlineData(CompressionType.Deflate, 6, 4, 0.045f)] // was 0.8, actual 0.042
[InlineData(CompressionType.Deflate, 9, 4, 0.04f)] // was 0.8, actual 0.037
[InlineData(CompressionType.ZStandard, 1, 4, 0.025f)] // was 0.8, actual 0.022
[InlineData(CompressionType.ZStandard, 3, 4, 0.012f)] // was 0.8, actual 0.010
[InlineData(CompressionType.ZStandard, 9, 4, 0.003f)] // was 0.8, actual 0.002
[InlineData(CompressionType.ZStandard, 22, 4, 0.003f)] // was 0.8, actual 0.002
[InlineData(CompressionType.BZip2, 0, 4, 0.035f)] // was 0.8, actual 0.032
[InlineData(CompressionType.LZMA, 0, 4, 0.003f)] // was 0.8, actual 0.002
public void Zip_WriterFactory_Crc32_Test(
CompressionType compressionType,
int compressionLevel,
int sizeMb,
float expectedRatio
)
{
var fileSize = sizeMb * 1024 * 1024;
var testData = TestPseudoTextStream.Create(fileSize);
var expectedCrc = CalculateCrc32(testData);
// Create archive with specified compression level
using var zipStream = new MemoryStream();
var writerOptions = new ZipWriterOptions(compressionType)
{
CompressionLevel = compressionLevel,
};
using (var writer = WriterFactory.Open(zipStream, ArchiveType.Zip, writerOptions))
{
writer.Write(
$"{compressionType}_level_{compressionLevel}_{sizeMb}MiB.txt",
new MemoryStream(testData)
);
}
// Calculate and output actual compression ratio
var actualRatio = (double)zipStream.Length / testData.Length;
//Debug.WriteLine($"Zip_WriterFactory_Crc32_Test: {compressionType} Level={compressionLevel} Size={sizeMb}MB Expected={expectedRatio:F3} Actual={actualRatio:F3}");
VerifyCompressionRatio(
testData.Length,
zipStream.Length,
expectedRatio,
$"{compressionType} level {compressionLevel}"
);
// Verify the archive
zipStream.Position = 0;
using var archive = ZipArchive.Open(zipStream);
var entry = archive.Entries.Single(e => !e.IsDirectory);
using var entryStream = entry.OpenEntryStream();
using var extractedStream = new MemoryStream();
entryStream.CopyTo(extractedStream);
var extractedData = extractedStream.ToArray();
var actualCrc = CalculateCrc32(extractedData);
Assert.Equal(compressionType, entry.CompressionType);
Assert.Equal(expectedCrc, actualCrc);
Assert.Equal(testData.Length, extractedData.Length);
Assert.Equal(testData, extractedData);
}
[Theory]
[InlineData(CompressionType.Deflate, 1, 2, 0.11f)] // was 0.8, actual 0.104
[InlineData(CompressionType.Deflate, 3, 2, 0.08f)] // was 0.8, actual 0.077
[InlineData(CompressionType.Deflate, 6, 2, 0.045f)] // was 0.8, actual 0.042
[InlineData(CompressionType.Deflate, 9, 2, 0.04f)] // was 0.7, actual 0.038
[InlineData(CompressionType.ZStandard, 1, 2, 0.025f)] // was 0.8, actual 0.023
[InlineData(CompressionType.ZStandard, 3, 2, 0.015f)] // was 0.7, actual 0.012
[InlineData(CompressionType.ZStandard, 9, 2, 0.006f)] // was 0.7, actual 0.005
[InlineData(CompressionType.ZStandard, 22, 2, 0.005f)] // was 0.7, actual 0.004
[InlineData(CompressionType.BZip2, 0, 2, 0.035f)] // was 0.8, actual 0.032
[InlineData(CompressionType.LZMA, 0, 2, 0.005f)] // was 0.8, actual 0.004
public void Zip_ZipArchiveOpen_Crc32_Test(
CompressionType compressionType,
int compressionLevel,
int sizeMb,
float expectedRatio
)
{
var fileSize = sizeMb * 1024 * 1024;
var testData = TestPseudoTextStream.Create(fileSize);
var expectedCrc = CalculateCrc32(testData);
// Create archive with specified compression and level
using var zipStream = new MemoryStream();
using (var writer = CreateWriterWithLevel(zipStream, compressionType, compressionLevel))
{
writer.Write(
$"{compressionType}_{compressionLevel}_{sizeMb}MiB.txt",
new MemoryStream(testData)
);
}
// Calculate and output actual compression ratio
var actualRatio = (double)zipStream.Length / testData.Length;
//Debug.WriteLine($"Zip_ZipArchiveOpen_Crc32_Test: {compressionType} Level={compressionLevel} Size={sizeMb}MB Expected={expectedRatio:F3} Actual={actualRatio:F3}");
// Verify the archive
zipStream.Position = 0;
using var archive = ZipArchive.Open(zipStream);
var entry = archive.Entries.Single(e => !e.IsDirectory);
using var entryStream = entry.OpenEntryStream();
using var extractedStream = new MemoryStream();
entryStream.CopyTo(extractedStream);
var extractedData = extractedStream.ToArray();
var actualCrc = CalculateCrc32(extractedData);
Assert.Equal(compressionType, entry.CompressionType);
Assert.Equal(expectedCrc, actualCrc);
Assert.Equal(testData.Length, extractedData.Length);
// For smaller files, verify full content; for larger, spot check
if (testData.Length <= sizeMb * 2)
{
Assert.Equal(testData, extractedData);
}
else
{
VerifyDataSpotCheck(testData, extractedData);
}
VerifyCompressionRatio(
testData.Length,
zipStream.Length,
expectedRatio,
$"{compressionType} Level {compressionLevel}"
);
}
}
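The expectedRatio columns in the InlineData tables above are compressed size divided by original size, asserted (apparently as an upper bound) by VerifyCompressionRatio; the arithmetic is just:

// Ratio = compressed bytes / original bytes. Deflate level 9 landing at
// ~0.038 therefore means the archive is about 3.8% of the input size;
// the InlineData value is the ceiling the test asserts against.
static double CompressionRatio(long compressedLength, long originalLength) =>
    (double)compressedLength / originalLength;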

View File

@@ -1,6 +1,5 @@
using System;
using System.IO;
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.IO;
@@ -12,26 +11,25 @@ using Xunit;
namespace SharpCompress.Test.Zip;
/*
public class ZipReaderTests : ReaderTests
{
public ZipReaderTests() => UseExtensionInsteadOfNameToVerify = true;
[Fact]
public void Issue_269_Double_Skip()
{
var path = Path.Combine(TEST_ARCHIVES_PATH, "PrePostHeaders.zip");
using Stream stream = new ForwardOnlyStream(File.OpenRead(path));
using var reader = ReaderFactory.Open(stream);
var count = 0;
while (reader.MoveToNextEntry())
{
count++;
if (!reader.Entry.IsDirectory)
{
if (count % 2 != 0)
{
reader.WriteEntryTo(Stream.Null);
}
}
}
@@ -57,21 +55,21 @@ public class ZipReaderTests : ReaderTests
public void Zip_Deflate_Streamed_Read() => Read("Zip.deflate.dd.zip", CompressionType.Deflate);
[Fact]
public void Zip_Deflate_Streamed_Skip()
{
using Stream stream = new ForwardOnlyStream(
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip"))
);
using var reader = ReaderFactory.Open(stream);
var x = 0;
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
x++;
if (x % 2 == 0)
{
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -106,19 +104,19 @@ public class ZipReaderTests : ReaderTests
Read("Zip.deflate.noEmptyDirs.zip", CompressionType.Deflate);
[Fact]
public void Zip_BZip2_PkwareEncryption_Read()
{
using (
Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.bzip2.pkware.zip"))
)
using (var reader = ZipReader.Open(stream, new ReaderOptions { Password = "test" }))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(CompressionType.BZip2, reader.Entry.CompressionType);
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -129,18 +127,18 @@ public class ZipReaderTests : ReaderTests
}
[Fact]
public void Zip_Reader_Disposal_Test()
{
using var stream = new TestStream(
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip"))
);
using (var reader = ReaderFactory.Open(stream))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -151,17 +149,17 @@ public class ZipReaderTests : ReaderTests
}
[Fact]
public void Zip_Reader_Disposal_Test2()
{
using var stream = new TestStream(
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip"))
);
var reader = ReaderFactory.Open(stream);
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -171,8 +169,8 @@ public class ZipReaderTests : ReaderTests
}
[Fact]
public void Zip_LZMA_WinzipAES_Read() =>
Assert.Throws<NotSupportedException>(() =>
{
using (
Stream stream = File.OpenRead(
@@ -181,12 +179,12 @@ public class ZipReaderTests : ReaderTests
)
using (var reader = ZipReader.Open(stream, new ReaderOptions { Password = "test" }))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(CompressionType.Unknown, reader.Entry.CompressionType);
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -197,7 +195,7 @@ public class ZipReaderTests : ReaderTests
});
[Fact]
public void Zip_Deflate_WinzipAES_Read()
{
using (
Stream stream = File.OpenRead(
@@ -206,12 +204,12 @@ public class ZipReaderTests : ReaderTests
)
using (var reader = ZipReader.Open(stream, new ReaderOptions { Password = "test" }))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(CompressionType.Unknown, reader.Entry.CompressionType);
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -222,18 +220,18 @@ public class ZipReaderTests : ReaderTests
}
[Fact]
public void Zip_Deflate_ZipCrypto_Read()
{
var count = 0;
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "zipcrypto.zip")))
using (var reader = ZipReader.Open(stream, new ReaderOptions { Password = "test" }))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(CompressionType.None, reader.Entry.CompressionType);
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -245,7 +243,7 @@ public class ZipReaderTests : ReaderTests
}
[Fact]
public void TestSharpCompressWithEmptyStream()
{
var expected = new[]
{
@@ -269,9 +267,9 @@ public class ZipReaderTests : ReaderTests
SharpCompressStream.Create(stream, leaveOpen: true, throwOnDispose: true)
);
var i = 0;
while (zipReader.MoveToNextEntry())
{
using (var entry = zipReader.OpenEntryStream())
{
var tempStream = new MemoryStream();
const int bufSize = 0x1000;
@@ -290,7 +288,7 @@ public class ZipReaderTests : ReaderTests
}
[Fact]
public void Zip_None_Issue86_Streamed_Read()
{
var keys = new[] { "Empty1", "Empty2", "Dir1/", "Dir2/", "Fake1", "Fake2", "Internal.zip" };
@@ -300,7 +298,7 @@ public class ZipReaderTests : ReaderTests
using var reader = ZipReader.Open(stream);
foreach (var key in keys)
{
reader.MoveToNextEntry();
Assert.Equal(reader.Entry.Key, key);
@@ -310,11 +308,11 @@ public class ZipReaderTests : ReaderTests
}
}
Assert.False(reader.MoveToNextEntry());
}
[Fact]
public void Zip_ReaderMoveToNextEntry()
{
var keys = new[] { "version", "sizehint", "data/0/metadata", "data/0/records" };
@@ -322,61 +320,61 @@ public class ZipReaderTests : ReaderTests
using var reader = ZipReader.Open(fileStream);
foreach (var key in keys)
{
reader.MoveToNextEntry();
Assert.Equal(reader.Entry.Key, key);
}
}
[Fact]
public void Issue_685()
{
var count = 0;
using var fileStream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Issue_685.zip"));
using var reader = ZipReader.Open(fileStream);
while (reader.MoveToNextEntry())
{
count++;
reader.OpenEntryStream().Dispose(); // Uncomment for workaround
}
Assert.Equal(4, count);
}
[Fact]
public void Zip_ReaderFactory_Uncompressed_Read_All()
{
var zipPath = Path.Combine(TEST_ARCHIVES_PATH, "Zip.uncompressed.zip");
using var stream = File.OpenRead(zipPath);
using var reader = ReaderFactory.Open(stream);
while (reader.MoveToNextEntry())
{
var target = new MemoryStream();
reader.OpenEntryStream().CopyTo(target);
}
}
[Fact]
public void Zip_ReaderFactory_Uncompressed_Skip_All()
{
var zipPath = Path.Combine(TEST_ARCHIVES_PATH, "Zip.uncompressed.zip");
using var stream = File.OpenRead(zipPath);
using var reader = ReaderFactory.Open(stream);
while (reader.MoveToNextEntry()) { }
}
//this test uses a large 7zip file containing a zip file inside it to test zip64 support
// we probably shouldn't be allowing ExtractAllEntries here but it works for now.
[Fact]
public void Zip_Uncompressed_64bit()
{
var zipPath = Path.Combine(TEST_ARCHIVES_PATH, "64bitstream.zip.7z");
using var stream = File.OpenRead(zipPath);
var archive = ArchiveFactory.Open(stream);
var reader = archive.ExtractAllEntries();
reader.MoveToNextEntry();
var zipReader = ZipReader.Open(reader.OpenEntryStream());
var x = 0;
while (zipReader.MoveToNextEntry())
{
x++;
}
@@ -391,13 +389,12 @@ public class ZipReaderTests : ReaderTests
Path.Combine(TEST_ARCHIVES_PATH, "Zip.none.encrypted.zip"),
new ReaderOptions { Password = "test" }
);
reader.MoveToNextEntry();
Assert.Equal("first.txt", reader.Entry.Key);
Assert.Equal(199, reader.Entry.Size);
reader.OpenEntryStream().Dispose();
reader.MoveToNextEntry();
Assert.Equal("second.txt", reader.Entry.Key);
Assert.Equal(197, reader.Entry.Size);
}
}
*/
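Several of the (currently commented-out) tests above exercise encrypted archives; the common opening pattern, as a sketch with a placeholder path and the test suite's password, is:

using System.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Zip;

// PKWARE- and WinZip-AES-protected entries decrypt transparently once a
// password is supplied through ReaderOptions; reading then uses the same
// MoveToNextEntry/OpenEntryStream loop as unencrypted archives.
using Stream stream = File.OpenRead("encrypted.zip"); // placeholder
using var reader = ZipReader.Open(stream, new ReaderOptions { Password = "test" });
while (reader.MoveToNextEntry())
{
    if (!reader.Entry.IsDirectory)
    {
        using var entryStream = reader.OpenEntryStream();
        entryStream.CopyTo(Stream.Null); // drain and discard
    }
}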

View File

@@ -1,5 +1,4 @@
using System.Text;
using SharpCompress.Common;
using Xunit;
@@ -11,8 +10,8 @@ public class ZipWriterTests : WriterTests
: base(ArchiveType.Zip) { }
[Fact]
public void Zip_Deflate_Write() =>
Write(
CompressionType.Deflate,
"Zip.deflate.noEmptyDirs.zip",
"Zip.deflate.noEmptyDirs.zip",
@@ -20,8 +19,8 @@ public class ZipWriterTests : WriterTests
);
[Fact]
public void Zip_BZip2_Write() =>
Write(
CompressionType.BZip2,
"Zip.bzip2.noEmptyDirs.zip",
"Zip.bzip2.noEmptyDirs.zip",
@@ -29,8 +28,8 @@ public class ZipWriterTests : WriterTests
);
[Fact]
public void Zip_None_Write() =>
Write(
CompressionType.None,
"Zip.none.noEmptyDirs.zip",
"Zip.none.noEmptyDirs.zip",
@@ -38,8 +37,8 @@ public class ZipWriterTests : WriterTests
);
[Fact]
public void Zip_LZMA_Write() =>
Write(
CompressionType.LZMA,
"Zip.lzma.noEmptyDirs.zip",
"Zip.lzma.noEmptyDirs.zip",
@@ -47,8 +46,8 @@ public class ZipWriterTests : WriterTests
);
[Fact]
public void Zip_PPMd_Write() =>
Write(
CompressionType.PPMd,
"Zip.ppmd.noEmptyDirs.zip",
"Zip.ppmd.noEmptyDirs.zip",
@@ -56,12 +55,8 @@ public class ZipWriterTests : WriterTests
);
[Fact]
public void Zip_Rar_Write() =>
Assert.Throws<InvalidFormatException>(() =>
Write(CompressionType.Rar, "Zip.ppmd.noEmptyDirs.zip", "Zip.ppmd.noEmptyDirs.zip")
);
}
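Zip_Rar_Write (and its tar counterpart earlier) pin down the failure mode for a read-only codec. In isolation the assertion reduces to a sketch like the following; where exactly the exception surfaces (factory open vs. first write) is an assumption here, since the tests go through the Write helper:

using System.IO;
using SharpCompress.Common;
using SharpCompress.Writers;
using Xunit;

// Rar is read-only in SharpCompress, so requesting it from a writer
// should fail fast with InvalidFormatException rather than emit a
// broken archive. (Assumption: the factory itself rejects the type.)
Assert.Throws<InvalidFormatException>(() =>
{
    using var stream = new MemoryStream();
    using var writer = WriterFactory.Open(
        stream,
        ArchiveType.Zip,
        new WriterOptions(CompressionType.Rar)
    );
});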