Compare commits

...

69 Commits

Author SHA1 Message Date
Adam Hathcock
8cf9328642 use async for zip reader 2025-10-25 13:55:13 +01:00
Adam Hathcock
3558feee75 fix more tests 2025-10-23 16:33:11 +01:00
Adam Hathcock
a281744d6d fix some usage 2025-10-23 15:45:56 +01:00
Adam Hathcock
7a476b98fb fmt 2025-10-23 15:31:31 +01:00
Adam Hathcock
ba089fb6cc fix some tests, rar fails 2025-10-23 15:31:18 +01:00
Adam Hathcock
c10cfa2a22 fmt 2025-10-23 15:04:44 +01:00
Adam Hathcock
1fb6ad4474 changed some tests 2025-10-23 15:04:31 +01:00
Adam Hathcock
d5f93c5c08 merge fixes 2025-10-23 14:48:24 +01:00
Adam Hathcock
773e3ac048 Merge remote-tracking branch 'origin/master' into adam/async-rar
# Conflicts:
#	src/SharpCompress/Archives/IArchiveEntryExtensions.cs
#	src/SharpCompress/Archives/IArchiveExtensions.cs
#	src/SharpCompress/Utility.cs
2025-10-23 14:45:36 +01:00
Adam Hathcock
e043e06656 Merge pull request #969 from adamhathcock/adam/perf
Add JB perf testing project.
2025-10-23 14:34:43 +01:00
Adam Hathcock
14b52599f4 Update src/SharpCompress/Compressors/Rar/UnpackV1/Unpack.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-23 14:20:54 +01:00
Adam Hathcock
e3e2c0c567 Update tests/SharpCompress.Performance/LargeMemoryStream.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-23 14:19:16 +01:00
Adam Hathcock
4fc5d60f03 reduce visibility 2025-10-23 14:16:39 +01:00
Adam Hathcock
c37a9e0f82 Merge remote-tracking branch 'origin/adam/perf' into adam/perf 2025-10-23 13:50:31 +01:00
Adam Hathcock
fed17ebb96 fmt 2025-10-23 13:50:07 +01:00
Adam Hathcock
eeac678872 More usage of pool and better copy 2025-10-23 13:49:54 +01:00
Adam Hathcock
f9ed0f2df9 Update tests/SharpCompress.Performance/Program.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-23 11:47:42 +01:00
Adam Hathcock
0ddbacac85 Update src/SharpCompress/Compressors/Rar/UnpackV1/UnpackUtility.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-23 11:47:27 +01:00
Adam Hathcock
f0d28aa5cf fmt 2025-10-23 11:43:38 +01:00
Adam Hathcock
cc84f6fee4 more making rar faster 2025-10-23 11:43:21 +01:00
Adam Hathcock
00e6eef369 used AI to optimize some copys and shifting 2025-10-23 11:18:50 +01:00
Adam Hathcock
1ae71907bc don't need to clear everything 2025-10-23 10:53:54 +01:00
Adam Hathcock
3ff688fba2 clear and null check 2025-10-23 10:48:18 +01:00
Adam Hathcock
bb59b3d456 add pool to LZMA out window 2025-10-23 09:54:52 +01:00
Adam Hathcock
186ea74ada add some fixes for rar to pool memory 2025-10-23 09:40:15 +01:00
Adam Hathcock
c108f2dcf3 add perf testing project using JB memory and cpu 2025-10-23 09:39:57 +01:00
Adam Hathcock
4cca232d83 Merge pull request #959 from adamhathcock/adam/xz-wrapped-often
Removed wrappers that weren't needed (probably)
2025-10-22 11:54:47 +01:00
Adam Hathcock
1db511e9cb Merge branch 'master' into adam/xz-wrapped-often 2025-10-22 11:51:46 +01:00
Adam Hathcock
76afa7d3bf Merge pull request #968 from adamhathcock/adam/rework-deps
rework dependencies to be correct for frameworks and update
2025-10-22 11:51:30 +01:00
Adam Hathcock
3513f7b1cd Update src/SharpCompress/SharpCompress.csproj
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-22 10:51:12 +01:00
Adam Hathcock
4531fe39e6 Merge branch 'master' into adam/rework-deps 2025-10-22 10:48:16 +01:00
Adam Hathcock
8d276a85bc rework dependencies to be correct for frameworks and update 2025-10-22 10:47:43 +01:00
Adam Hathcock
5f0d042bc3 Merge pull request #967 from adamhathcock/adam/reduce-custom-utilities
Reduce custom utilities for arrays/bytes
2025-10-22 10:41:10 +01:00
Adam Hathcock
408f07e3c4 Merge branch 'master' into adam/reduce-custom-utilities 2025-10-22 10:38:01 +01:00
Adam Hathcock
d1a540c90c use windows instead of skippable fact 2025-10-22 10:32:47 +01:00
Adam Hathcock
00df8e930e add windows only compile constant 2025-10-22 10:30:40 +01:00
Adam Hathcock
3b768b1b77 Merge pull request #961 from adamhathcock/dependabot/nuget/AwesomeAssertions-9.2.1
Bump AwesomeAssertions from 9.2.0 to 9.2.1
2025-10-22 10:25:01 +01:00
Adam Hathcock
42a7ececa0 Merge branch 'master' into adam/xz-wrapped-often 2025-10-22 10:22:36 +01:00
Adam Hathcock
e8867de049 Merge branch 'master' into dependabot/nuget/AwesomeAssertions-9.2.1 2025-10-22 10:21:59 +01:00
Adam Hathcock
a1dfa3dfa3 xplat tests for path characters 2025-10-22 10:21:22 +01:00
Adam Hathcock
83917d4f79 Merge remote-tracking branch 'origin/master' into adam/reduce-custom-utilities 2025-10-22 10:17:20 +01:00
Adam Hathcock
513cd4f905 some AI suggestions 2025-10-22 10:16:45 +01:00
Adam Hathcock
eda0309df3 Merge pull request #966 from adamhathcock/adam/reduce-stackalloc
Remove a dynamically created stackalloc
2025-10-22 10:13:14 +01:00
Adam Hathcock
74e27c028e fix the span length 2025-10-22 10:10:07 +01:00
Adam Hathcock
36c06c4089 ugh, this is used because it shadows a field 2025-10-22 09:32:19 +01:00
Adam Hathcock
249b8a9cdd add AI generated tests 2025-10-22 09:28:07 +01:00
Adam Hathcock
62bee15f00 fmt 2025-10-22 09:19:30 +01:00
Adam Hathcock
d8797b69e4 remove do while 2025-10-22 09:19:09 +01:00
Adam Hathcock
084fe72b02 Consolidate not null 2025-10-22 09:17:13 +01:00
Adam Hathcock
c823acaa3f optimize ReadFully and Skip 2025-10-22 09:10:16 +01:00
Adam Hathcock
e0d6cd9cb7 Try to reduce custom functions for array/byte management 2025-10-22 09:00:21 +01:00
Adam Hathcock
01021e102b remove some extra stackallocs 2025-10-22 08:36:03 +01:00
Adam Hathcock
6de738ff17 reduce dynamic stackallocs in unpackv1 2025-10-22 08:32:19 +01:00
Adam Hathcock
c0612547eb Merge pull request #964 from adamhathcock/adam/extract-all-solid-only
Only allow extract all on archives that are solid (some rars and 7zip only)
2025-10-21 14:08:23 +01:00
Adam Hathcock
e960907698 Update src/SharpCompress/Archives/AbstractArchive.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-21 13:55:56 +01:00
Adam Hathcock
84e03b1b27 Allow 7zip files of all sizes? 2025-10-21 10:28:58 +01:00
Adam Hathcock
f1a80da34b fix tests that use extract all wrongly 2025-10-21 09:56:29 +01:00
Adam Hathcock
5a5a55e556 fmt 2025-10-21 09:22:35 +01:00
Adam Hathcock
e1f132b45b Only allow extract all on archives that are solid (some rars and 7zip only) 2025-10-21 09:21:46 +01:00
dependabot[bot]
087011aede Bump AwesomeAssertions from 9.2.0 to 9.2.1
---
updated-dependencies:
- dependency-name: AwesomeAssertions
  dependency-version: 9.2.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-10-20 10:44:17 +00:00
Adam Hathcock
d1409d6dde more methods for async 2025-10-17 11:01:37 +01:00
Adam Hathcock
e30a88e634 Started rar async...interface changes were viral 2025-10-17 10:50:09 +01:00
Adam Hathcock
1430bf9b31 fmt 2025-10-15 09:54:13 +01:00
Adam Hathcock
4e5de817ef Removed too many wrappers
# Conflicts:
#	src/SharpCompress/Compressors/Xz/XZIndex.cs
2025-10-15 09:53:46 +01:00
Adam Hathcock
5d6b94f8c3 Merge pull request #952 from adamhathcock/dependabot/github_actions/actions/checkout-5
Bump actions/checkout from 4 to 5
2025-10-14 08:25:53 +01:00
Adam Hathcock
8dfbe56f42 Merge branch 'master' into dependabot/github_actions/actions/checkout-5 2025-10-14 08:23:18 +01:00
Adam Hathcock
df79d983d7 Merge pull request #957 from adamhathcock/dependabot/github_actions/actions/setup-dotnet-5
Bump actions/setup-dotnet from 4 to 5
2025-10-14 08:22:47 +01:00
dependabot[bot]
6c23a28826 Bump actions/setup-dotnet from 4 to 5
Bumps [actions/setup-dotnet](https://github.com/actions/setup-dotnet) from 4 to 5.
- [Release notes](https://github.com/actions/setup-dotnet/releases)
- [Commits](https://github.com/actions/setup-dotnet/compare/v4...v5)

---
updated-dependencies:
- dependency-name: actions/setup-dotnet
  dependency-version: '5'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-10-13 16:21:25 +00:00
dependabot[bot]
f72289570a Bump actions/checkout from 4 to 5
Bumps [actions/checkout](https://github.com/actions/checkout) from 4 to 5.
- [Release notes](https://github.com/actions/checkout/releases)
- [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md)
- [Commits](https://github.com/actions/checkout/compare/v4...v5)

---
updated-dependencies:
- dependency-name: actions/checkout
  dependency-version: '5'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-10-13 16:12:51 +00:00
88 changed files with 5193 additions and 2743 deletions

View File

@@ -14,8 +14,8 @@ jobs:
os: [windows-latest, ubuntu-latest]
steps:
- - uses: actions/checkout@v4
- - uses: actions/setup-dotnet@v4
+ - uses: actions/checkout@v5
+ - uses: actions/setup-dotnet@v5
with:
dotnet-version: 8.0.x
- run: dotnet run --project build/build.csproj

View File

@@ -1,18 +1,18 @@
<Project>
<ItemGroup>
<PackageVersion Include="Bullseye" Version="6.0.0" />
<PackageVersion Include="AwesomeAssertions" Version="9.2.0" />
<PackageVersion Include="AwesomeAssertions" Version="9.2.1" />
<PackageVersion Include="Glob" Version="1.1.9" />
<PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.14" />
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="8.0.0" />
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="17.13.0" />
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="18.0.0" />
<PackageVersion Include="Mono.Posix.NETStandard" Version="1.0.0" />
<PackageVersion Include="SimpleExec" Version="12.0.0" />
<PackageVersion Include="System.Buffers" Version="4.6.0" />
<PackageVersion Include="System.Memory" Version="4.6.0" />
<PackageVersion Include="System.Buffers" Version="4.6.1" />
<PackageVersion Include="System.Memory" Version="4.6.3" />
<PackageVersion Include="System.Text.Encoding.CodePages" Version="8.0.0" />
<PackageVersion Include="xunit" Version="2.9.3" />
<PackageVersion Include="xunit.runner.visualstudio" Version="3.1.5" />
<PackageVersion Include="xunit.SkippableFact" Version="1.5.23" />
<PackageVersion Include="ZstdSharp.Port" Version="0.8.6" />
<PackageVersion Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
<PackageVersion Include="Microsoft.NETFramework.ReferenceAssemblies" Version="1.0.3" />

View File

@@ -23,6 +23,8 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{CDB425
.github\workflows\dotnetcore.yml = .github\workflows\dotnetcore.yml
EndProjectSection
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SharpCompress.Performance", "tests\SharpCompress.Performance\SharpCompress.Performance.csproj", "{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -41,6 +43,10 @@ Global
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Debug|Any CPU.Build.0 = Debug|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Release|Any CPU.ActiveCfg = Release|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Release|Any CPU.Build.0 = Release|Any CPU
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Debug|Any CPU.Build.0 = Debug|Any CPU
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Release|Any CPU.ActiveCfg = Release|Any CPU
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@@ -48,5 +54,6 @@ Global
GlobalSection(NestedProjects) = preSolution
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998} = {3C5BE746-03E5-4895-9988-0B57F162F86C}
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F} = {0F0901FF-E8D9-426A-B5A2-17C7F47C1529}
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17} = {0F0901FF-E8D9-426A-B5A2-17C7F47C1529}
EndGlobalSection
EndGlobal

View File

@@ -144,6 +144,12 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
/// <returns></returns>
public IReader ExtractAllEntries()
{
if (!IsSolid && Type != ArchiveType.SevenZip)
{
throw new InvalidOperationException(
"ExtractAllEntries can only be used on solid archives or 7Zip archives (which require random access)."
);
}
((IArchiveExtractionListener)this).EnsureEntriesLoaded();
return CreateReaderForSolidExtraction();
}

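This guard is a behavioral change: ExtractAllEntries now refuses archives that are neither solid nor 7Zip. A minimal consumer-side sketch of branching around the new restriction (the file name is illustrative, not from the diff):

using SharpCompress.Archives;
using SharpCompress.Common;

// Branch on solidity before asking for a sequential reader.
using var archive = ArchiveFactory.Open("archive.rar"); // illustrative path
if (archive.IsSolid || archive.Type == ArchiveType.SevenZip)
{
    // Solid data must be decompressed front to back.
    using var reader = archive.ExtractAllEntries();
    while (reader.MoveToNextEntry())
    {
        // ... consume reader.Entry sequentially
    }
}
else
{
    // Non-solid formats allow per-entry access instead.
    foreach (var entry in archive.Entries)
    {
        // ... open each entry individually
    }
}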
View File

@@ -2,6 +2,7 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Factories;
using SharpCompress.IO;
@@ -45,7 +46,7 @@ public static class ArchiveFactory
/// <param name="options"></param>
public static IArchive Open(string filePath, ReaderOptions? options = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), options);
}
@@ -68,7 +69,7 @@ public static class ArchiveFactory
/// <param name="options"></param>
public static IArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? options = null)
{
fileInfos.CheckNotNull(nameof(fileInfos));
fileInfos.NotNull(nameof(fileInfos));
var filesArray = fileInfos.ToArray();
if (filesArray.Length == 0)
{
@@ -81,7 +82,7 @@ public static class ArchiveFactory
return Open(fileInfo, options);
}
fileInfo.CheckNotNull(nameof(fileInfo));
fileInfo.NotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
return FindFactory<IMultiArchiveFactory>(fileInfo).Open(filesArray, options);
@@ -94,7 +95,7 @@ public static class ArchiveFactory
/// <param name="options"></param>
public static IArchive Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
{
streams.CheckNotNull(nameof(streams));
streams.NotNull(nameof(streams));
var streamsArray = streams.ToArray();
if (streamsArray.Length == 0)
{
@@ -107,7 +108,7 @@ public static class ArchiveFactory
return Open(firstStream, options);
}
firstStream.CheckNotNull(nameof(firstStream));
firstStream.NotNull(nameof(firstStream));
options ??= new ReaderOptions();
return FindFactory<IMultiArchiveFactory>(firstStream).Open(streamsArray, options);
@@ -116,20 +117,20 @@ public static class ArchiveFactory
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(
public static async Task WriteToDirectoryAsync(
string sourceArchive,
string destinationDirectory,
ExtractionOptions? options = null
)
{
using var archive = Open(sourceArchive);
archive.WriteToDirectory(destinationDirectory, options);
await archive.WriteToDirectoryAsync(destinationDirectory, options);
}
private static T FindFactory<T>(FileInfo finfo)
where T : IFactory
{
finfo.CheckNotNull(nameof(finfo));
finfo.NotNull(nameof(finfo));
using Stream stream = finfo.OpenRead();
return FindFactory<T>(stream);
}
@@ -137,7 +138,7 @@ public static class ArchiveFactory
private static T FindFactory<T>(Stream stream)
where T : IFactory
{
stream.CheckNotNull(nameof(stream));
stream.NotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException("Stream should be readable and seekable");
@@ -172,7 +173,7 @@ public static class ArchiveFactory
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
filePath.NotNullOrEmpty(nameof(filePath));
using Stream s = File.OpenRead(filePath);
return IsArchive(s, out type, bufferSize);
}
@@ -184,7 +185,7 @@ public static class ArchiveFactory
)
{
type = null;
stream.CheckNotNull(nameof(stream));
stream.NotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
@@ -215,7 +216,7 @@ public static class ArchiveFactory
/// <returns></returns>
public static IEnumerable<string> GetFileParts(string part1)
{
part1.CheckNotNullOrEmpty(nameof(part1));
part1.NotNullOrEmpty(nameof(part1));
return GetFileParts(new FileInfo(part1)).Select(a => a.FullName);
}
@@ -226,7 +227,7 @@ public static class ArchiveFactory
/// <returns></returns>
public static IEnumerable<FileInfo> GetFileParts(FileInfo part1)
{
part1.CheckNotNull(nameof(part1));
part1.NotNull(nameof(part1));
yield return part1;
foreach (var factory in Factory.Factories.OfType<IFactory>())

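With WriteToDirectory renamed to WriteToDirectoryAsync, the one-shot extraction helper becomes awaitable. A sketch of the new call shape (paths and options are illustrative):

using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Common;

// One-shot extraction through the renamed async helper.
await ArchiveFactory.WriteToDirectoryAsync(
    "source.zip",
    "output",
    new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);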
View File

@@ -21,7 +21,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
/// <param name="readerOptions"></param>
public static GZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
@@ -32,7 +32,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
/// <param name="readerOptions"></param>
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
fileInfo.NotNull(nameof(fileInfo));
return new GZipArchive(
new SourceStream(
fileInfo,
@@ -52,7 +52,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
ReaderOptions? readerOptions = null
)
{
fileInfos.CheckNotNull(nameof(fileInfos));
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new GZipArchive(
new SourceStream(
@@ -70,7 +70,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
/// <param name="readerOptions"></param>
public static GZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.CheckNotNull(nameof(streams));
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new GZipArchive(
new SourceStream(
@@ -88,7 +88,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
/// <param name="readerOptions"></param>
public static GZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{

View File

@@ -1,5 +1,6 @@
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common.GZip;
namespace SharpCompress.Archives.GZip;
@@ -20,6 +21,9 @@ public class GZipArchiveEntry : GZipEntry, IArchiveEntry
return Parts.Single().GetCompressedStream().NotNull();
}
public virtual async Task<Stream> OpenEntryStreamAsync() =>
await Task.FromResult(OpenEntryStream());
#region IArchiveEntry Members
public IArchive Archive { get; }

View File

@@ -1,4 +1,5 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Archives;
@@ -11,6 +12,12 @@ public interface IArchiveEntry : IEntry
/// </summary>
Stream OpenEntryStream();
/// <summary>
/// Opens the current entry as a stream that will decompress as it is read.
/// Read the entire stream or use SkipEntry on EntryStream.
/// </summary>
Task<Stream> OpenEntryStreamAsync();
/// <summary>
/// The archive can find all the parts of the archive needed to extract this entry.
/// </summary>

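A small consumer sketch of the new interface member; ReadEntryAsync is a hypothetical helper name, and `await using` assumes a TFM where Stream implements IAsyncDisposable:

using System.IO;
using System.Threading.Tasks;
using SharpCompress.Archives;

// Hypothetical helper: buffer a single entry via the async stream API.
static async Task<byte[]> ReadEntryAsync(IArchiveEntry entry)
{
    await using var entryStream = await entry.OpenEntryStreamAsync();
    using var buffer = new MemoryStream();
    await entryStream.CopyToAsync(buffer);
    return buffer.ToArray();
}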
View File

@@ -1,4 +1,5 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
@@ -6,7 +7,7 @@ namespace SharpCompress.Archives;
public static class IArchiveEntryExtensions
{
public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWriteTo)
public static async Task WriteToAsync(this IArchiveEntry archiveEntry, Stream streamToWriteTo)
{
if (archiveEntry.IsDirectory)
{
@@ -21,11 +22,11 @@ public static class IArchiveEntryExtensions
archiveEntry.Size,
archiveEntry.CompressedSize
);
var entryStream = archiveEntry.OpenEntryStream();
var entryStream = await archiveEntry.OpenEntryStreamAsync();
using (entryStream)
{
using Stream s = new ListeningStream(streamListener, entryStream);
s.TransferTo(streamToWriteTo);
await s.CopyToAsync(streamToWriteTo);
}
streamListener.FireEntryExtractionEnd(archiveEntry);
}
@@ -33,34 +34,34 @@ public static class IArchiveEntryExtensions
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(
public static async Task WriteEntryToDirectoryAsync(
this IArchiveEntry entry,
string destinationDirectory,
ExtractionOptions? options = null
) =>
ExtractionMethods.WriteEntryToDirectory(
await ExtractionMethods.WriteEntryToDirectoryAsync(
entry,
destinationDirectory,
options,
entry.WriteToFile
entry.WriteToFileAsync
);
/// <summary>
/// Extract to specific file
/// </summary>
public static void WriteToFile(
public static Task WriteToFileAsync(
this IArchiveEntry entry,
string destinationFileName,
ExtractionOptions? options = null
) =>
ExtractionMethods.WriteEntryToFile(
ExtractionMethods.WriteEntryToFileAsync(
entry,
destinationFileName,
options,
(x, fm) =>
async (x, fm) =>
{
using var fs = File.Open(destinationFileName, fm);
entry.WriteTo(fs);
await entry.WriteToAsync(fs);
}
);
}

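Call sites for the renamed extension methods then look like this fragment (destination paths are illustrative):

// Extract one entry to a file, or into a directory keeping its path.
await entry.WriteToFileAsync("output/file.bin");
await entry.WriteEntryToDirectoryAsync(
    "output",
    new ExtractionOptions { ExtractFullPath = true }
);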
View File

@@ -1,8 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
@@ -13,14 +13,14 @@ public static class IArchiveExtensions
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(
public static async Task WriteToDirectoryAsync(
this IArchive archive,
string destinationDirectory,
ExtractionOptions? options = null
)
{
using var reader = archive.ExtractAllEntries();
reader.WriteAllToDirectory(destinationDirectory, options);
await reader.WriteAllToDirectoryAsync(destinationDirectory, options);
}
/// <summary>
@@ -30,7 +30,7 @@ public static class IArchiveExtensions
/// <param name="destination">The folder to extract into.</param>
/// <param name="progressReport">Optional progress report callback.</param>
/// <param name="cancellationToken">Optional cancellation token.</param>
public static void ExtractToDirectory(
public static async Task ExtractToDirectoryAsync(
this IArchive archive,
string destination,
Action<double>? progressReport = null,
@@ -45,12 +45,10 @@ public static class IArchiveExtensions
var seenDirectories = new HashSet<string>();
// Extract
var entries = archive.ExtractAllEntries();
while (entries.MoveToNextEntry())
foreach (var entry in archive.Entries)
{
cancellationToken.ThrowIfCancellationRequested();
var entry = entries.Entry;
if (entry.IsDirectory)
{
var dirPath = Path.Combine(destination, entry.Key.NotNull("Entry Key is null"));
@@ -77,7 +75,7 @@ public static class IArchiveExtensions
// Write file
using var fs = File.OpenWrite(path);
entries.WriteEntryTo(fs);
await entry.WriteToAsync(fs);
// Update progress
bytesRead += entry.Size;

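A usage sketch for the now-async directory extraction, assuming the progress callback reports a 0-1 fraction (path and timeout are illustrative):

using System;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;

// Progress-reporting extraction with cooperative cancellation.
using var cts = new CancellationTokenSource(TimeSpan.FromMinutes(10));
using var archive = ArchiveFactory.Open("large.7z");
await archive.ExtractToDirectoryAsync(
    "output",
    progressReport: p => Console.WriteLine($"{p:P0} extracted"),
    cancellationToken: cts.Token
);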
View File

@@ -95,7 +95,7 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
/// <param name="options"></param>
public static RarArchive Open(string filePath, ReaderOptions? options = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
filePath.NotNullOrEmpty(nameof(filePath));
var fileInfo = new FileInfo(filePath);
return new RarArchive(
new SourceStream(
@@ -113,7 +113,7 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
/// <param name="options"></param>
public static RarArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
fileInfo.NotNull(nameof(fileInfo));
return new RarArchive(
new SourceStream(
fileInfo,
@@ -130,7 +130,7 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
/// <param name="options"></param>
public static RarArchive Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
@@ -150,7 +150,7 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
ReaderOptions? readerOptions = null
)
{
fileInfos.CheckNotNull(nameof(fileInfos));
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new RarArchive(
new SourceStream(
@@ -168,7 +168,7 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
/// <param name="readerOptions"></param>
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.CheckNotNull(nameof(streams));
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new RarArchive(
new SourceStream(

View File

@@ -2,6 +2,7 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
@@ -66,18 +67,23 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
}
}
public Stream OpenEntryStream()
public Stream OpenEntryStream() =>
throw new NotSupportedException(
"Synchronous extraction is not supported. Use OpenEntryStreamAsync instead."
);
public async Task<Stream> OpenEntryStreamAsync()
{
if (IsRarV3)
{
return new RarStream(
return await RarStream.Create(
archive.UnpackV1.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
);
}
return new RarStream(
return await RarStream.Create(
archive.UnpackV2017.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)

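For RAR this is a hard break rather than a rename: the synchronous OpenEntryStream now throws, so existing call sites must migrate to the async form, as in this fragment (destination is any writable Stream):

// Old: using var s = rarEntry.OpenEntryStream();  // now throws
await using var s = await rarEntry.OpenEntryStreamAsync();
await s.CopyToAsync(destination);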
View File

@@ -2,6 +2,7 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.SevenZip;
using SharpCompress.Compressors.LZMA.Utilites;
@@ -21,7 +22,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.CheckNotNullOrEmpty("filePath");
filePath.NotNullOrEmpty("filePath");
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
@@ -32,7 +33,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull("fileInfo");
fileInfo.NotNull("fileInfo");
return new SevenZipArchive(
new SourceStream(
fileInfo,
@@ -52,7 +53,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
ReaderOptions? readerOptions = null
)
{
fileInfos.CheckNotNull(nameof(fileInfos));
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new SevenZipArchive(
new SourceStream(
@@ -73,7 +74,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
ReaderOptions? readerOptions = null
)
{
streams.CheckNotNull(nameof(streams));
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new SevenZipArchive(
new SourceStream(
@@ -91,7 +92,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull("stream");
stream.NotNull("stream");
if (stream is not { CanSeek: true })
{
@@ -253,8 +254,8 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
}
}
protected override EntryStream GetEntryStream() =>
CreateEntryStream(
protected override Task<EntryStream> GetEntryStreamAsync() =>
CreateEntryStreamAsync(
new ReadOnlySubStream(
_currentStream.NotNull("currentStream is not null"),
_currentItem?.Size ?? 0

View File

@@ -1,4 +1,5 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common.SevenZip;
namespace SharpCompress.Archives.SevenZip;
@@ -10,6 +11,9 @@ public class SevenZipArchiveEntry : SevenZipEntry, IArchiveEntry
public Stream OpenEntryStream() => FilePart.GetCompressedStream();
public virtual async Task<Stream> OpenEntryStreamAsync() =>
await Task.FromResult(OpenEntryStream());
public IArchive Archive { get; }
public bool IsComplete => true;

View File

@@ -22,7 +22,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
/// <param name="readerOptions"></param>
public static TarArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
@@ -33,7 +33,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
/// <param name="readerOptions"></param>
public static TarArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
fileInfo.NotNull(nameof(fileInfo));
return new TarArchive(
new SourceStream(
fileInfo,
@@ -53,7 +53,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
ReaderOptions? readerOptions = null
)
{
fileInfos.CheckNotNull(nameof(fileInfos));
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new TarArchive(
new SourceStream(
@@ -71,7 +71,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
/// <param name="readerOptions"></param>
public static TarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.CheckNotNull(nameof(streams));
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new TarArchive(
new SourceStream(
@@ -89,7 +89,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
/// <param name="readerOptions"></param>
public static TarArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
@@ -178,7 +178,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
using (var entryStream = entry.OpenEntryStream())
{
using var memoryStream = new MemoryStream();
entryStream.TransferTo(memoryStream);
entryStream.CopyTo(memoryStream);
memoryStream.Position = 0;
var bytes = memoryStream.ToArray();

View File

@@ -1,5 +1,6 @@
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar;
@@ -12,6 +13,9 @@ public class TarArchiveEntry : TarEntry, IArchiveEntry
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();
public virtual async Task<Stream> OpenEntryStreamAsync() =>
await Task.FromResult(OpenEntryStream());
#region IArchiveEntry Members
public IArchive Archive { get; }

View File

@@ -43,7 +43,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
/// <param name="readerOptions"></param>
public static ZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
@@ -54,7 +54,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
/// <param name="readerOptions"></param>
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
fileInfo.NotNull(nameof(fileInfo));
return new ZipArchive(
new SourceStream(
fileInfo,
@@ -74,7 +74,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
ReaderOptions? readerOptions = null
)
{
fileInfos.CheckNotNull(nameof(fileInfos));
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new ZipArchive(
new SourceStream(
@@ -92,7 +92,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
/// <param name="readerOptions"></param>
public static ZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.CheckNotNull(nameof(streams));
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new ZipArchive(
new SourceStream(
@@ -110,7 +110,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
/// <param name="readerOptions"></param>
public static ZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{

View File

@@ -1,5 +1,6 @@
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common.Zip;
namespace SharpCompress.Archives.Zip;
@@ -11,6 +12,9 @@ public class ZipArchiveEntry : ZipEntry, IArchiveEntry
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();
public virtual async Task<Stream> OpenEntryStreamAsync() =>
await Task.FromResult(OpenEntryStream());
#region IArchiveEntry Members
public IArchive Archive { get; }

View File

@@ -1,6 +1,8 @@
using System;
using System.IO;
using System.IO.Compression;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -51,6 +53,49 @@ public class EntryStream : Stream, IStreamStack
_completed = true;
}
/// <summary>
/// When reading a stream from OpenEntryStream, the stream must be completed so use this to finish reading the entire entry.
/// </summary>
public async Task SkipEntryAsync()
{
await this.SkipAsync();
_completed = true;
}
#if !NETSTANDARD2_0 && !NETFRAMEWORK
public override async ValueTask DisposeAsync()
{
if (!(_completed || _reader.Cancelled))
{
await SkipEntryAsync();
}
//Need a safe standard approach to this - it's okay for compression to overreads. Handling needs to be standardised
if (_stream is IStreamStack ss)
{
if (ss.BaseStream() is SharpCompress.Compressors.Deflate.DeflateStream deflateStream)
{
deflateStream.Flush(); //Deflate over reads. Knock it back
}
else if (ss.BaseStream() is SharpCompress.Compressors.LZMA.LzmaStream lzmaStream)
{
lzmaStream.Flush(); //Lzma over reads. Knock it back
}
}
if (_isDisposed)
{
return;
}
_isDisposed = true;
#if DEBUG_STREAMS
this.DebugDispose(typeof(EntryStream));
#endif
await base.DisposeAsync();
await _stream.DisposeAsync();
}
#endif
protected override void Dispose(bool disposing)
{
if (!(_completed || _reader.Cancelled))
@@ -99,6 +144,16 @@ public class EntryStream : Stream, IStreamStack
set => throw new NotSupportedException();
}
public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
var read = await _stream.ReadAsync(buffer, offset, count, cancellationToken);
if (read <= 0)
{
_completed = true;
}
return read;
}
public override int Read(byte[] buffer, int offset, int count)
{
var read = _stream.Read(buffer, offset, count);

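The async dispose path mirrors the sync one: abandoning an entry early triggers SkipEntryAsync, leaving the underlying stream positioned for the next entry. A sketch of consumption that relies on this, assuming the returned stream is an EntryStream on a TFM where DisposeAsync is compiled in:

// Read only the header bytes; DisposeAsync drains the remainder so
// the archive stays consumable for the next entry.
var header = new byte[16];
await using (var stream = await entry.OpenEntryStreamAsync())
{
    var read = await stream.ReadAsync(header, 0, header.Length);
}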
View File

@@ -1,5 +1,6 @@
using System;
using System.IO;
using System.Threading.Tasks;
namespace SharpCompress.Common;
@@ -8,11 +9,11 @@ internal static class ExtractionMethods
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteEntryToDirectory(
public static async Task WriteEntryToDirectoryAsync(
IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Action<string, ExtractionOptions?> write
Func<string, ExtractionOptions?, Task> write
)
{
string destinationFileName;
@@ -77,7 +78,7 @@ internal static class ExtractionMethods
"Entry is trying to write a file outside of the destination directory."
);
}
write(destinationFileName, options);
await write(destinationFileName, options);
}
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
{
@@ -85,11 +86,11 @@ internal static class ExtractionMethods
}
}
public static void WriteEntryToFile(
public static async Task WriteEntryToFileAsync(
IEntry entry,
string destinationFileName,
ExtractionOptions? options,
Action<string, FileMode> openAndWrite
Func<string, FileMode, Task> openAndWrite
)
{
if (entry.LinkTarget != null)
@@ -112,7 +113,7 @@ internal static class ExtractionMethods
fm = FileMode.CreateNew;
}
openAndWrite(destinationFileName, fm);
await openAndWrite(destinationFileName, fm);
entry.PreserveExtractionOptions(destinationFileName, options);
}
}

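The writer callbacks change shape from Action to Func<..., Task>, so a synchronous writer still fits by returning a completed task. A self-contained sketch against the signature above (the payload is illustrative):

using System;
using System.IO;
using System.Threading.Tasks;

var payload = new byte[] { 0x50, 0x4B }; // illustrative bytes
Func<string, FileMode, Task> openAndWrite = (path, mode) =>
{
    using var fs = File.Open(path, mode);
    fs.Write(payload, 0, payload.Length);
    return Task.CompletedTask; // synchronous body, async shape
};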
View File

@@ -28,6 +28,7 @@ using System;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Deflate;
@@ -325,6 +326,16 @@ public class DeflateStream : Stream, IStreamStack
return _baseStream.Read(buffer, offset, count);
}
public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
if (_disposed)
{
throw new ObjectDisposedException("DeflateStream");
}
return _baseStream.ReadAsync(buffer, offset, count, cancellationToken);
}
public override int ReadByte()
{
if (_disposed)

View File

@@ -27,10 +27,13 @@
// ------------------------------------------------------------------
using System;
using System.Buffers;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
@@ -197,6 +200,148 @@ internal class ZlibBaseStream : Stream, IStreamStack
} while (!done);
}
#if !NETSTANDARD2_0 && !NETFRAMEWORK
private async ValueTask FinishAsync(CancellationToken cancellationToken)
{
if (_z is null)
{
return;
}
if (_streamMode == StreamMode.Writer)
{
var done = false;
do
{
_z.OutputBuffer = workingBuffer;
_z.NextOut = 0;
_z.AvailableBytesOut = _workingBuffer.Length;
var rc =
(_wantCompress) ? _z.Deflate(FlushType.Finish) : _z.Inflate(FlushType.Finish);
if (rc != ZlibConstants.Z_STREAM_END && rc != ZlibConstants.Z_OK)
{
var verb = (_wantCompress ? "de" : "in") + "flating";
if (_z.Message is null)
{
throw new ZlibException(String.Format("{0}: (rc = {1})", verb, rc));
}
throw new ZlibException(verb + ": " + _z.Message);
}
if (_workingBuffer.Length - _z.AvailableBytesOut > 0)
{
_stream.Write(_workingBuffer, 0, _workingBuffer.Length - _z.AvailableBytesOut);
}
done = _z.AvailableBytesIn == 0 && _z.AvailableBytesOut != 0;
// If GZIP and de-compress, we're done when 8 bytes remain.
if (_flavor == ZlibStreamFlavor.GZIP && !_wantCompress)
{
done = (_z.AvailableBytesIn == 8 && _z.AvailableBytesOut != 0);
}
} while (!done);
Flush();
// workitem 7159
if (_flavor == ZlibStreamFlavor.GZIP)
{
if (_wantCompress)
{
// Emit the GZIP trailer: CRC32 and size mod 2^32
using var intBufOwner = MemoryPool<byte>.Shared.Rent(4);
var intBuf = intBufOwner.Memory.Slice(0, 4);
BinaryPrimitives.WriteInt32LittleEndian(intBuf.Span, crc.Crc32Result);
await _stream.WriteAsync(intBuf, cancellationToken);
var c2 = (int)(crc.TotalBytesRead & 0x00000000FFFFFFFF);
BinaryPrimitives.WriteInt32LittleEndian(intBuf.Span, c2);
await _stream.WriteAsync(intBuf, cancellationToken);
}
else
{
throw new ZlibException("Writing with decompression is not supported.");
}
}
}
// workitem 7159
else if (_streamMode == StreamMode.Reader)
{
if (_flavor == ZlibStreamFlavor.GZIP)
{
if (!_wantCompress)
{
// workitem 8501: handle edge case (decompress empty stream)
if (_z.TotalBytesOut == 0L)
{
return;
}
// Read and potentially verify the GZIP trailer: CRC32 and size mod 2^32
using var trailerOwner = MemoryPool<byte>.Shared.Rent(8);
var trailer = trailerOwner.Memory.Slice(0, 8);
// workitem 8679
if (_z.AvailableBytesIn != 8)
{
// Make sure we have read to the end of the stream
_z.InputBuffer.AsSpan(_z.NextIn, _z.AvailableBytesIn).CopyTo(trailer.Span);
var bytesNeeded = 8 - _z.AvailableBytesIn;
var bytesRead = await _stream.ReadAsync(
trailer.Slice(_z.AvailableBytesIn, bytesNeeded), cancellationToken
);
if (bytesNeeded != bytesRead)
{
throw new ZlibException(
String.Format(
"Protocol error. AvailableBytesIn={0}, expected 8",
_z.AvailableBytesIn + bytesRead
)
);
}
}
else
{
_z.InputBuffer.AsSpan(_z.NextIn, trailer.Length).CopyTo(trailer.Span);
}
var crc32_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer.Span);
var crc32_actual = crc.Crc32Result;
var isize_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer.Span.Slice(4));
var isize_actual = (Int32)(_z.TotalBytesOut & 0x00000000FFFFFFFF);
if (crc32_actual != crc32_expected)
{
throw new ZlibException(
String.Format(
"Bad CRC32 in GZIP stream. (actual({0:X8})!=expected({1:X8}))",
crc32_actual,
crc32_expected
)
);
}
if (isize_actual != isize_expected)
{
throw new ZlibException(
String.Format(
"Bad size in GZIP stream. (actual({0})!=expected({1}))",
isize_actual,
isize_expected
)
);
}
}
else
{
throw new ZlibException("Reading with compression is not supported.");
}
}
}
}
#else
private void finish()
{
if (_z is null)
@@ -334,7 +479,7 @@ internal class ZlibBaseStream : Stream, IStreamStack
}
}
}
#endif
private void end()
{
if (z is null)
@@ -352,6 +497,36 @@ internal class ZlibBaseStream : Stream, IStreamStack
_z = null;
}
#if !NETSTANDARD2_0 && !NETFRAMEWORK
public override async ValueTask DisposeAsync()
{
if (isDisposed)
{
return;
}
isDisposed = true;
#if DEBUG_STREAMS
this.DebugDispose(typeof(ZlibBaseStream));
#endif
await base.DisposeAsync();
if (_stream is null)
{
return;
}
try
{
await FinishAsync(CancellationToken.None);
}
finally
{
end();
_stream?.Dispose();
_stream = null;
}
}
#else
protected override void Dispose(bool disposing)
{
if (isDisposed)
@@ -382,6 +557,8 @@ internal class ZlibBaseStream : Stream, IStreamStack
}
}
#endif
public override void Flush()
{
_stream.Flush();
@@ -389,7 +566,6 @@ internal class ZlibBaseStream : Stream, IStreamStack
((IStreamStack)this).Rewind(z.AvailableBytesIn); //unused
z.AvailableBytesIn = 0;
}
public override Int64 Seek(Int64 offset, SeekOrigin origin) =>
throw new NotSupportedException();
@@ -494,6 +670,194 @@ internal class ZlibBaseStream : Stream, IStreamStack
return totalBytesRead;
}
#if !NETSTANDARD2_0 && !NETFRAMEWORK
public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
// According to MS documentation, any implementation of the IO.Stream.Read function must:
// (a) throw an exception if offset & count reference an invalid part of the buffer,
// or if count < 0, or if buffer is null
// (b) return 0 only upon EOF, or if count = 0
// (c) if not EOF, then return at least 1 byte, up to <count> bytes
if (_streamMode == StreamMode.Undefined)
{
if (!_stream.CanRead)
{
throw new ZlibException("The stream is not readable.");
}
// for the first read, set up some controls.
_streamMode = StreamMode.Reader;
// (The first reference to _z goes through the private accessor which
// may initialize it.)
z.AvailableBytesIn = 0;
if (_flavor == ZlibStreamFlavor.GZIP)
{
_gzipHeaderByteCount = _ReadAndValidateGzipHeader();
// workitem 8501: handle edge case (decompress empty stream)
if (_gzipHeaderByteCount == 0)
{
return 0;
}
}
}
if (_streamMode != StreamMode.Reader)
{
throw new ZlibException("Cannot Read after Writing.");
}
var rc = 0;
// set up the output of the deflate/inflate codec:
_z.OutputBuffer = buffer;
_z.NextOut = offset;
_z.AvailableBytesOut = count;
if (count == 0)
{
return 0;
}
if (nomoreinput && _wantCompress)
{
// no more input data available; therefore we flush to
// try to complete the read
rc = _z.Deflate(FlushType.Finish);
if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
{
throw new ZlibException(
String.Format("Deflating: rc={0} msg={1}", rc, _z.Message)
);
}
rc = (count - _z.AvailableBytesOut);
// calculate CRC after reading
if (crc != null)
{
crc.SlurpBlock(buffer, offset, rc);
}
return rc;
}
if (buffer is null)
{
throw new ArgumentNullException(nameof(buffer));
}
if (count < 0)
{
throw new ArgumentOutOfRangeException(nameof(count));
}
if (offset < buffer.GetLowerBound(0))
{
throw new ArgumentOutOfRangeException(nameof(offset));
}
if ((offset + count) > buffer.GetLength(0))
{
throw new ArgumentOutOfRangeException(nameof(count));
}
// This is necessary in case _workingBuffer has been resized. (new byte[])
// (The first reference to _workingBuffer goes through the private accessor which
// may initialize it.)
_z.InputBuffer = workingBuffer;
do
{
// need data in _workingBuffer in order to deflate/inflate. Here, we check if we have any.
if ((_z.AvailableBytesIn == 0) && (!nomoreinput))
{
// No data available, so try to Read data from the captive stream.
_z.NextIn = 0;
_z.AvailableBytesIn = await _stream.ReadAsync(_workingBuffer, 0, _workingBuffer.Length, cancellationToken);
if (_z.AvailableBytesIn == 0)
{
nomoreinput = true;
}
}
// we have data in InputBuffer; now compress or decompress as appropriate
rc = (_wantCompress) ? _z.Deflate(_flushMode) : _z.Inflate(_flushMode);
if (nomoreinput && (rc == ZlibConstants.Z_BUF_ERROR))
{
return 0;
}
if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
{
throw new ZlibException(
String.Format(
"{0}flating: rc={1} msg={2}",
(_wantCompress ? "de" : "in"),
rc,
_z.Message
)
);
}
if (
(nomoreinput || rc == ZlibConstants.Z_STREAM_END) && (_z.AvailableBytesOut == count)
)
{
break; // nothing more to read
}
} //while (_z.AvailableBytesOut == count && rc == ZlibConstants.Z_OK);
while (_z.AvailableBytesOut > 0 && !nomoreinput && rc == ZlibConstants.Z_OK);
// workitem 8557
// is there more room in output?
if (_z.AvailableBytesOut > 0)
{
if (rc == ZlibConstants.Z_OK && _z.AvailableBytesIn == 0)
{
// deferred
}
// are we completely done reading?
if (nomoreinput)
{
// and in compression?
if (_wantCompress)
{
// no more input data available; therefore we flush to
// try to complete the read
rc = _z.Deflate(FlushType.Finish);
if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
{
throw new ZlibException(
String.Format("Deflating: rc={0} msg={1}", rc, _z.Message)
);
}
}
}
}
rc = (count - _z.AvailableBytesOut);
// calculate CRC after reading
if (crc != null)
{
crc.SlurpBlock(buffer, offset, rc);
}
if (rc == ZlibConstants.Z_STREAM_END && z.AvailableBytesIn != 0 && !_wantCompress)
{
//rewind the buffer
((IStreamStack)this).Rewind(z.AvailableBytesIn); //unused
z.AvailableBytesIn = 0;
}
return rc;
}
public override Int32 Read(Byte[] buffer, Int32 offset, Int32 count)
=> throw new NotSupportedException("Use ReadAsync instead.");
#else
public override Int32 Read(Byte[] buffer, Int32 offset, Int32 count)
{
// According to MS documentation, any implementation of the IO.Stream.Read function must:
@@ -677,6 +1041,7 @@ internal class ZlibBaseStream : Stream, IStreamStack
return rc;
}
#endif
public override Boolean CanRead => _stream.CanRead;

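The trailer verification above reads the final 8 bytes of a gzip member: a little-endian CRC32 of the uncompressed data, then ISIZE, the uncompressed length mod 2^32 (RFC 1952). The same check in isolation, independent of the zlib state machine:

using System;
using System.Buffers.Binary;
using System.IO;

// Validate the 8-byte gzip trailer against observed decoder state.
static void CheckGzipTrailer(ReadOnlySpan<byte> trailer, int crc32Actual, long totalBytesOut)
{
    var crc32Expected = BinaryPrimitives.ReadInt32LittleEndian(trailer);
    var isizeExpected = BinaryPrimitives.ReadInt32LittleEndian(trailer.Slice(4));
    var isizeActual = (int)(totalBytesOut & 0xFFFFFFFF);
    if (crc32Actual != crc32Expected)
    {
        throw new InvalidDataException($"bad CRC32: {crc32Actual:X8} != {crc32Expected:X8}");
    }
    if (isizeActual != isizeExpected)
    {
        throw new InvalidDataException($"bad ISIZE: {isizeActual} != {isizeExpected}");
    }
}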
View File

@@ -1,11 +1,12 @@
#nullable disable
using System;
using System.Buffers;
using System.IO;
namespace SharpCompress.Compressors.LZMA.LZ;
internal class OutWindow
internal class OutWindow : IDisposable
{
private byte[] _buffer;
private int _windowSize;
@@ -15,19 +16,22 @@ internal class OutWindow
private int _pendingDist;
private Stream _stream;
public long _total;
public long _limit;
private long _total;
private long _limit;
public long Total => _total;
public void Create(int windowSize)
{
if (_windowSize != windowSize)
{
_buffer = new byte[windowSize];
}
else
{
_buffer[windowSize - 1] = 0;
if (_buffer is not null)
{
ArrayPool<byte>.Shared.Return(_buffer);
}
_buffer = ArrayPool<byte>.Shared.Rent(windowSize);
}
_buffer[windowSize - 1] = 0;
_windowSize = windowSize;
_pos = 0;
_streamPos = 0;
@@ -36,7 +40,22 @@ internal class OutWindow
_limit = 0;
}
public void Reset() => Create(_windowSize);
public void Dispose()
{
ReleaseStream();
if (_buffer is null)
{
return;
}
ArrayPool<byte>.Shared.Return(_buffer);
_buffer = null;
}
public void Reset()
{
ReleaseStream();
Create(_windowSize);
}
public void Init(Stream stream)
{
@@ -66,7 +85,7 @@ internal class OutWindow
_stream = null;
}
public void Flush()
private void Flush()
{
if (_stream is null)
{

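Renting the window from ArrayPool brings two invariants that explain the surrounding null checks: Rent may return a larger array than requested, and every rented array must be returned exactly once. The discipline in isolation:

using System;
using System.Buffers;

var windowSize = 1 << 20;
var window = ArrayPool<byte>.Shared.Rent(windowSize); // may be larger
try
{
    // Use only window[0 .. windowSize); the pool can over-allocate.
}
finally
{
    ArrayPool<byte>.Shared.Return(window); // return exactly once
    window = null!;                        // guard against double-return
}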
View File

@@ -294,7 +294,7 @@ public class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
}
else
{
_outWindow.SetLimit(long.MaxValue - _outWindow._total);
_outWindow.SetLimit(long.MaxValue - _outWindow.Total);
}
var rangeDecoder = new RangeCoder.Decoder();
@@ -305,6 +305,7 @@ public class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
_outWindow.ReleaseStream();
rangeDecoder.ReleaseStream();
_outWindow.Dispose();
_outWindow = null;
}
@@ -316,7 +317,7 @@ public class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
while (outWindow.HasSpace)
{
var posState = (uint)outWindow._total & _posStateMask;
var posState = (uint)outWindow.Total & _posStateMask;
if (
_isMatchDecoders[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState]
.Decode(rangeDecoder) == 0
@@ -328,18 +329,14 @@ public class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
{
b = _literalDecoder.DecodeWithMatchByte(
rangeDecoder,
(uint)outWindow._total,
(uint)outWindow.Total,
prevByte,
outWindow.GetByte((int)_rep0)
);
}
else
{
b = _literalDecoder.DecodeNormal(
rangeDecoder,
(uint)outWindow._total,
prevByte
);
b = _literalDecoder.DecodeNormal(rangeDecoder, (uint)outWindow.Total, prevByte);
}
outWindow.PutByte(b);
_state.UpdateChar();
@@ -424,7 +421,7 @@ public class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
_rep0 = posSlot;
}
}
if (_rep0 >= outWindow._total || _rep0 >= dictionarySizeCheck)
if (_rep0 >= outWindow.Total || _rep0 >= dictionarySizeCheck)
{
if (_rep0 == 0xFFFFFFFF)
{

View File

@@ -178,6 +178,7 @@ public class LzmaStream : Stream, IStreamStack
_position = _encoder.Code(null, true);
}
_inputStream?.Dispose();
_outWindow.Dispose();
}
base.Dispose(disposing);
}

View File

@@ -1,12 +1,18 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Compressors.Rar;
internal interface IRarUnpack
{
#if NETSTANDARD2_0 || NETFRAMEWORK
void DoUnpack(FileHeader fileHeader, Stream readStream, Stream writeStream);
void DoUnpack();
#else
ValueTask DoUnpackAsync(FileHeader fileHeader, Stream readStream, Stream writeStream);
ValueTask DoUnpackAsync();
#endif
// eg u/i pause/resume button
bool Suspended { get; set; }

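Callers bridge the split interface with the same conditional compilation, as RarStream.Initialize does later in this diff:

#if NETSTANDARD2_0 || NETFRAMEWORK
unpack.DoUnpack(fileHeader, readStream, writeStream);
#else
await unpack.DoUnpackAsync(fileHeader, readStream, writeStream);
#endif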
View File

@@ -1,6 +1,7 @@
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
@@ -9,6 +10,17 @@ namespace SharpCompress.Compressors.Rar;
internal class RarBLAKE2spStream : RarStream, IStreamStack
{
public static async ValueTask<RarBLAKE2spStream> Create(
IRarUnpack unpack,
FileHeader fileHeader,
MultiVolumeReadOnlyStream readStream
)
{
var rs = new RarBLAKE2spStream(unpack, fileHeader, readStream);
await RarStream.Initialize(rs, unpack, fileHeader, readStream);
return rs;
}
#if DEBUG_STREAMS
long IStreamStack.InstanceId { get; set; }
#endif
@@ -103,7 +115,7 @@ internal class RarBLAKE2spStream : RarStream, IStreamStack
byte[] _hash = { };
public RarBLAKE2spStream(
protected RarBLAKE2spStream(
IRarUnpack unpack,
FileHeader fileHeader,
MultiVolumeReadOnlyStream readStream

View File

@@ -1,5 +1,6 @@
using System;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
@@ -31,7 +32,18 @@ internal class RarCrcStream : RarStream, IStreamStack
private uint currentCrc;
private readonly bool disableCRC;
public RarCrcStream(
public static async ValueTask<RarCrcStream> Create(
IRarUnpack unpack,
FileHeader fileHeader,
MultiVolumeReadOnlyStream readStream
)
{
var rs = new RarCrcStream(unpack, fileHeader, readStream);
await Initialize(rs, unpack, fileHeader, readStream);
return rs;
}
private RarCrcStream(
IRarUnpack unpack,
FileHeader fileHeader,
MultiVolumeReadOnlyStream readStream

View File

@@ -1,8 +1,8 @@
#nullable disable
using System;
using System.Buffers;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
@@ -36,18 +36,48 @@ internal class RarStream : Stream, IStreamStack
private bool fetch;
private byte[] tmpBuffer = ArrayPool<byte>.Shared.Rent(65536);
private byte[]? tmpBuffer = ArrayPool<byte>.Shared.Rent(65536);
private int tmpOffset;
private int tmpCount;
private byte[] outBuffer;
private byte[]? outBuffer;
private int outOffset;
private int outCount;
private int outTotal;
private bool isDisposed;
private long _position;
public RarStream(IRarUnpack unpack, FileHeader fileHeader, Stream readStream)
public static async ValueTask<RarStream> Create(
IRarUnpack unpack,
FileHeader fileHeader,
Stream readStream
)
{
var rs = new RarStream(unpack, fileHeader, readStream);
await Initialize(rs, unpack, fileHeader, readStream);
return rs;
}
internal static async ValueTask Initialize(
RarStream rs,
IRarUnpack unpack,
FileHeader fileHeader,
Stream readStream
)
{
rs.fetch = true;
#if !NETSTANDARD2_0 && !NETFRAMEWORK
await unpack.DoUnpackAsync(fileHeader, readStream, rs);
#else
unpack.DoUnpack(fileHeader, readStream, rs);
await Task.CompletedTask;
#endif
rs.fetch = false;
rs._position = 0;
}
protected RarStream(IRarUnpack unpack, FileHeader fileHeader, Stream readStream)
{
this.unpack = unpack;
this.fileHeader = fileHeader;
@@ -56,11 +86,6 @@ internal class RarStream : Stream, IStreamStack
#if DEBUG_STREAMS
this.DebugConstruct(typeof(RarStream));
#endif
fetch = true;
unpack.DoUnpack(fileHeader, readStream, this);
fetch = false;
_position = 0;
}
protected override void Dispose(bool disposing)
@@ -72,8 +97,11 @@ internal class RarStream : Stream, IStreamStack
#if DEBUG_STREAMS
this.DebugDispose(typeof(RarStream));
#endif
ArrayPool<byte>.Shared.Return(this.tmpBuffer);
this.tmpBuffer = null;
if (tmpBuffer != null)
{
ArrayPool<byte>.Shared.Return(this.tmpBuffer);
this.tmpBuffer = null;
}
}
isDisposed = true;
base.Dispose(disposing);
@@ -81,6 +109,26 @@ internal class RarStream : Stream, IStreamStack
}
}
#if !NETSTANDARD2_0 && !NETFRAMEWORK
public override async ValueTask DisposeAsync()
{
if (!isDisposed)
{
#if DEBUG_STREAMS
this.DebugDispose(typeof(RarStream));
#endif
if (tmpBuffer != null)
{
ArrayPool<byte>.Shared.Return(this.tmpBuffer);
this.tmpBuffer = null;
}
isDisposed = true;
await readStream.DisposeAsync().ConfigureAwait(false);
}
await base.DisposeAsync().ConfigureAwait(false);
}
#endif
public override bool CanRead => true;
public override bool CanSeek => false;
@@ -89,6 +137,8 @@ internal class RarStream : Stream, IStreamStack
public override void Flush() { }
public override Task FlushAsync(CancellationToken cancellationToken) => Task.CompletedTask;
public override long Length => fileHeader.UncompressedSize;
//commented out code always returned the length of the file
@@ -98,8 +148,102 @@ internal class RarStream : Stream, IStreamStack
set => throw new NotSupportedException();
}
#if !NETSTANDARD2_0 && !NETFRAMEWORK
public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
cancellationToken.ThrowIfCancellationRequested();
if (tmpBuffer == null)
{
throw new ObjectDisposedException(nameof(RarStream));
}
outTotal = 0;
if (tmpCount > 0)
{
var toCopy = tmpCount < count ? tmpCount : count;
Buffer.BlockCopy(tmpBuffer, tmpOffset, buffer, offset, toCopy);
tmpOffset += toCopy;
tmpCount -= toCopy;
offset += toCopy;
count -= toCopy;
outTotal += toCopy;
}
if (count > 0 && unpack.DestSize > 0)
{
outBuffer = buffer;
outOffset = offset;
outCount = count;
fetch = true;
await unpack.DoUnpackAsync();
fetch = false;
}
_position += outTotal;
if (count > 0 && outTotal == 0 && _position != Length)
{
// sanity check, eg if we try to decompress a redir entry
throw new InvalidOperationException(
$"unpacked file size does not match header: expected {Length} found {_position}"
);
}
return outTotal;
}
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
outTotal = 0;
var count = buffer.Length;
if (tmpCount > 0)
{
outOffset = 0;
var toCopy = tmpCount < count ? tmpCount : count;
tmpBuffer.AsSpan(tmpOffset, toCopy).CopyTo(buffer.Span.Slice(outOffset, toCopy));
tmpOffset += toCopy;
tmpCount -= toCopy;
count -= toCopy;
outTotal += toCopy;
}
if (count > 0 && unpack.DestSize > 0)
{
outBuffer = buffer.ToArray();
outOffset = 0;
outCount = count;
fetch = true;
await unpack.DoUnpackAsync();
fetch = false;
}
_position += outTotal;
if (count > 0 && outTotal == 0 && _position != Length)
{
// sanity check, eg if we try to decompress a redir entry
throw new InvalidOperationException(
$"unpacked file size does not match header: expected {Length} found {_position}"
);
}
return outTotal;
}
public override int Read(byte[] buffer, int offset, int count) =>
throw new NotSupportedException("Use ReadAsync or ReadAsync(Memory<byte>) instead.");
#else
public override int Read(byte[] buffer, int offset, int count)
{
if (tmpBuffer == null)
{
throw new ObjectDisposedException(nameof(RarStream));
}
outTotal = 0;
if (tmpCount > 0)
{
@@ -130,6 +274,7 @@ internal class RarStream : Stream, IStreamStack
}
return outTotal;
}
#endif
public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
@@ -143,6 +288,10 @@ internal class RarStream : Stream, IStreamStack
}
if (outCount > 0)
{
if (outBuffer == null)
{
throw new ObjectDisposedException(nameof(RarStream));
}
var toCopy = outCount < count ? outCount : count;
Buffer.BlockCopy(buffer, offset, outBuffer, outOffset, toCopy);
outOffset += toCopy;
@@ -153,6 +302,10 @@ internal class RarStream : Stream, IStreamStack
}
if (count > 0)
{
if (tmpBuffer == null)
{
throw new ObjectDisposedException(nameof(RarStream));
}
EnsureBufferCapacity(count);
Buffer.BlockCopy(buffer, offset, tmpBuffer, tmpCount, count);
tmpCount += count;
@@ -165,8 +318,81 @@ internal class RarStream : Stream, IStreamStack
}
}
public override Task WriteAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
Write(buffer, offset, count);
return Task.CompletedTask;
}
catch (Exception ex)
{
return Task.FromException(ex);
}
}
#if !NETSTANDARD2_0 && !NETFRAMEWORK
public override ValueTask WriteAsync(
ReadOnlyMemory<byte> buffer,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
if (!fetch)
{
throw new NotSupportedException();
}
var count = buffer.Length;
var offset = 0;
if (outCount > 0)
{
var toCopy = outCount < count ? outCount : count;
buffer.Span.Slice(offset, toCopy).CopyTo(outBuffer.AsSpan(outOffset, toCopy));
outOffset += toCopy;
outCount -= toCopy;
offset += toCopy;
count -= toCopy;
outTotal += toCopy;
}
if (count > 0)
{
EnsureBufferCapacity(count);
buffer.Span.Slice(offset, count).CopyTo(tmpBuffer.AsSpan(tmpCount, count));
tmpCount += count;
tmpOffset = 0;
unpack.Suspended = true;
}
else
{
unpack.Suspended = false;
}
return ValueTask.CompletedTask;
}
catch (Exception ex)
{
return ValueTask.FromException(ex);
}
}
#endif
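
The Read/Write pairing above is a pull-driven handshake: the read side points outBuffer at the caller's buffer, sets fetch, and resumes the unpacker; the unpacker's Write callback fills outBuffer first and spills any excess into the pooled tmpBuffer, which is served first on the next read, with Suspended pausing the unpacker once the request is satisfied. A compact sketch of the same handshake, with hypothetical names (Pull/Push/spill are illustrative, not the SharpCompress API):

    using System;

    // Illustrative sketch of the pull-driven decoder handshake above.
    internal sealed class PullDrivenBuffer
    {
        private byte[] _spill = Array.Empty<byte>(); // overflow kept for the next Pull
        private int _spillOffset;
        private int _spillCount;
        private byte[]? _target; // caller's buffer while a pull is active
        private int _targetOffset;
        private int _targetCount;

        public bool Suspended { get; private set; }

        // Consumer side (compare ReadAsync above).
        public int Pull(byte[] buffer, int offset, int count, Action produce)
        {
            var total = Math.Min(_spillCount, count);
            Buffer.BlockCopy(_spill, _spillOffset, buffer, offset, total);
            _spillOffset += total;
            _spillCount -= total;
            if (_spillCount == 0)
            {
                _spillOffset = 0; // spill drained; future overflow starts at the front again
            }
            if (total < count)
            {
                var requested = count - total;
                _target = buffer;
                _targetOffset = offset + total;
                _targetCount = requested;
                produce(); // run the producer, which calls Push until it suspends
                total += requested - _targetCount; // bytes pushed directly into the caller's buffer
                _target = null;
            }
            return total;
        }

        // Producer side (compare WriteAsync above).
        public void Push(byte[] data, int offset, int count)
        {
            var direct = Math.Min(_targetCount, count);
            if (direct > 0)
            {
                Buffer.BlockCopy(data, offset, _target!, _targetOffset, direct);
                _targetOffset += direct;
                _targetCount -= direct;
                offset += direct;
                count -= direct;
            }
            if (count > 0)
            {
                // The caller's buffer is full: stash the excess and suspend the producer.
                if (_spill.Length < _spillCount + count)
                {
                    Array.Resize(ref _spill, _spillCount + count);
                }
                Buffer.BlockCopy(data, offset, _spill, _spillCount, count);
                _spillCount += count;
                Suspended = true;
            }
            else
            {
                Suspended = false;
            }
        }
    }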
private void EnsureBufferCapacity(int count)
{
if (tmpBuffer == null)
{
throw new ObjectDisposedException(nameof(RarStream));
}
if (this.tmpBuffer.Length < this.tmpCount + count)
{
var newLength =

View File

@@ -4,6 +4,7 @@ using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressors.PPMd.H;
@@ -13,7 +14,7 @@ using SharpCompress.Compressors.Rar.VM;
namespace SharpCompress.Compressors.Rar.UnpackV1;
internal sealed partial class Unpack : BitInput, IRarUnpack, IDisposable
internal sealed partial class Unpack : BitInput, IRarUnpack
{
private readonly BitInput Inp;
private bool disposed;
@@ -22,15 +23,20 @@ internal sealed partial class Unpack : BitInput, IRarUnpack, IDisposable
// to ease in porting Unpack50.cs
Inp = this;
public void Dispose()
public override void Dispose()
{
if (!disposed)
{
base.Dispose();
if (!externalWindow)
{
ArrayPool<byte>.Shared.Return(window);
window = null;
if (window != null)
{
ArrayPool<byte>.Shared.Return(window);
window = null;
}
}
rarVM.Dispose();
disposed = true;
}
}
@@ -153,6 +159,20 @@ internal sealed partial class Unpack : BitInput, IRarUnpack, IDisposable
DoUnpack();
}
#if !NETSTANDARD2_0 && !NETFRAMEWORK
public ValueTask DoUnpackAsync()
{
DoUnpack();
return ValueTask.CompletedTask;
}
public ValueTask DoUnpackAsync(FileHeader fileHeader, Stream readStream, Stream writeStream)
{
DoUnpack(fileHeader, readStream, writeStream);
return ValueTask.CompletedTask;
}
#endif
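
These wrappers are completed-ValueTask shims: the V1 unpacker stays synchronous, but call sites get a uniform async signature without any allocation. The general shape, as a minimal sketch:

    using System.Threading.Tasks;

    internal sealed class SyncCore
    {
        public void DoWork() { /* CPU-bound synchronous body */ }

        // Async-shaped shim: completes synchronously and allocates nothing,
        // whereas Task.Run would just move the CPU-bound work to another
        // thread-pool thread for no benefit.
        public ValueTask DoWorkAsync()
        {
            DoWork();
            return ValueTask.CompletedTask;
        }
    }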
public void DoUnpack()
{
if (fileHeader.CompressionMethod == 0)
@@ -574,104 +594,111 @@ internal sealed partial class Unpack : BitInput, IRarUnpack, IDisposable
var FilteredDataOffset = Prg.FilteredDataOffset;
var FilteredDataSize = Prg.FilteredDataSize;
var FilteredData = new byte[FilteredDataSize];
for (var i = 0; i < FilteredDataSize; i++)
var FilteredData = ArrayPool<byte>.Shared.Rent(FilteredDataSize);
try
{
FilteredData[i] = rarVM.Mem[FilteredDataOffset + i];
Array.Copy(
rarVM.Mem,
FilteredDataOffset,
FilteredData,
0,
FilteredDataSize
);
// Prg.GlobalData.get(FilteredDataOffset
// +
// i);
}
prgStack[I] = null;
while (I + 1 < prgStack.Count)
{
var NextFilter = prgStack[I + 1];
if (
NextFilter is null
|| NextFilter.BlockStart != BlockStart
|| NextFilter.BlockLength != FilteredDataSize
|| NextFilter.NextWindow
)
{
break;
}
// apply several filters to same data block
rarVM.setMemory(0, FilteredData, 0, FilteredDataSize);
// .SetMemory(0,FilteredData,FilteredDataSize);
var pPrg = filters[NextFilter.ParentFilter].Program;
var NextPrg = NextFilter.Program;
if (pPrg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
// copy global data from previous script execution
// if any
// NextPrg->GlobalData.Alloc(ParentPrg->GlobalData.Size());
NextPrg.GlobalData.SetSize(pPrg.GlobalData.Count);
// memcpy(&NextPrg->GlobalData[VM_FIXEDGLOBALSIZE],&ParentPrg->GlobalData[VM_FIXEDGLOBALSIZE],ParentPrg->GlobalData.Size()-VM_FIXEDGLOBALSIZE);
for (
var i = 0;
i < pPrg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE;
i++
)
{
NextPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] = pPrg.GlobalData[
RarVM.VM_FIXEDGLOBALSIZE + i
];
}
}
ExecuteCode(NextPrg);
if (NextPrg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
// save global data for next script execution
if (pPrg.GlobalData.Count < NextPrg.GlobalData.Count)
{
pPrg.GlobalData.SetSize(NextPrg.GlobalData.Count);
}
// memcpy(&ParentPrg->GlobalData[VM_FIXEDGLOBALSIZE],&NextPrg->GlobalData[VM_FIXEDGLOBALSIZE],NextPrg->GlobalData.Size()-VM_FIXEDGLOBALSIZE);
for (
var i = 0;
i < NextPrg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE;
i++
)
{
pPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] = NextPrg.GlobalData[
RarVM.VM_FIXEDGLOBALSIZE + i
];
}
}
else
{
pPrg.GlobalData.Clear();
}
FilteredDataOffset = NextPrg.FilteredDataOffset;
FilteredDataSize = NextPrg.FilteredDataSize;
FilteredData = new byte[FilteredDataSize];
for (var i = 0; i < FilteredDataSize; i++)
{
FilteredData[i] = NextPrg.GlobalData[FilteredDataOffset + i];
}
I++;
prgStack[I] = null;
while (I + 1 < prgStack.Count)
{
var NextFilter = prgStack[I + 1];
if (
NextFilter is null
|| NextFilter.BlockStart != BlockStart
|| NextFilter.BlockLength != FilteredDataSize
|| NextFilter.NextWindow
)
{
break;
}
// apply several filters to same data block
rarVM.setMemory(0, FilteredData, 0, FilteredDataSize);
// .SetMemory(0,FilteredData,FilteredDataSize);
var pPrg = filters[NextFilter.ParentFilter].Program;
var NextPrg = NextFilter.Program;
if (pPrg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
// copy global data from previous script execution
// if any
// NextPrg->GlobalData.Alloc(ParentPrg->GlobalData.Size());
NextPrg.GlobalData.SetSize(pPrg.GlobalData.Count);
// memcpy(&NextPrg->GlobalData[VM_FIXEDGLOBALSIZE],&ParentPrg->GlobalData[VM_FIXEDGLOBALSIZE],ParentPrg->GlobalData.Size()-VM_FIXEDGLOBALSIZE);
for (
var i = 0;
i < pPrg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE;
i++
)
{
NextPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] =
pPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i];
}
}
ExecuteCode(NextPrg);
if (NextPrg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
// save global data for next script execution
if (pPrg.GlobalData.Count < NextPrg.GlobalData.Count)
{
pPrg.GlobalData.SetSize(NextPrg.GlobalData.Count);
}
// memcpy(&ParentPrg->GlobalData[VM_FIXEDGLOBALSIZE],&NextPrg->GlobalData[VM_FIXEDGLOBALSIZE],NextPrg->GlobalData.Size()-VM_FIXEDGLOBALSIZE);
for (
var i = 0;
i < NextPrg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE;
i++
)
{
pPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] =
NextPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i];
}
}
else
{
pPrg.GlobalData.Clear();
}
FilteredDataOffset = NextPrg.FilteredDataOffset;
FilteredDataSize = NextPrg.FilteredDataSize;
if (FilteredData.Length < FilteredDataSize)
{
ArrayPool<byte>.Shared.Return(FilteredData);
FilteredData = ArrayPool<byte>.Shared.Rent(FilteredDataSize);
}
for (var i = 0; i < FilteredDataSize; i++)
{
FilteredData[i] = NextPrg.GlobalData[FilteredDataOffset + i];
}
I++;
prgStack[I] = null;
}
writeStream.Write(FilteredData, 0, FilteredDataSize);
writtenFileSize += FilteredDataSize;
destUnpSize -= FilteredDataSize;
WrittenBorder = BlockEnd;
WriteSize = (unpPtr - WrittenBorder) & PackDef.MAXWINMASK;
}
finally
{
ArrayPool<byte>.Shared.Return(FilteredData);
}
writeStream.Write(FilteredData, 0, FilteredDataSize);
unpSomeRead = true;
writtenFileSize += FilteredDataSize;
destUnpSize -= FilteredDataSize;
WrittenBorder = BlockEnd;
WriteSize = (unpPtr - WrittenBorder) & PackDef.MAXWINMASK;
}
else
{
@@ -695,15 +722,10 @@ internal sealed partial class Unpack : BitInput, IRarUnpack, IDisposable
private void UnpWriteArea(int startPtr, int endPtr)
{
if (endPtr != startPtr)
{
unpSomeRead = true;
}
if (endPtr < startPtr)
{
UnpWriteData(window, startPtr, -startPtr & PackDef.MAXWINMASK);
UnpWriteData(window, 0, endPtr);
unpAllBuf = true;
}
else
{
@@ -757,19 +779,27 @@ internal sealed partial class Unpack : BitInput, IRarUnpack, IDisposable
// System.out.println("copyString(" + length + ", " + distance + ")");
var destPtr = unpPtr - distance;
var safeZone = PackDef.MAXWINSIZE - 260;
// System.out.println(unpPtr+":"+distance);
if (destPtr >= 0 && destPtr < PackDef.MAXWINSIZE - 260 && unpPtr < PackDef.MAXWINSIZE - 260)
// Fast path: use Array.Copy for bulk operations when in safe zone
if (destPtr >= 0 && destPtr < safeZone && unpPtr < safeZone && distance >= length)
{
window[unpPtr++] = window[destPtr++];
while (--length > 0)
// Non-overlapping copy: can use Array.Copy directly
Array.Copy(window, destPtr, window, unpPtr, length);
unpPtr += length;
}
else if (destPtr >= 0 && destPtr < safeZone && unpPtr < safeZone)
{
// Overlapping copy in safe zone: use byte-by-byte to handle self-referential copies
for (int i = 0; i < length; i++)
{
window[unpPtr++] = window[destPtr++];
window[unpPtr + i] = window[destPtr + i];
}
unpPtr += length;
}
else
{
// Slow path with wraparound mask
while (length-- != 0)
{
window[unpPtr] = window[destPtr++ & PackDef.MAXWINMASK];
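
The rewritten copyString dispatches on three cases: a non-overlapping bulk copy, an overlapping in-window copy, and a masked wraparound copy near the window edge. A standalone sketch of the same three-way dispatch (the 260 margin mirrors the safety zone used above; the window size is assumed to be a power of two):

    using System;

    // Illustrative LZ77-style window copy with the same three paths as above.
    static void CopyFromWindow(byte[] window, int mask, ref int unpPtr, int distance, int length)
    {
        var destPtr = unpPtr - distance;
        var safeZone = window.Length - 260; // margin so neither pointer can run off the end

        if (destPtr >= 0 && destPtr < safeZone && unpPtr < safeZone && distance >= length)
        {
            // Source and destination do not overlap: one bulk copy.
            Array.Copy(window, destPtr, window, unpPtr, length);
            unpPtr += length;
        }
        else if (destPtr >= 0 && destPtr < safeZone && unpPtr < safeZone)
        {
            // Overlapping (distance < length): byte-by-byte so earlier output feeds
            // later output, e.g. distance 1 replicates the previous byte length times.
            for (var i = 0; i < length; i++)
            {
                window[unpPtr + i] = window[destPtr + i];
            }
            unpPtr += length;
        }
        else
        {
            // Near the window edge, or destPtr negative: mask every index to wrap around.
            while (length-- != 0)
            {
                window[unpPtr] = window[destPtr++ & mask];
                unpPtr = (unpPtr + 1) & mask;
            }
        }
    }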
@@ -1028,7 +1058,7 @@ internal sealed partial class Unpack : BitInput, IRarUnpack, IDisposable
vmCode.Add((byte)(GetBits() >> 8));
AddBits(8);
}
return (AddVMCode(FirstByte, vmCode, Length));
return AddVMCode(FirstByte, vmCode);
}
private bool ReadVMCodePPM()
@@ -1073,12 +1103,12 @@ internal sealed partial class Unpack : BitInput, IRarUnpack, IDisposable
}
vmCode.Add((byte)Ch); // VMCode[I]=Ch;
}
return (AddVMCode(FirstByte, vmCode, Length));
return AddVMCode(FirstByte, vmCode);
}
private bool AddVMCode(int firstByte, List<byte> vmCode, int length)
private bool AddVMCode(int firstByte, List<byte> vmCode)
{
var Inp = new BitInput();
using var Inp = new BitInput();
Inp.InitBitInput();
// memcpy(Inp.InBuf,Code,Min(BitInput::MAX_SIZE,CodeSize));
@@ -1086,7 +1116,6 @@ internal sealed partial class Unpack : BitInput, IRarUnpack, IDisposable
{
Inp.InBuf[i] = vmCode[i];
}
rarVM.init();
int FiltPos;
if ((firstByte & 0x80) != 0)
@@ -1199,19 +1228,28 @@ internal sealed partial class Unpack : BitInput, IRarUnpack, IDisposable
{
return (false);
}
Span<byte> VMCode = stackalloc byte[VMCodeSize];
for (var I = 0; I < VMCodeSize; I++)
{
if (Inp.Overflow(3))
{
return (false);
}
VMCode[I] = (byte)(Inp.GetBits() >> 8);
Inp.AddBits(8);
}
// VM.Prepare(&VMCode[0],VMCodeSize,&Filter->Prg);
rarVM.prepare(VMCode, VMCodeSize, Filter.Program);
var VMCode = ArrayPool<byte>.Shared.Rent(VMCodeSize);
try
{
for (var I = 0; I < VMCodeSize; I++)
{
if (Inp.Overflow(3))
{
return (false);
}
VMCode[I] = (byte)(Inp.GetBits() >> 8);
Inp.AddBits(8);
}
// VM.Prepare(&VMCode[0],VMCodeSize,&Filter->Prg);
rarVM.prepare(VMCode.AsSpan(0, VMCodeSize), Filter.Program);
}
finally
{
ArrayPool<byte>.Shared.Return(VMCode);
}
}
StackFilter.Program.AltCommands = Filter.Program.Commands; // StackFilter->Prg.AltCmd=&Filter->Prg.Cmd[0];
StackFilter.Program.CommandCount = Filter.Program.CommandCount;

View File

@@ -19,14 +19,9 @@ internal partial class Unpack
private bool suspended;
internal bool unpAllBuf;
//private ComprDataIO unpIO;
private Stream readStream;
private Stream writeStream;
internal bool unpSomeRead;
private int readTop;
private long destUnpSize;
@@ -808,15 +803,10 @@ internal partial class Unpack
private void oldUnpWriteBuf()
{
if (unpPtr != wrPtr)
{
unpSomeRead = true;
}
if (unpPtr < wrPtr)
{
writeStream.Write(window, wrPtr, -wrPtr & PackDef.MAXWINMASK);
writeStream.Write(window, 0, unpPtr);
unpAllBuf = true;
}
else
{

View File

@@ -1,4 +1,5 @@
using System;
using System.Runtime.CompilerServices;
using SharpCompress.Compressors.Rar.VM;
namespace SharpCompress.Compressors.Rar.UnpackV1;
@@ -9,167 +10,15 @@ internal static class UnpackUtility
internal static uint DecodeNumber(this BitInput input, Decode.Decode dec) =>
(uint)input.decodeNumber(dec);
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static int decodeNumber(this BitInput input, Decode.Decode dec)
{
int bits;
long bitField = input.GetBits() & 0xfffe;
// if (bitField < dec.getDecodeLen()[8]) {
// if (bitField < dec.getDecodeLen()[4]) {
// if (bitField < dec.getDecodeLen()[2]) {
// if (bitField < dec.getDecodeLen()[1]) {
// bits = 1;
// } else {
// bits = 2;
// }
// } else {
// if (bitField < dec.getDecodeLen()[3]) {
// bits = 3;
// } else {
// bits = 4;
// }
// }
// } else {
// if (bitField < dec.getDecodeLen()[6]) {
// if (bitField < dec.getDecodeLen()[5])
// bits = 5;
// else
// bits = 6;
// } else {
// if (bitField < dec.getDecodeLen()[7]) {
// bits = 7;
// } else {
// bits = 8;
// }
// }
// }
// } else {
// if (bitField < dec.getDecodeLen()[12]) {
// if (bitField < dec.getDecodeLen()[10])
// if (bitField < dec.getDecodeLen()[9])
// bits = 9;
// else
// bits = 10;
// else if (bitField < dec.getDecodeLen()[11])
// bits = 11;
// else
// bits = 12;
// } else {
// if (bitField < dec.getDecodeLen()[14]) {
// if (bitField < dec.getDecodeLen()[13]) {
// bits = 13;
// } else {
// bits = 14;
// }
// } else {
// bits = 15;
// }
// }
// }
// addbits(bits);
// int N = dec.getDecodePos()[bits]
// + (((int) bitField - dec.getDecodeLen()[bits - 1]) >>> (16 - bits));
// if (N >= dec.getMaxNum()) {
// N = 0;
// }
// return (dec.getDecodeNum()[N]);
var decodeLen = dec.DecodeLen;
if (bitField < decodeLen[8])
{
if (bitField < decodeLen[4])
{
if (bitField < decodeLen[2])
{
if (bitField < decodeLen[1])
{
bits = 1;
}
else
{
bits = 2;
}
}
else
{
if (bitField < decodeLen[3])
{
bits = 3;
}
else
{
bits = 4;
}
}
}
else
{
if (bitField < decodeLen[6])
{
if (bitField < decodeLen[5])
{
bits = 5;
}
else
{
bits = 6;
}
}
else
{
if (bitField < decodeLen[7])
{
bits = 7;
}
else
{
bits = 8;
}
}
}
}
else
{
if (bitField < decodeLen[12])
{
if (bitField < decodeLen[10])
{
if (bitField < decodeLen[9])
{
bits = 9;
}
else
{
bits = 10;
}
}
else if (bitField < decodeLen[11])
{
bits = 11;
}
else
{
bits = 12;
}
}
else
{
if (bitField < decodeLen[14])
{
if (bitField < decodeLen[13])
{
bits = 13;
}
else
{
bits = 14;
}
}
else
{
bits = 15;
}
}
}
// Decision-tree lookup with cached bounds to find the bit length
int bits = FindDecodeBits(bitField, decodeLen);
input.AddBits(bits);
var N =
dec.DecodePos[bits]
@@ -181,6 +30,52 @@ internal static class UnpackUtility
return (dec.DecodeNum[N]);
}
/// <summary>
/// Fast decision-tree lookup to find which bit length matches the bitField.
/// Optimized with cached array access to minimize memory lookups.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static int FindDecodeBits(long bitField, int[] decodeLen)
{
// Cache critical values to reduce array access overhead
long len4 = decodeLen[4];
long len8 = decodeLen[8];
long len12 = decodeLen[12];
if (bitField < len8)
{
if (bitField < len4)
{
long len2 = decodeLen[2];
if (bitField < len2)
{
return bitField < decodeLen[1] ? 1 : 2;
}
return bitField < decodeLen[3] ? 3 : 4;
}
long len6 = decodeLen[6];
if (bitField < len6)
{
return bitField < decodeLen[5] ? 5 : 6;
}
return bitField < decodeLen[7] ? 7 : 8;
}
if (bitField < len12)
{
long len10 = decodeLen[10];
if (bitField < len10)
{
return bitField < decodeLen[9] ? 9 : 10;
}
return bitField < decodeLen[11] ? 11 : 12;
}
long len14 = decodeLen[14];
return bitField < len14 ? (bitField < decodeLen[13] ? 13 : 14) : 15;
}
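
Since FindDecodeBits must return exactly what the removed if/else chain returned, it can be checked exhaustively against a straight linear scan over all 16-bit inputs. An illustrative check (not part of the repo; it assumes FindDecodeBits is visible to the test, and relies on decodeLen being non-decreasing, which makeDecodeTables guarantees):

    using System;

    // Reference implementation: smallest bits in 1..14 with bitField < decodeLen[bits], else 15.
    static int FindDecodeBitsLinear(long bitField, int[] decodeLen)
    {
        for (var bits = 1; bits < 15; bits++)
        {
            if (bitField < decodeLen[bits])
            {
                return bits;
            }
        }
        return 15;
    }

    static void VerifyFindDecodeBits(int[] decodeLen)
    {
        // GetBits() & 0xfffe is always even, so stepping by 2 covers every reachable input.
        for (long bitField = 0; bitField < 0x10000; bitField += 2)
        {
            var expected = FindDecodeBitsLinear(bitField, decodeLen);
            var actual = FindDecodeBits(bitField, decodeLen);
            if (expected != actual)
            {
                throw new InvalidOperationException($"mismatch at {bitField}: {expected} vs {actual}");
            }
        }
    }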
internal static void makeDecodeTables(
Span<byte> lenTab,
int offset,
@@ -194,8 +89,7 @@ internal static class UnpackUtility
long M,
N;
new Span<int>(dec.DecodeNum).Clear(); // memset(Dec->DecodeNum,0,Size*sizeof(*Dec->DecodeNum));
new Span<int>(dec.DecodeNum).Clear();
for (i = 0; i < size; i++)
{
lenCount[lenTab[offset + i] & 0xF]++;

View File

@@ -1,3 +1,4 @@
#if NETSTANDARD2_0 || NETFRAMEWORK
using System;
using System.IO;
using SharpCompress.Common.Rar.Headers;
@@ -106,3 +107,4 @@ internal partial class Unpack : IRarUnpack
public static byte[] EnsureCapacity(byte[] array, int length) =>
array.Length < length ? new byte[length] : array;
}
#endif

View File

@@ -413,7 +413,7 @@ internal partial class Unpack
else
//x memcpy(Mem,Window+BlockStart,BlockLength);
{
Utility.Copy(Window, BlockStart, Mem, 0, BlockLength);
Buffer.BlockCopy(Window, (int)BlockStart, Mem, 0, (int)BlockLength);
}
}
else
@@ -427,9 +427,21 @@ internal partial class Unpack
else
{
//x memcpy(Mem,Window+BlockStart,FirstPartLength);
Utility.Copy(Window, BlockStart, Mem, 0, FirstPartLength);
Buffer.BlockCopy(
Window,
(int)BlockStart,
Mem,
0,
(int)FirstPartLength
);
//x memcpy(Mem+FirstPartLength,Window,BlockEnd);
Utility.Copy(Window, 0, Mem, FirstPartLength, BlockEnd);
Buffer.BlockCopy(
Window,
0,
Mem,
(int)FirstPartLength,
(int)BlockEnd
);
}
}

View File

@@ -1,3 +1,4 @@
#if NETSTANDARD2_0 || NETFRAMEWORK
#nullable disable
using System;
@@ -29,12 +30,12 @@ internal sealed partial class Unpack : BitInput
Suspended = false;
UnpAllBuf = false;
UnpSomeRead = false;
/*#if RarV2017_RAR_SMP
MaxUserThreads = 1;
UnpThreadPool = CreateThreadPool();
ReadBufMT = null;
UnpThreadData = null;
#endif*/
// #if RarV2017_RAR_SMP
// MaxUserThreads = 1;
// UnpThreadPool = CreateThreadPool();
// ReadBufMT = null;
// UnpThreadData = null;
// #endif
MaxWinSize = 0;
MaxWinMask = 0;
@@ -197,21 +198,21 @@ internal sealed partial class Unpack : BitInput
break;
#endif
case 50: // RAR 5.0 compression algorithm.
/*#if RarV2017_RAR_SMP
if (MaxUserThreads > 1)
{
// We do not use the multithreaded unpack routine to repack RAR archives
// in 'suspended' mode, because unlike the single threaded code it can
// write more than one dictionary for same loop pass. So we would need
// larger buffers of unknown size. Also we do not support multithreading
// in fragmented window mode.
if (!Fragmented)
{
Unpack5MT(Solid);
break;
}
}
#endif*/
// #if RarV2017_RAR_SMP
// if (MaxUserThreads > 1)
// {
// // We do not use the multithreaded unpack routine to repack RAR archives
// // in 'suspended' mode, because unlike the single threaded code it can
// // write more than one dictionary for same loop pass. So we would need
// // larger buffers of unknown size. Also we do not support multithreading
// // in fragmented window mode.
// if (!Fragmented)
// {
// Unpack5MT(Solid);
// break;
// }
// }
// #endif
Unpack5(Solid);
break;
#if !Rar2017_NOSTRICT
@@ -407,3 +408,4 @@ internal sealed partial class Unpack : BitInput
}
}
}
#endif

View File

@@ -0,0 +1,411 @@
#if !NETSTANDARD2_0 && !NETFRAMEWORK
#nullable disable
using System;
using SharpCompress.Common;
using static SharpCompress.Compressors.Rar.UnpackV2017.PackDef;
using static SharpCompress.Compressors.Rar.UnpackV2017.UnpackGlobal;
#if !Rar2017_64bit
using size_t = System.UInt32;
#else
using nint = System.Int64;
using nuint = System.UInt64;
using size_t = System.UInt64;
#endif
namespace SharpCompress.Compressors.Rar.UnpackV2017;
internal sealed partial class Unpack : BitInput
{
public Unpack( /* ComprDataIO *DataIO */
)
//:Inp(true),VMCodeInp(true)
: base(true)
{
_UnpackCtor();
//UnpIO=DataIO;
Window = null;
Fragmented = false;
Suspended = false;
UnpAllBuf = false;
UnpSomeRead = false;
// #if RarV2017_RAR_SMP
// MaxUserThreads = 1;
// UnpThreadPool = CreateThreadPool();
// ReadBufMT = null;
// UnpThreadData = null;
// #endif
MaxWinSize = 0;
MaxWinMask = 0;
// Perform initialization, which should be done only once for all files.
// It prevents crash if first DoUnpack call is later made with wrong
// (true) 'Solid' value.
UnpInitData(false);
#if !RarV2017_SFX_MODULE
// RAR 1.5 decompression initialization
UnpInitData15(false);
InitHuff();
#endif
}
// later: may need Dispose() if we support thread pool
//Unpack::~Unpack()
//{
// InitFilters30(false);
//
// if (Window!=null)
// free(Window);
//#if RarV2017_RAR_SMP
// DestroyThreadPool(UnpThreadPool);
// delete[] ReadBufMT;
// delete[] UnpThreadData;
//#endif
//}
private void Init(size_t WinSize, bool Solid)
{
// If 32-bit RAR unpacks an archive with a 4 GB dictionary, the window size
// will be 0 because of size_t overflow. Let's issue the memory error.
if (WinSize == 0)
//ErrHandler.MemoryError();
{
throw new InvalidFormatException(
"invalid window size (possibly due to a rar file with a 4GB being unpacked on a 32-bit platform)"
);
}
// Minimum window size must be at least twice the maximum possible
// size of filter block, which is 0x10000 in RAR now. If window size is
// smaller, we can have a block with never cleared flt->NextWindow flag
// in UnpWriteBuf(). Minimum window size 0x20000 would be enough, but let's
// use 0x40000 for extra safety and possible filter area size expansion.
const size_t MinAllocSize = 0x40000;
if (WinSize < MinAllocSize)
{
WinSize = MinAllocSize;
}
if (WinSize <= MaxWinSize) // Use the already allocated window.
{
return;
}
if ((WinSize >> 16) > 0x10000) // Window size must not exceed 4 GB.
{
return;
}
// Archiving code guarantees that window size does not grow in the same
// solid stream. So if we are here, we are either creating a new window
// or increasing the size of non-solid window. So we could safely reject
// current window data without copying them to a new window, though being
// extra cautious, we still handle the solid window grow case below.
var Grow = Solid && (Window != null || Fragmented);
// We do not handle growth for existing fragmented window.
if (Grow && Fragmented)
//throw std::bad_alloc();
{
throw new InvalidFormatException("Grow && Fragmented");
}
var NewWindow = Fragmented ? null : new byte[WinSize];
if (NewWindow == null)
{
if (Grow || WinSize < 0x1000000)
{
// We do not support growth for new fragmented window.
// Also exclude RAR4 and small dictionaries.
//throw std::bad_alloc();
throw new InvalidFormatException("Grow || WinSize<0x1000000");
}
else
{
if (Window != null) // If allocated by preceding files.
{
//free(Window);
Window = null;
}
FragWindow.Init(WinSize);
Fragmented = true;
}
}
if (!Fragmented)
{
// Clean the window to generate the same output when unpacking corrupt
// RAR files, which may access unused areas of sliding dictionary.
// sharpcompress: don't need this, freshly allocated above
//memset(NewWindow,0,WinSize);
// If Window is not NULL, it means that window size has grown.
// In solid streams we need to copy data to a new window in such case.
// RAR archiving code does not allow it in solid streams now,
// but let's implement it anyway just in case we'll change it sometimes.
if (Grow)
{
for (size_t I = 1; I <= MaxWinSize; I++)
{
NewWindow[(UnpPtr - I) & (WinSize - 1)] = Window[
(UnpPtr - I) & (MaxWinSize - 1)
];
}
}
//if (Window!=null)
// free(Window);
Window = NewWindow;
}
MaxWinSize = WinSize;
MaxWinMask = MaxWinSize - 1;
}
private void DoUnpack(uint Method, bool Solid)
{
// Methods <50 will crash in Fragmented mode when accessing NULL Window.
// They cannot be called in such mode now, but we check it below anyway
// just for extra safety.
switch (Method)
{
#if !RarV2017_SFX_MODULE
case 15: // rar 1.5 compression
if (!Fragmented)
{
Unpack15(Solid);
}
break;
case 20: // rar 2.x compression
case 26: // files larger than 2GB
if (!Fragmented)
{
Unpack20(Solid);
}
break;
#endif
#if !RarV2017_RAR5ONLY
case 29: // rar 3.x compression
if (!Fragmented)
{
throw new NotImplementedException();
}
break;
#endif
case 50: // RAR 5.0 compression algorithm.
// #if RarV2017_RAR_SMP
// if (MaxUserThreads > 1)
// {
// // We do not use the multithreaded unpack routine to repack RAR archives
// // in 'suspended' mode, because unlike the single threaded code it can
// // write more than one dictionary for same loop pass. So we would need
// // larger buffers of unknown size. Also we do not support multithreading
// // in fragmented window mode.
// if (!Fragmented)
// {
// Unpack5MT(Solid);
// break;
// }
// }
// #endif
Unpack5(Solid);
break;
#if !Rar2017_NOSTRICT
default:
throw new InvalidFormatException("unknown compression method " + Method);
#endif
}
}
private void UnpInitData(bool Solid)
{
if (!Solid)
{
new Span<uint>(OldDist).Clear();
OldDistPtr = 0;
LastDist = LastLength = 0;
// memset(Window,0,MaxWinSize);
//memset(&BlockTables,0,sizeof(BlockTables));
BlockTables = new UnpackBlockTables();
// sharpcompress: no default ctor for struct
BlockTables.Init();
UnpPtr = WrPtr = 0;
WriteBorder = Math.Min(MaxWinSize, UNPACK_MAX_WRITE) & MaxWinMask;
}
// Filters never share several solid files, so we can safely reset them
// even in solid archive.
InitFilters();
Inp.InitBitInput();
WrittenFileSize = 0;
ReadTop = 0;
ReadBorder = 0;
//memset(&BlockHeader,0,sizeof(BlockHeader));
BlockHeader = new UnpackBlockHeader();
BlockHeader.BlockSize = -1; // '-1' means not defined yet.
#if !RarV2017_SFX_MODULE
UnpInitData20(Solid);
#endif
//UnpInitData30(Solid);
UnpInitData50(Solid);
}
// LengthTable contains the length in bits for every element of the alphabet.
// Dec is the structure used to decode the Huffman code.
// Size is the size of the length table and of the DecodeNum field in the Dec structure.
private void MakeDecodeTables(Span<byte> LengthTable, int offset, DecodeTable Dec, uint Size)
{
// Size of alphabet and DecodePos array.
Dec.MaxNum = Size;
// Calculate how many entries for every bit length in LengthTable we have.
var LengthCount = new uint[16];
//memset(LengthCount,0,sizeof(LengthCount));
for (size_t I = 0; I < Size; I++)
{
LengthCount[LengthTable[checked((int)(offset + I))] & 0xf]++;
}
// We must not calculate the number of zero length codes.
LengthCount[0] = 0;
// Set the entire DecodeNum to zero.
//memset(Dec->DecodeNum,0,Size*sizeof(*Dec->DecodeNum));
new Span<ushort>(Dec.DecodeNum).Clear();
// Initialize not really used entry for zero length code.
Dec.DecodePos[0] = 0;
// Start code for bit length 1 is 0.
Dec.DecodeLen[0] = 0;
// Right aligned upper limit code for current bit length.
uint UpperLimit = 0;
for (var I = 1; I < 16; I++)
{
// Adjust the upper limit code.
UpperLimit += LengthCount[I];
// Left aligned upper limit code.
var LeftAligned = UpperLimit << (16 - I);
// Prepare the upper limit code for next bit length.
UpperLimit *= 2;
// Store the left aligned upper limit code.
Dec.DecodeLen[I] = LeftAligned;
// Every item of this array contains the sum of all preceding items.
// So it contains the start position in code list for every bit length.
Dec.DecodePos[I] = Dec.DecodePos[I - 1] + LengthCount[I - 1];
}
// Prepare the copy of DecodePos. We'll modify this copy below,
// so we cannot use the original DecodePos.
var CopyDecodePos = new uint[Dec.DecodePos.Length];
//memcpy(CopyDecodePos,Dec->DecodePos,sizeof(CopyDecodePos));
Array.Copy(Dec.DecodePos, CopyDecodePos, CopyDecodePos.Length);
// For every bit length in the bit length table and so for every item
// of alphabet.
for (uint I = 0; I < Size; I++)
{
// Get the current bit length.
var _CurBitLength = (byte)(LengthTable[checked((int)(offset + I))] & 0xf);
if (_CurBitLength != 0)
{
// Last position in code list for current bit length.
var LastPos = CopyDecodePos[_CurBitLength];
// Prepare the decode table, so this position in code list will be
// decoded to current alphabet item number.
Dec.DecodeNum[LastPos] = (ushort)I;
// We'll use next position number for this bit length next time.
// So we pass through the entire range of positions available
// for every bit length.
CopyDecodePos[_CurBitLength]++;
}
}
// Define the number of bits to process in quick mode. We use more bits
// for larger alphabets. More bits means that more codes will be processed
// in quick mode, but also that more time will be spent to preparation
// of tables for quick decode.
switch (Size)
{
case NC:
case NC20:
case NC30:
Dec.QuickBits = MAX_QUICK_DECODE_BITS;
break;
default:
Dec.QuickBits = MAX_QUICK_DECODE_BITS - 3;
break;
}
// Size of tables for quick mode.
var QuickDataSize = 1U << (int)Dec.QuickBits;
// Bit length for current code, start from 1 bit codes. It is important
// to use 1 bit instead of 0 for minimum code length, so we are moving
// forward even when processing a corrupt archive.
//uint CurBitLength=1;
byte CurBitLength = 1;
// For every right aligned bit string which supports the quick decoding.
for (uint Code = 0; Code < QuickDataSize; Code++)
{
// Left align the current code, so it will be in usual bit field format.
var BitField = Code << (int)(16 - Dec.QuickBits);
// Prepare the table for quick decoding of bit lengths.
// Find the upper limit for current bit field and adjust the bit length
// accordingly if necessary.
while (CurBitLength < Dec.DecodeLen.Length && BitField >= Dec.DecodeLen[CurBitLength])
{
CurBitLength++;
}
// Translation of right aligned bit string to bit length.
Dec.QuickLen[Code] = CurBitLength;
// Prepare the table for quick translation of position in code list
// to position in alphabet.
// Calculate the distance from the start code for current bit length.
var Dist = BitField - Dec.DecodeLen[CurBitLength - 1];
// Right align the distance.
Dist >>= (16 - CurBitLength);
// Now we can calculate the position in the code list. It is the sum
// of first position for current bit length and right aligned distance
// between our bit field and start code for current bit length.
uint Pos;
if (
CurBitLength < Dec.DecodePos.Length
&& (Pos = Dec.DecodePos[CurBitLength] + Dist) < Size
)
{
// Define the code to alphabet number translation.
Dec.QuickNum[Code] = Dec.DecodeNum[Pos];
}
else
{
// Can be here for length table filled with zeroes only (empty).
Dec.QuickNum[Code] = 0;
}
}
}
}
#endif
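
The construction above is the classic canonical-Huffman setup: DecodeLen holds the left-aligned upper-limit code per bit length, DecodePos the first code-list slot per length, and DecodeNum the slot-to-symbol map. A worked toy example, outside the repo, for the alphabet {A,B,C,D} with code lengths {2,1,3,3}, whose canonical codes are B=0, A=10, C=110, D=111:

    using System;

    // Illustrative miniature of the table construction and decode step above.
    internal static class CanonicalHuffmanToy
    {
        static readonly uint[] DecodeLen = new uint[16]; // left-aligned upper limit per bit length
        static readonly uint[] DecodePos = new uint[16]; // first code-list slot per bit length
        static readonly int[] DecodeNum = new int[4];    // code-list slot -> symbol

        static void Build(byte[] lengths)
        {
            var lengthCount = new uint[16];
            foreach (var l in lengths) { lengthCount[l]++; }
            uint upperLimit = 0;
            for (var i = 1; i < 16; i++)
            {
                upperLimit += lengthCount[i];
                DecodeLen[i] = upperLimit << (16 - i); // left align to 16 bits
                upperLimit *= 2;
                DecodePos[i] = DecodePos[i - 1] + lengthCount[i - 1];
            }
            var next = (uint[])DecodePos.Clone();
            for (var sym = 0; sym < lengths.Length; sym++)
            {
                if (lengths[sym] != 0) { DecodeNum[next[lengths[sym]]++] = sym; }
            }
        }

        static int Decode(uint bitField16) // 16-bit left-aligned lookahead
        {
            var bits = 1;
            while (bits < 15 && bitField16 >= DecodeLen[bits]) { bits++; }
            var dist = (bitField16 - DecodeLen[bits - 1]) >> (16 - bits);
            return DecodeNum[DecodePos[bits] + dist];
        }

        static void Main()
        {
            Build(new byte[] { 2, 1, 3, 3 }); // symbols A=0, B=1, C=2, D=3
            Console.WriteLine(Decode(0x0000)); // 1: leading bit 0 decodes to B
            Console.WriteLine(Decode(0x8000)); // 0: leading bits 10 decode to A
            Console.WriteLine(Decode(0xD800)); // 2: leading bits 110 decode to C
            Console.WriteLine(Decode(0xE000)); // 3: leading bits 111 decode to D
        }
    }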

View File

@@ -0,0 +1,114 @@
#if !NETSTANDARD2_0 && !NETFRAMEWORK
using System;
using System.Buffers;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common.Rar.Headers;
#if !Rar2017_64bit
using size_t = System.UInt32;
#else
using nint = System.Int64;
using nuint = System.UInt64;
using size_t = System.UInt64;
#endif
namespace SharpCompress.Compressors.Rar.UnpackV2017;
internal partial class Unpack : IRarUnpack
{
private FileHeader fileHeader;
private Stream readStream;
private Stream writeStream;
private void _UnpackCtor()
{
for (var i = 0; i < AudV.Length; i++)
{
AudV[i] = new AudioVariables();
}
}
private int UnpIO_UnpRead(byte[] buf, int offset, int count) =>
// NOTE: callers check for -1 to detect errors; we throw instead.
readStream.Read(buf, offset, count);
private void UnpIO_UnpWrite(byte[] buf, size_t offset, uint count) =>
writeStream.Write(buf, checked((int)offset), checked((int)count));
public ValueTask DoUnpackAsync(FileHeader fileHeader, Stream readStream, Stream writeStream)
{
// as of 12/2017 .NET limits array indexing to using a signed integer
// MaxWinSize causes unpack to use a fragmented window when the file
// window size exceeds MaxWinSize
// uggh, that's not how this variable is used, it's the size of the currently allocated window buffer
//x MaxWinSize = ((uint)int.MaxValue) + 1;
// may be long.MaxValue which could indicate unknown size (not present in header)
DestUnpSize = fileHeader.UncompressedSize;
this.fileHeader = fileHeader;
this.readStream = readStream;
this.writeStream = writeStream;
if (!fileHeader.IsStored)
{
Init(fileHeader.WindowSize, fileHeader.IsSolid);
}
Suspended = false;
return DoUnpackAsync();
}
public ValueTask DoUnpackAsync()
{
if (fileHeader.IsStored)
{
return UnstoreFileAsync();
}
else
{
DoUnpack(fileHeader.CompressionAlgorithm, fileHeader.IsSolid);
return new ValueTask();
}
}
private async ValueTask UnstoreFileAsync()
{
var length = (int)Math.Min(0x10000, DestUnpSize);
using var buffer = MemoryPool<byte>.Shared.Rent(length);
do
{
var n = await readStream.ReadAsync(buffer.Memory);
if (n == 0)
{
break;
}
await writeStream.WriteAsync(buffer.Memory.Slice(0, n));
DestUnpSize -= n;
} while (!Suspended);
}
public bool Suspended { get; set; }
public long DestSize => DestUnpSize;
public int Char
{
get
{
// TODO: coderb: not sure where the "MAXSIZE-30" comes from, ported from V1 code
if (InAddr > MAX_SIZE - 30)
{
UnpReadBuf();
}
return InBuf[InAddr++];
}
}
public int PpmEscChar
{
get => PPMEscChar;
set => PPMEscChar = value;
}
public static byte[] EnsureCapacity(byte[] array, int length) =>
array.Length < length ? new byte[length] : array;
}
#endif

View File

@@ -1,6 +1,9 @@
using System;
using System.Buffers;
namespace SharpCompress.Compressors.Rar.VM;
internal class BitInput
internal class BitInput : IDisposable
{
/// <summary> the max size of the input</summary>
internal const int MAX_SIZE = 0x8000;
@@ -20,9 +23,11 @@ internal class BitInput
set => inBit = value;
}
public bool ExternalBuffer;
private byte[] _privateBuffer = ArrayPool<byte>.Shared.Rent(MAX_SIZE);
private bool _disposed;
/// <summary> </summary>
internal BitInput() => InBuf = new byte[MAX_SIZE];
internal BitInput() => InBuf = _privateBuffer;
internal byte[] InBuf { get; }
@@ -87,4 +92,14 @@ internal class BitInput
/// <returns> true if an Overflow would occur
/// </returns>
internal bool Overflow(int IncPtr) => (inAddr + IncPtr >= MAX_SIZE);
public virtual void Dispose()
{
if (_disposed)
{
return;
}
ArrayPool<byte>.Shared.Return(_privateBuffer);
_disposed = true;
}
}

View File

@@ -1,6 +1,5 @@
#nullable disable
using System;
using System.Buffers;
using System.Buffers.Binary;
using System.Collections.Generic;
@@ -16,7 +15,9 @@ internal sealed class RarVM : BitInput
// Mem.set_Renamed(offset + 3, Byte.valueOf((sbyte) ((Utility.URShift(value_Renamed, 24)) & 0xff)));
//}
internal byte[] Mem { get; private set; }
internal byte[] Mem => _memory.NotNull();
private byte[]? _memory = ArrayPool<byte>.Shared.Rent(VM_MEMSIZE + 4);
public const int VM_MEMSIZE = 0x40000;
@@ -40,11 +41,18 @@ internal sealed class RarVM : BitInput
private int IP;
internal RarVM() =>
//InitBlock();
Mem = null;
internal RarVM() { }
internal void init() => Mem ??= new byte[VM_MEMSIZE + 4];
public override void Dispose()
{
base.Dispose();
if (_memory is null)
{
return;
}
ArrayPool<byte>.Shared.Return(_memory);
_memory = null;
}
private bool IsVMMem(byte[] mem) => Mem == mem;
@@ -776,9 +784,10 @@ internal sealed class RarVM : BitInput
}
}
public void prepare(ReadOnlySpan<byte> code, int codeSize, VMPreparedProgram prg)
public void prepare(ReadOnlySpan<byte> code, VMPreparedProgram prg)
{
InitBitInput();
var codeSize = code.Length;
var cpLength = Math.Min(MAX_SIZE, codeSize);
// memcpy(inBuf,Code,Min(CodeSize,BitInput::MAX_SIZE));
@@ -795,7 +804,7 @@ internal sealed class RarVM : BitInput
prg.CommandCount = 0;
if (xorSum == code[0])
{
var filterType = IsStandardFilter(code, codeSize);
var filterType = IsStandardFilter(code);
if (filterType != VMStandardFilters.VMSF_NONE)
{
var curCmd = new VMPreparedCommand();
@@ -1105,7 +1114,7 @@ internal sealed class RarVM : BitInput
}
}
private VMStandardFilters IsStandardFilter(ReadOnlySpan<byte> code, int codeSize)
private VMStandardFilters IsStandardFilter(ReadOnlySpan<byte> code)
{
VMStandardFilterSignature[] stdList =
{
@@ -1130,6 +1139,7 @@ internal sealed class RarVM : BitInput
private void ExecuteStandardFilter(VMStandardFilters filterType)
{
var mem = Mem;
switch (filterType)
{
case VMStandardFilters.VMSF_E8:
@@ -1148,7 +1158,7 @@ internal sealed class RarVM : BitInput
);
for (var curPos = 0; curPos < dataSize - 4; )
{
var curByte = Mem[curPos++];
var curByte = mem[curPos++];
if (curByte == 0xe8 || curByte == cmpByte2)
{
// #ifdef PRESENT_INT32
@@ -1164,19 +1174,19 @@ internal sealed class RarVM : BitInput
// SET_VALUE(false,Data,Addr-Offset);
// #else
var offset = curPos + fileOffset;
long Addr = GetValue(false, Mem, curPos);
long Addr = GetValue(false, mem, curPos);
if ((Addr & unchecked((int)0x80000000)) != 0)
{
if (((Addr + offset) & unchecked((int)0x80000000)) == 0)
{
SetValue(false, Mem, curPos, (int)Addr + fileSize);
SetValue(false, mem, curPos, (int)Addr + fileSize);
}
}
else
{
if (((Addr - fileSize) & unchecked((int)0x80000000)) != 0)
{
SetValue(false, Mem, curPos, (int)(Addr - offset));
SetValue(false, mem, curPos, (int)(Addr - offset));
}
}
@@ -1204,7 +1214,7 @@ internal sealed class RarVM : BitInput
while (curPos < dataSize - 21)
{
var Byte = (Mem[curPos] & 0x1f) - 0x10;
var Byte = (mem[curPos] & 0x1f) - 0x10;
if (Byte >= 0)
{
var cmdMask = Masks[Byte];
@@ -1250,7 +1260,7 @@ internal sealed class RarVM : BitInput
var channels = R[0] & unchecked((int)0xFFffFFff);
var srcPos = 0;
var border = (dataSize * 2) & unchecked((int)0xFFffFFff);
SetValue(false, Mem, VM_GLOBALMEMADDR + 0x20, dataSize);
SetValue(false, mem, VM_GLOBALMEMADDR + 0x20, dataSize);
if (dataSize >= VM_GLOBALMEMADDR / 2)
{
break;
@@ -1268,7 +1278,7 @@ internal sealed class RarVM : BitInput
destPos += channels
)
{
Mem[destPos] = (PrevByte = (byte)(PrevByte - Mem[srcPos++]));
mem[destPos] = (PrevByte = (byte)(PrevByte - mem[srcPos++]));
}
}
}
@@ -1283,7 +1293,7 @@ internal sealed class RarVM : BitInput
var channels = 3;
var srcPos = 0;
var destDataPos = dataSize;
SetValue(false, Mem, VM_GLOBALMEMADDR + 0x20, dataSize);
SetValue(false, mem, VM_GLOBALMEMADDR + 0x20, dataSize);
if (dataSize >= VM_GLOBALMEMADDR / 2 || posR < 0)
{
break;
@@ -1299,8 +1309,8 @@ internal sealed class RarVM : BitInput
if (upperPos >= 3)
{
var upperDataPos = destDataPos + upperPos;
var upperByte = Mem[upperDataPos] & 0xff;
var upperLeftByte = Mem[upperDataPos - 3] & 0xff;
var upperByte = mem[upperDataPos] & 0xff;
var upperLeftByte = mem[upperDataPos - 3] & 0xff;
predicted = prevByte + upperByte - upperLeftByte;
var pa = Math.Abs((int)(predicted - prevByte));
var pb = Math.Abs((int)(predicted - upperByte));
@@ -1326,15 +1336,15 @@ internal sealed class RarVM : BitInput
predicted = prevByte;
}
prevByte = ((predicted - Mem[srcPos++]) & 0xff) & 0xff;
Mem[destDataPos + i] = (byte)(prevByte & 0xff);
prevByte = ((predicted - mem[srcPos++]) & 0xff) & 0xff;
mem[destDataPos + i] = (byte)(prevByte & 0xff);
}
}
for (int i = posR, border = dataSize - 2; i < border; i += 3)
{
var G = Mem[destDataPos + i + 1];
Mem[destDataPos + i] = (byte)(Mem[destDataPos + i] + G);
Mem[destDataPos + i + 2] = (byte)(Mem[destDataPos + i + 2] + G);
var G = mem[destDataPos + i + 1];
mem[destDataPos + i] = (byte)(mem[destDataPos + i] + G);
mem[destDataPos + i + 2] = (byte)(mem[destDataPos + i + 2] + G);
}
}
break;
@@ -1347,7 +1357,7 @@ internal sealed class RarVM : BitInput
var destDataPos = dataSize;
//byte *SrcData=Mem,*DestData=SrcData+DataSize;
SetValue(false, Mem, VM_GLOBALMEMADDR + 0x20, dataSize);
SetValue(false, mem, VM_GLOBALMEMADDR + 0x20, dataSize);
if (dataSize >= VM_GLOBALMEMADDR / 2)
{
break;
@@ -1377,10 +1387,10 @@ internal sealed class RarVM : BitInput
var predicted = (8 * prevByte) + (K1 * D1) + (K2 * D2) + (K3 * D3);
predicted = Utility.URShift(predicted, 3) & 0xff;
long curByte = Mem[srcPos++];
long curByte = mem[srcPos++];
predicted -= curByte;
Mem[destDataPos + i] = (byte)predicted;
mem[destDataPos + i] = (byte)predicted;
prevDelta = (byte)(predicted - prevByte);
//fix java byte
@@ -1480,15 +1490,15 @@ internal sealed class RarVM : BitInput
}
while (srcPos < dataSize)
{
var curByte = Mem[srcPos++];
if (curByte == 2 && (curByte = Mem[srcPos++]) != 2)
var curByte = mem[srcPos++];
if (curByte == 2 && (curByte = mem[srcPos++]) != 2)
{
curByte = (byte)(curByte - 32);
}
Mem[destPos++] = curByte;
mem[destPos++] = curByte;
}
SetValue(false, Mem, VM_GLOBALMEMADDR + 0x1c, destPos - dataSize);
SetValue(false, Mem, VM_GLOBALMEMADDR + 0x20, dataSize);
SetValue(false, mem, VM_GLOBALMEMADDR + 0x1c, destPos - dataSize);
SetValue(false, mem, VM_GLOBALMEMADDR + 0x20, dataSize);
}
break;
}
@@ -1528,15 +1538,14 @@ internal sealed class RarVM : BitInput
{
if (pos < VM_MEMSIZE)
{
//&& data!=Mem+Pos)
//memmove(Mem+Pos,Data,Min(DataSize,VM_MEMSIZE-Pos));
for (var i = 0; i < Math.Min(data.Length - offset, dataSize); i++)
// Use Array.Copy for fast bulk memory operations instead of byte-by-byte loop
// Calculate how much data can actually fit in VM memory
int copyLength = Math.Min(dataSize, VM_MEMSIZE - pos);
copyLength = Math.Min(copyLength, data.Length - offset);
if (copyLength > 0)
{
if ((VM_MEMSIZE - pos) < i)
{
break;
}
Mem[pos + i] = data[offset + i];
Array.Copy(data, offset, Mem, pos, copyLength);
}
}
}

View File

@@ -22,9 +22,7 @@ public class XZFooter
public static XZFooter FromStream(Stream stream)
{
var footer = new XZFooter(
new BinaryReader(SharpCompressStream.Create(stream, leaveOpen: true), Encoding.UTF8)
);
var footer = new XZFooter(new BinaryReader(stream, Encoding.UTF8, true));
footer.Process();
return footer;
}

View File

@@ -18,9 +18,7 @@ public class XZHeader
public static XZHeader FromStream(Stream stream)
{
var header = new XZHeader(
new BinaryReader(SharpCompressStream.Create(stream, leaveOpen: true), Encoding.UTF8)
);
var header = new XZHeader(new BinaryReader(stream, Encoding.UTF8, true));
header.Process();
return header;
}

View File

@@ -32,7 +32,7 @@ public class XZIndex
public static XZIndex FromStream(Stream stream, bool indexMarkerAlreadyVerified)
{
var index = new XZIndex(
new BinaryReader(SharpCompressStream.Create(stream, leaveOpen: true), Encoding.UTF8),
new BinaryReader(stream, Encoding.UTF8, true),
indexMarkerAlreadyVerified
);
index.Process();

View File

@@ -34,7 +34,7 @@ public abstract class Factory : IFactory
/// <exception cref="ArgumentNullException"><paramref name="factory"/> must not be null.</exception>
public static void RegisterFactory(Factory factory)
{
factory.CheckNotNull(nameof(factory));
factory.NotNull(nameof(factory));
_factories.Add(factory);
}

View File

@@ -1,4 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.IO;
@@ -66,6 +69,15 @@ internal class ListeningStream : Stream, IStreamStack
get => Stream.Position;
set => Stream.Position = value;
}
#if !NETSTANDARD2_0 && !NETFRAMEWORK
public override async ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken = default)
{
var read = await Stream.ReadAsync(buffer, cancellationToken);
_currentEntryTotalReadBytes += read;
_listener.FireCompressedBytesRead(_currentEntryTotalReadBytes, _currentEntryTotalReadBytes);
return read;
}
#endif
public override int Read(byte[] buffer, int offset, int count)
{

View File

@@ -1,8 +1,10 @@
using System;
using System.Buffers;
using System.Diagnostics;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.IO;
@@ -44,7 +46,11 @@ public class SharpCompressStream : Stream, IStreamStack
_bufferingEnabled = _bufferSize > 0;
if (_bufferingEnabled)
{
_buffer = new byte[_bufferSize];
if (_buffer is not null)
{
ArrayPool<byte>.Shared.Return(_buffer);
}
_buffer = ArrayPool<byte>.Shared.Rent(_bufferSize);
_bufferPosition = 0;
_bufferedLength = 0;
if (_bufferingEnabled)
@@ -173,6 +179,11 @@ public class SharpCompressStream : Stream, IStreamStack
if (disposing)
{
Stream.Dispose();
if (_buffer != null)
{
ArrayPool<byte>.Shared.Return(_buffer);
_buffer = null;
}
}
}
@@ -202,6 +213,59 @@ public class SharpCompressStream : Stream, IStreamStack
set { Seek(value, SeekOrigin.Begin); }
}
public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
if (count == 0)
{
return 0;
}
if (_bufferingEnabled)
{
ValidateBufferState();
// Fill buffer if needed
if (_bufferedLength == 0)
{
_bufferedLength = await Stream.ReadAsync(_buffer!, 0, _bufferSize, cancellationToken);
_bufferPosition = 0;
}
int available = _bufferedLength - _bufferPosition;
int toRead = Math.Min(count, available);
if (toRead > 0)
{
Array.Copy(_buffer!, _bufferPosition, buffer, offset, toRead);
_bufferPosition += toRead;
_internalPosition += toRead;
return toRead;
}
// If buffer exhausted, refill asynchronously
int r = await Stream.ReadAsync(_buffer!, 0, _bufferSize, cancellationToken);
if (r == 0)
{
return 0;
}
_bufferedLength = r;
_bufferPosition = 0;
toRead = Math.Min(count, _bufferedLength);
Array.Copy(_buffer!, 0, buffer, offset, toRead);
_bufferPosition = toRead;
_internalPosition += toRead;
return toRead;
}
else
{
var read = await Stream.ReadAsync(buffer, offset, count, cancellationToken);
_internalPosition += read;
return read;
}
}
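
Like any Stream.ReadAsync, the method above may return fewer bytes than requested (here at most one internal buffer's worth per call), so callers that need an exact count must loop. A small hypothetical helper, not part of SharpCompress:

    using System.IO;
    using System.Threading;
    using System.Threading.Tasks;

    // Drain a stream until 'count' bytes have arrived or the stream ends,
    // since a single ReadAsync is allowed to return less than requested.
    static async Task<int> ReadFullyAsync(
        Stream stream, byte[] buffer, int offset, int count, CancellationToken cancellationToken)
    {
        var total = 0;
        while (total < count)
        {
            var read = await stream.ReadAsync(buffer, offset + total, count - total, cancellationToken);
            if (read == 0)
            {
                break; // end of stream
            }
            total += read;
        }
        return total;
    }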
public override int Read(byte[] buffer, int offset, int count)
{
if (count == 0)

View File

@@ -4,7 +4,7 @@ using System;
using System.Collections;
using System.Collections.Generic;
namespace SharpCompress.Helpers;
namespace SharpCompress;
internal sealed class LazyReadOnlyCollection<T> : ICollection<T>
{

View File

@@ -4,20 +4,19 @@ using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Runtime.CompilerServices;
namespace SharpCompress.Helpers;
namespace SharpCompress;
internal static class NotNullExtensions
{
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static IEnumerable<T> Empty<T>(this IEnumerable<T>? source) =>
source ?? Enumerable.Empty<T>();
public static IEnumerable<T> Empty<T>(this IEnumerable<T>? source) => source ?? [];
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static IEnumerable<T> Empty<T>(this T? source)
{
if (source is null)
{
return Enumerable.Empty<T>();
return [];
}
return source.AsEnumerable();
}
@@ -68,4 +67,15 @@ internal static class NotNullExtensions
return obj.Value;
}
#endif
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static string NotNullOrEmpty(this string obj, string name)
{
obj.NotNull(name);
if (obj.Length == 0)
{
throw new ArgumentException("String is empty.", name);
}
return obj;
}
}

View File

@@ -2,6 +2,7 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Readers;
@@ -67,7 +68,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
}
}
public bool MoveToNextEntry()
public async Task<bool> MoveToNextEntryAsync()
{
if (_completed)
{
@@ -83,7 +84,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
}
if (!_wroteCurrentEntry)
{
SkipEntry();
await SkipEntryAsync();
}
_wroteCurrentEntry = false;
if (NextEntryForCurrentStream())
@@ -119,15 +120,15 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
#region Entry Skip/Write
private void SkipEntry()
private async Task SkipEntryAsync()
{
if (!Entry.IsDirectory)
{
Skip();
await SkipAsync();
}
}
private void Skip()
private async Task SkipAsync()
{
var part = Entry.Parts.First();
@@ -145,11 +146,11 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
}
}
//don't know the size so we have to try to decompress to skip
using var s = OpenEntryStream();
s.SkipEntry();
using var s = await OpenEntryStreamAsync();
await s.SkipEntryAsync();
}
public void WriteEntryTo(Stream writableStream)
public async Task WriteEntryToAsync(Stream writableStream)
{
if (_wroteCurrentEntry)
{
@@ -167,24 +168,29 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
);
}
Write(writableStream);
await WriteAsync(writableStream);
_wroteCurrentEntry = true;
}
internal void Write(Stream writeStream)
private async Task WriteAsync(Stream writeStream)
{
var streamListener = this as IReaderExtractionListener;
using Stream s = OpenEntryStream();
s.TransferTo(writeStream, Entry, streamListener);
#if !NETSTANDARD2_0 && !NETFRAMEWORK
await using Stream s = await OpenEntryStreamAsync();
#else
using Stream s = await OpenEntryStreamAsync();
#endif
await s.TransferToAsync(writeStream, Entry, streamListener);
}
public EntryStream OpenEntryStream()
public async Task<EntryStream> OpenEntryStreamAsync()
{
if (_wroteCurrentEntry)
{
throw new ArgumentException("WriteEntryTo or OpenEntryStream can only be called once.");
}
var stream = GetEntryStream();
var stream = await GetEntryStreamAsync();
_wroteCurrentEntry = true;
return stream;
}
@@ -192,11 +198,11 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
/// <summary>
/// Retains a reference to the entry stream, so we can check whether it completed later.
/// </summary>
protected EntryStream CreateEntryStream(Stream? decompressed) =>
new(this, decompressed.NotNull());
protected Task<EntryStream> CreateEntryStreamAsync(Stream? decompressed) =>
Task.FromResult(new EntryStream(this, decompressed.NotNull()));
protected virtual EntryStream GetEntryStream() =>
CreateEntryStream(Entry.Parts.First().GetCompressedStream());
protected virtual Task<EntryStream> GetEntryStreamAsync() =>
CreateEntryStreamAsync(Entry.Parts.First().GetCompressedStream());
#endregion

View File

@@ -24,7 +24,7 @@ namespace SharpCompress.Readers.Arc
/// <returns></returns>
public static ArcReader Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
stream.NotNull(nameof(stream));
return new ArcReader(stream, options ?? new ReaderOptions());
}

View File

@@ -22,7 +22,7 @@ public class GZipReader : AbstractReader<GZipEntry, GZipVolume>
/// <returns></returns>
public static GZipReader Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
stream.NotNull(nameof(stream));
return new GZipReader(stream, options ?? new ReaderOptions());
}

View File

@@ -1,5 +1,6 @@
using System;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Readers;
@@ -19,7 +20,7 @@ public interface IReader : IDisposable
/// Decompresses the current entry to the stream. This cannot be called twice for the current entry.
/// </summary>
/// <param name="writableStream"></param>
void WriteEntryTo(Stream writableStream);
Task WriteEntryToAsync(Stream writableStream);
bool Cancelled { get; }
void Cancel();
@@ -28,11 +29,11 @@ public interface IReader : IDisposable
/// Moves to the next entry by reading more data from the underlying stream. This skips if data has not been read.
/// </summary>
/// <returns></returns>
bool MoveToNextEntry();
Task<bool> MoveToNextEntryAsync();
/// <summary>
/// Opens the current entry as a stream that will decompress as it is read.
/// Read the entire stream or use SkipEntry on EntryStream.
/// </summary>
EntryStream OpenEntryStream();
Task<EntryStream> OpenEntryStreamAsync();
}

View File

@@ -1,68 +1,69 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Readers;
public static class IReaderExtensions
{
public static void WriteEntryTo(this IReader reader, string filePath)
public static async Task WriteEntryToAsync(this IReader reader, string filePath)
{
using Stream stream = File.Open(filePath, FileMode.Create, FileAccess.Write);
reader.WriteEntryTo(stream);
await reader.WriteEntryToAsync(stream);
}
public static void WriteEntryTo(this IReader reader, FileInfo filePath)
public static async Task WriteEntryToAsync(this IReader reader, FileInfo filePath)
{
using Stream stream = filePath.Open(FileMode.Create);
reader.WriteEntryTo(stream);
await reader.WriteEntryToAsync(stream);
}
/// <summary>
/// Extract all remaining unread entries to specific directory, retaining filename
/// </summary>
public static void WriteAllToDirectory(
public static async Task WriteAllToDirectoryAsync(
this IReader reader,
string destinationDirectory,
ExtractionOptions? options = null
)
{
while (reader.MoveToNextEntry())
while (await reader.MoveToNextEntryAsync())
{
reader.WriteEntryToDirectory(destinationDirectory, options);
await reader.WriteEntryToDirectoryAsync(destinationDirectory, options);
}
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteEntryToDirectory(
public static async Task WriteEntryToDirectoryAsync(
this IReader reader,
string destinationDirectory,
ExtractionOptions? options = null
) =>
ExtractionMethods.WriteEntryToDirectory(
await ExtractionMethods.WriteEntryToDirectoryAsync(
reader.Entry,
destinationDirectory,
options,
reader.WriteEntryToFile
reader.WriteEntryToFileAsync
);
/// <summary>
/// Extract to specific file
/// </summary>
public static void WriteEntryToFile(
public static async Task WriteEntryToFileAsync(
this IReader reader,
string destinationFileName,
ExtractionOptions? options = null
) =>
ExtractionMethods.WriteEntryToFile(
await ExtractionMethods.WriteEntryToFileAsync(
reader.Entry,
destinationFileName,
options,
(x, fm) =>
async (x, fm) =>
{
using var fs = File.Open(destinationFileName, fm);
reader.WriteEntryTo(fs);
await reader.WriteEntryToAsync(fs);
}
);
}
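
Taken together, the reworked reader surface is consumed as below. This is an illustrative usage sketch: MoveToNextEntryAsync and WriteEntryToDirectoryAsync come from the diffs above, while the ExtractionOptions flags are assumed carried over from the existing synchronous API.

    using System.IO;
    using System.Threading.Tasks;
    using SharpCompress.Common;
    using SharpCompress.Readers;

    internal static class AsyncExtractionExample
    {
        public static async Task ExtractAllAsync(string archivePath, string destination)
        {
            using Stream stream = File.OpenRead(archivePath);
            using var reader = ReaderFactory.Open(stream);
            while (await reader.MoveToNextEntryAsync())
            {
                if (reader.Entry.IsDirectory)
                {
                    continue;
                }
                await reader.WriteEntryToDirectoryAsync(
                    destination,
                    new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
                );
            }
        }
    }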

View File

@@ -2,6 +2,7 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Compressors.Rar;
@@ -40,6 +41,29 @@ public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
public override RarVolume? Volume => volume;
public static RarReader Open(string filePath, ReaderOptions? options = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), options);
}
public static RarReader Open(FileInfo fileInfo, ReaderOptions? options = null)
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
return Open(fileInfo.OpenRead(), options);
}
public static RarReader Open(IEnumerable<string> filePaths, ReaderOptions? options = null)
{
return Open(filePaths.Select(x => new FileInfo(x)), options);
}
public static RarReader Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? options = null)
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
return Open(fileInfos.Select(x => x.OpenRead()), options);
}
/// <summary>
/// Opens a RarReader for Non-seeking usage with a single volume
/// </summary>
@@ -48,7 +72,7 @@ public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
/// <returns></returns>
public static RarReader Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
stream.NotNull(nameof(stream));
return new SingleVolumeRarReader(stream, options ?? new ReaderOptions());
}
@@ -60,7 +84,7 @@ public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
/// <returns></returns>
public static RarReader Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
{
streams.CheckNotNull(nameof(streams));
streams.NotNull(nameof(streams));
return new MultiVolumeRarReader(streams, options ?? new ReaderOptions());
}
@@ -77,7 +101,7 @@ public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
protected virtual IEnumerable<FilePart> CreateFilePartEnumerableForCurrentEntry() =>
Entry.Parts;
protected override EntryStream GetEntryStream()
protected override async Task<EntryStream> GetEntryStreamAsync()
{
if (Entry.IsRedir)
{
@@ -90,16 +114,19 @@ public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
);
if (Entry.IsRarV3)
{
return CreateEntryStream(new RarCrcStream(UnpackV1.Value, Entry.FileHeader, stream));
return await CreateEntryStreamAsync(
await RarCrcStream.Create(UnpackV1.Value, Entry.FileHeader, stream)
);
}
if (Entry.FileHeader.FileCrc?.Length > 5)
{
return CreateEntryStream(
new RarBLAKE2spStream(UnpackV2017.Value, Entry.FileHeader, stream)
);
var s = await RarBLAKE2spStream.Create(UnpackV2017.Value, Entry.FileHeader, stream);
return await CreateEntryStreamAsync(s);
}
return CreateEntryStream(new RarCrcStream(UnpackV2017.Value, Entry.FileHeader, stream));
return await CreateEntryStreamAsync(
await RarCrcStream.Create(UnpackV2017.Value, Entry.FileHeader, stream)
);
}
}

View File

@@ -9,6 +9,18 @@ namespace SharpCompress.Readers;
public static class ReaderFactory
{
public static IReader Open(string filePath, ReaderOptions? options = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), options);
}
public static IReader Open(FileInfo fileInfo, ReaderOptions? options = null)
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
return Open(fileInfo.OpenRead(), options);
}
/// <summary>
/// Opens a Reader for Non-seeking usage
/// </summary>
@@ -17,7 +29,7 @@ public static class ReaderFactory
/// <returns></returns>
public static IReader Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
stream.NotNull(nameof(stream));
options ??= new ReaderOptions() { LeaveStreamOpen = false };
var bStream = new SharpCompressStream(stream, bufferSize: options.BufferSize);

View File

@@ -55,7 +55,7 @@ public class TarReader : AbstractReader<TarEntry, TarVolume>
/// <returns></returns>
public static TarReader Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
stream.NotNull(nameof(stream));
options = options ?? new ReaderOptions();
var rewindableStream = new SharpCompressStream(stream);

View File

@@ -44,7 +44,7 @@ public class ZipReader : AbstractReader<ZipEntry, ZipVolume>
/// <returns></returns>
public static ZipReader Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
stream.NotNull(nameof(stream));
return new ZipReader(stream, options ?? new ReaderOptions());
}
@@ -54,7 +54,7 @@ public class ZipReader : AbstractReader<ZipEntry, ZipVolume>
IEnumerable<ZipEntry> entries
)
{
stream.CheckNotNull(nameof(stream));
stream.NotNull(nameof(stream));
return new ZipReader(stream, options ?? new ReaderOptions(), entries);
}

View File

@@ -35,9 +35,14 @@
<DefineConstants>$(DefineConstants);DEBUG_STREAMS</DefineConstants>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="System.Buffers" />
<PackageReference Include="ZstdSharp.Port" />
<PackageReference Include="Microsoft.SourceLink.GitHub" PrivateAssets="All" />
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'net48' Or '$(TargetFramework)' == 'net481' ">
<PackageReference Include="Microsoft.Bcl.AsyncInterfaces" />
<PackageReference Include="System.Text.Encoding.CodePages" />
<PackageReference Include="System.Buffers" />
<PackageReference Include="System.Memory" />
<PackageReference Include="Microsoft.NETFramework.ReferenceAssemblies" PrivateAssets="All" />
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard2.0' ">

View File

@@ -1,16 +1,20 @@
global using SharpCompress.Helpers;
using System;
using System.Buffers;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Readers;
namespace SharpCompress.Helpers;
namespace SharpCompress;
internal static class Utility
{
//80KB (81920 bytes) matches the default buffer size used by Stream.CopyTo
private const int TEMP_BUFFER_SIZE = 81920;
private static readonly HashSet<char> invalidChars = new(Path.GetInvalidFileNameChars());
public static ReadOnlyCollection<T> ToReadOnly<T>(this IList<T> items) => new(items);
/// <summary>
@@ -19,14 +23,7 @@ internal static class Utility
/// <param name="number">Number to operate on</param>
/// <param name="bits">Amount of bits to shift</param>
/// <returns>The resulting number from the shift operation</returns>
public static int URShift(int number, int bits)
{
if (number >= 0)
{
return number >> bits;
}
return (number >> bits) + (2 << ~bits);
}
public static int URShift(int number, int bits) => (int)((uint)number >> bits);
/// <summary>
/// Performs an unsigned bitwise right shift with the specified number
@@ -34,14 +31,7 @@ internal static class Utility
/// <param name="number">Number to operate on</param>
/// <param name="bits">Amount of bits to shift</param>
/// <returns>The resulting number from the shift operation</returns>
public static long URShift(long number, int bits)
{
if (number >= 0)
{
return number >> bits;
}
return (number >> bits) + (2L << ~bits);
}
public static long URShift(long number, int bits) => (long)((ulong)number >> bits);
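The cast-based form is the standard way to express a logical (zero-fill) right shift in C#, and it is branch-free. A quick check of the equivalence, assuming the internal Utility class is in scope (values are exact):

int a = Utility.URShift(-8, 1);   // 2147483644 (0x7FFFFFFC), not -4 as a signed shift would give
long b = Utility.URShift(-8L, 1); // 9223372036854775804 (0x7FFFFFFFFFFFFFFC)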
public static void SetSize(this List<byte> list, int count)
{
@@ -68,60 +58,11 @@ internal static class Utility
}
}
public static void Copy(
Array sourceArray,
long sourceIndex,
Array destinationArray,
long destinationIndex,
long length
)
{
if (sourceIndex > int.MaxValue || sourceIndex < int.MinValue)
{
throw new ArgumentOutOfRangeException(nameof(sourceIndex));
}
if (destinationIndex > int.MaxValue || destinationIndex < int.MinValue)
{
throw new ArgumentOutOfRangeException(nameof(destinationIndex));
}
if (length > int.MaxValue || length < int.MinValue)
{
throw new ArgumentOutOfRangeException(nameof(length));
}
Array.Copy(
sourceArray,
(int)sourceIndex,
destinationArray,
(int)destinationIndex,
(int)length
);
}
public static IEnumerable<T> AsEnumerable<T>(this T item)
{
yield return item;
}
public static void CheckNotNull(this object obj, string name)
{
if (obj is null)
{
throw new ArgumentNullException(name);
}
}
public static void CheckNotNullOrEmpty(this string obj, string name)
{
obj.CheckNotNull(name);
if (obj.Length == 0)
{
throw new ArgumentException("String is empty.", name);
}
}
public static void Skip(this Stream source, long advanceAmount)
{
if (source.CanSeek)
@@ -130,39 +71,22 @@ internal static class Utility
return;
}
var buffer = GetTransferByteArray();
try
using var buffer = MemoryPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
while (advanceAmount > 0)
{
var read = 0;
var readCount = 0;
do
var toRead = (int)Math.Min(buffer.Memory.Length, advanceAmount);
var read = source.Read(buffer.Memory.Slice(0, toRead).Span);
if (read <= 0)
{
readCount = buffer.Length;
if (readCount > advanceAmount)
{
readCount = (int)advanceAmount;
}
read = source.Read(buffer, 0, readCount);
if (read <= 0)
{
break;
}
advanceAmount -= read;
if (advanceAmount == 0)
{
break;
}
} while (true);
}
finally
{
ArrayPool<byte>.Shared.Return(buffer);
break;
}
advanceAmount -= read;
}
}
public static void Skip(this Stream source)
{
var buffer = GetTransferByteArray();
var buffer = ArrayPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
try
{
do { } while (source.Read(buffer, 0, buffer.Length) == buffer.Length);
@@ -173,36 +97,17 @@ internal static class Utility
}
}
public static bool Find(this Stream source, byte[] array)
public static async Task SkipAsync(this Stream source)
{
var buffer = GetTransferByteArray();
var buffer = ArrayPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
try
{
var count = 0;
var len = source.Read(buffer, 0, buffer.Length);
do
{
for (var i = 0; i < len; i++)
{
if (array[count] == buffer[i])
{
count++;
if (count == array.Length)
{
source.Position = source.Position - len + i - array.Length + 1;
return true;
}
}
}
} while ((len = source.Read(buffer, 0, buffer.Length)) > 0);
do { } while (await source.ReadAsync(buffer, 0, buffer.Length) == buffer.Length);
}
finally
{
ArrayPool<byte>.Shared.Return(buffer);
}
return false;
}
public static DateTime DosDateToDateTime(ushort iDate, ushort iTime)
@@ -271,31 +176,12 @@ internal static class Utility
return sTime.AddSeconds(unixtime);
}
public static long TransferTo(this Stream source, Stream destination)
{
var array = GetTransferByteArray();
try
{
long total = 0;
while (ReadTransferBlock(source, array, out var count))
{
destination.Write(array, 0, count);
total += count;
}
return total;
}
finally
{
ArrayPool<byte>.Shared.Return(array);
}
}
public static long TransferTo(this Stream source, Stream destination, long maxLength)
{
var array = GetTransferByteArray();
var maxReadSize = array.Length;
var array = ArrayPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
try
{
var maxReadSize = array.Length;
long total = 0;
var remaining = maxLength;
if (remaining < maxReadSize)
@@ -324,22 +210,23 @@ internal static class Utility
}
}
public static long TransferTo(
public static async Task<long> TransferToAsync(
this Stream source,
Stream destination,
Common.Entry entry,
IReaderExtractionListener readerExtractionListener
)
{
var array = GetTransferByteArray();
var array = ArrayPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
try
{
var iterations = 0;
long total = 0;
while (ReadTransferBlock(source, array, out var count))
int count;
while ((count = await source.ReadAsync(array, 0, array.Length)) != 0)
{
total += count;
destination.Write(array, 0, count);
await destination.WriteAsync(array, 0, count);
iterations++;
readerExtractionListener.FireEntryExtractionProgress(entry, total, iterations);
}
@@ -351,12 +238,10 @@ internal static class Utility
}
}
private static bool ReadTransferBlock(Stream source, byte[] array, out int count) =>
(count = source.Read(array, 0, array.Length)) != 0;
private static bool ReadTransferBlock(Stream source, byte[] array, int size, out int count)
private static bool ReadTransferBlock(Stream source, byte[] array, int maxSize, out int count)
{
if (size > array.Length)
var size = maxSize;
if (maxSize > array.Length)
{
size = array.Length;
}
@@ -364,8 +249,34 @@ internal static class Utility
return count != 0;
}
private static byte[] GetTransferByteArray() => ArrayPool<byte>.Shared.Rent(81920);
#if NET7_0_OR_GREATER // Stream.ReadExactly is only available on .NET 7+
public static bool ReadFully(this Stream stream, byte[] buffer)
{
try
{
stream.ReadExactly(buffer);
return true;
}
catch (EndOfStreamException)
{
return false;
}
}
public static bool ReadFully(this Stream stream, Span<byte> buffer)
{
try
{
stream.ReadExactly(buffer);
return true;
}
catch (EndOfStreamException)
{
return false;
}
}
#else
public static bool ReadFully(this Stream stream, byte[] buffer)
{
var total = 0;
@@ -395,6 +306,7 @@ internal static class Utility
}
return (total >= buffer.Length);
}
#endif
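ReadFully keeps a bool "did we fill the whole buffer" contract rather than throwing: on .NET 7+ it delegates to Stream.ReadExactly and translates EndOfStreamException, down-level it loops manually. A small usage sketch of that contract (header size illustrative):

var header = new byte[16];
if (!stream.ReadFully(header))
{
    throw new EndOfStreamException("Truncated archive header.");
}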
public static string TrimNulls(this string source) => source.Replace('\0', ' ').Trim();
@@ -439,7 +351,6 @@ internal static class Utility
public static string ReplaceInvalidFileNameChars(string fileName)
{
var invalidChars = new HashSet<char>(Path.GetInvalidFileNameChars());
var sb = new StringBuilder(fileName.Length);
foreach (var c in fileName)
{

View File

@@ -47,7 +47,7 @@ public sealed class GZipWriter : AbstractWriter
var stream = (GZipStream)OutputStream;
stream.FileName = filename;
stream.LastModified = modificationTime;
source.TransferTo(stream);
source.CopyTo(stream);
_wroteToStream = true;
}
}

View File

@@ -83,7 +83,7 @@ public class ZipWriter : AbstractWriter
public void Write(string entryPath, Stream source, ZipWriterEntryOptions zipWriterEntryOptions)
{
using var output = WriteToStream(entryPath, zipWriterEntryOptions);
source.TransferTo(output);
source.CopyTo(output);
}
public Stream WriteToStream(string entryPath, ZipWriterEntryOptions options)

View File

@@ -2,6 +2,15 @@
"version": 2,
"dependencies": {
".NETFramework,Version=v4.8": {
"Microsoft.Bcl.AsyncInterfaces": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "3WA9q9yVqJp222P3x1wYIGDAkpjAku0TMUaaQV22g6L67AI0LdOIrVS7Ht2vJfLHGSPVuqN94vIr15qn+HEkHw==",
"dependencies": {
"System.Threading.Tasks.Extensions": "4.5.4"
}
},
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",
"requested": "[1.0.3, )",
@@ -23,9 +32,30 @@
},
"System.Buffers": {
"type": "Direct",
"requested": "[4.6.0, )",
"resolved": "4.6.0",
"contentHash": "lN6tZi7Q46zFzAbRYXTIvfXcyvQQgxnY7Xm6C6xQ9784dEL1amjM6S6Iw4ZpsvesAKnRVsM4scrDQaDqSClkjA=="
"requested": "[4.6.1, )",
"resolved": "4.6.1",
"contentHash": "N8GXpmiLMtljq7gwvyS+1QvKT/W2J8sNAvx+HVg4NGmsG/H+2k/y9QI23auLJRterrzCiDH+IWAw4V/GPwsMlw=="
},
"System.Memory": {
"type": "Direct",
"requested": "[4.6.3, )",
"resolved": "4.6.3",
"contentHash": "qdcDOgnFZY40+Q9876JUHnlHu7bosOHX8XISRoH94fwk6hgaeQGSgfZd8srWRZNt5bV9ZW2TljcegDNxsf+96A==",
"dependencies": {
"System.Buffers": "4.6.1",
"System.Numerics.Vectors": "4.6.1",
"System.Runtime.CompilerServices.Unsafe": "6.1.2"
}
},
"System.Text.Encoding.CodePages": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "OZIsVplFGaVY90G2SbpgU7EnCoOO5pw1t4ic21dBF3/1omrJFpAGoNAVpPyMVOC90/hvgkGG3VFqR13YgZMQfg==",
"dependencies": {
"System.Memory": "4.5.5",
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
}
},
"ZstdSharp.Port": {
"type": "Direct",
@@ -55,13 +85,13 @@
},
"System.Numerics.Vectors": {
"type": "Transitive",
"resolved": "4.5.0",
"contentHash": "QQTlPTl06J/iiDbJCiepZ4H//BVraReU4O4EoRw1U02H5TLUIT7xn3GnDp9AXPSlJUDyFs4uWjWafNX6WrAojQ=="
"resolved": "4.6.1",
"contentHash": "sQxefTnhagrhoq2ReR0D/6K0zJcr9Hrd6kikeXsA1I8kOCboTavcUC4r7TSfpKFeE163uMuxZcyfO1mGO3EN8Q=="
},
"System.Runtime.CompilerServices.Unsafe": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "/iUeP3tq1S0XdNNoMz5C9twLSrM/TH+qElHkXWaPvuNOt+99G75NrV0OS2EqHx5wMN7popYjpc8oTjC1y16DLg=="
"resolved": "6.1.2",
"contentHash": "2hBr6zdbIBTDE3EhK7NSVNdX58uTK6iHW/P/Axmm9sl1xoGSLqDvMtpecn226TNwHByFokYwJmt/aQQNlO5CRw=="
},
"System.Threading.Tasks.Extensions": {
"type": "Transitive",
@@ -70,29 +100,18 @@
"dependencies": {
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
},
}
},
".NETFramework,Version=v4.8.1": {
"Microsoft.Bcl.AsyncInterfaces": {
"type": "CentralTransitive",
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "5.0.0",
"contentHash": "W8DPQjkMScOMTtJbPwmPyj9c3zYSFGawDW3jwlBOOsnY+EzZFLgNQ/UMkK35JmkNOVPdCyPr2Tw7Vv9N+KA3ZQ==",
"resolved": "8.0.0",
"contentHash": "3WA9q9yVqJp222P3x1wYIGDAkpjAku0TMUaaQV22g6L67AI0LdOIrVS7Ht2vJfLHGSPVuqN94vIr15qn+HEkHw==",
"dependencies": {
"System.Threading.Tasks.Extensions": "4.5.4"
}
},
"System.Memory": {
"type": "CentralTransitive",
"requested": "[4.6.0, )",
"resolved": "4.5.5",
"contentHash": "XIWiDvKPXaTveaB7HVganDlOCRoj03l+jrwNvcge/t8vhGYKvqV+dMv6G4SAX2NoNmN0wZfVPTAlFwZcZvVOUw==",
"dependencies": {
"System.Buffers": "4.5.1",
"System.Numerics.Vectors": "4.5.0",
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
}
},
".NETFramework,Version=v4.8.1": {
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",
"requested": "[1.0.3, )",
@@ -114,9 +133,30 @@
},
"System.Buffers": {
"type": "Direct",
"requested": "[4.6.0, )",
"resolved": "4.6.0",
"contentHash": "lN6tZi7Q46zFzAbRYXTIvfXcyvQQgxnY7Xm6C6xQ9784dEL1amjM6S6Iw4ZpsvesAKnRVsM4scrDQaDqSClkjA=="
"requested": "[4.6.1, )",
"resolved": "4.6.1",
"contentHash": "N8GXpmiLMtljq7gwvyS+1QvKT/W2J8sNAvx+HVg4NGmsG/H+2k/y9QI23auLJRterrzCiDH+IWAw4V/GPwsMlw=="
},
"System.Memory": {
"type": "Direct",
"requested": "[4.6.3, )",
"resolved": "4.6.3",
"contentHash": "qdcDOgnFZY40+Q9876JUHnlHu7bosOHX8XISRoH94fwk6hgaeQGSgfZd8srWRZNt5bV9ZW2TljcegDNxsf+96A==",
"dependencies": {
"System.Buffers": "4.6.1",
"System.Numerics.Vectors": "4.6.1",
"System.Runtime.CompilerServices.Unsafe": "6.1.2"
}
},
"System.Text.Encoding.CodePages": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "OZIsVplFGaVY90G2SbpgU7EnCoOO5pw1t4ic21dBF3/1omrJFpAGoNAVpPyMVOC90/hvgkGG3VFqR13YgZMQfg==",
"dependencies": {
"System.Memory": "4.5.5",
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
}
},
"ZstdSharp.Port": {
"type": "Direct",
@@ -146,13 +186,13 @@
},
"System.Numerics.Vectors": {
"type": "Transitive",
"resolved": "4.5.0",
"contentHash": "QQTlPTl06J/iiDbJCiepZ4H//BVraReU4O4EoRw1U02H5TLUIT7xn3GnDp9AXPSlJUDyFs4uWjWafNX6WrAojQ=="
"resolved": "4.6.1",
"contentHash": "sQxefTnhagrhoq2ReR0D/6K0zJcr9Hrd6kikeXsA1I8kOCboTavcUC4r7TSfpKFeE163uMuxZcyfO1mGO3EN8Q=="
},
"System.Runtime.CompilerServices.Unsafe": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "/iUeP3tq1S0XdNNoMz5C9twLSrM/TH+qElHkXWaPvuNOt+99G75NrV0OS2EqHx5wMN7popYjpc8oTjC1y16DLg=="
"resolved": "6.1.2",
"contentHash": "2hBr6zdbIBTDE3EhK7NSVNdX58uTK6iHW/P/Axmm9sl1xoGSLqDvMtpecn226TNwHByFokYwJmt/aQQNlO5CRw=="
},
"System.Threading.Tasks.Extensions": {
"type": "Transitive",
@@ -161,26 +201,6 @@
"dependencies": {
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
},
"Microsoft.Bcl.AsyncInterfaces": {
"type": "CentralTransitive",
"requested": "[8.0.0, )",
"resolved": "5.0.0",
"contentHash": "W8DPQjkMScOMTtJbPwmPyj9c3zYSFGawDW3jwlBOOsnY+EzZFLgNQ/UMkK35JmkNOVPdCyPr2Tw7Vv9N+KA3ZQ==",
"dependencies": {
"System.Threading.Tasks.Extensions": "4.5.4"
}
},
"System.Memory": {
"type": "CentralTransitive",
"requested": "[4.6.0, )",
"resolved": "4.5.5",
"contentHash": "XIWiDvKPXaTveaB7HVganDlOCRoj03l+jrwNvcge/t8vhGYKvqV+dMv6G4SAX2NoNmN0wZfVPTAlFwZcZvVOUw==",
"dependencies": {
"System.Buffers": "4.5.1",
"System.Numerics.Vectors": "4.5.0",
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
}
},
".NETStandard,Version=v2.0": {
@@ -193,15 +213,6 @@
"System.Threading.Tasks.Extensions": "4.5.4"
}
},
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",
"requested": "[1.0.3, )",
"resolved": "1.0.3",
"contentHash": "vUc9Npcs14QsyOD01tnv/m8sQUnGTGOw1BCmKcv77LBJY7OxhJ+zJF7UD/sCL3lYNFuqmQEVlkfS4Quif6FyYg==",
"dependencies": {
"Microsoft.NETFramework.ReferenceAssemblies.net461": "1.0.3"
}
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
@@ -221,21 +232,15 @@
"Microsoft.NETCore.Platforms": "1.1.0"
}
},
"System.Buffers": {
"type": "Direct",
"requested": "[4.6.0, )",
"resolved": "4.6.0",
"contentHash": "lN6tZi7Q46zFzAbRYXTIvfXcyvQQgxnY7Xm6C6xQ9784dEL1amjM6S6Iw4ZpsvesAKnRVsM4scrDQaDqSClkjA=="
},
"System.Memory": {
"type": "Direct",
"requested": "[4.6.0, )",
"resolved": "4.6.0",
"contentHash": "OEkbBQoklHngJ8UD8ez2AERSk2g+/qpAaSWWCBFbpH727HxDq5ydVkuncBaKcKfwRqXGWx64dS6G1SUScMsitg==",
"requested": "[4.6.3, )",
"resolved": "4.6.3",
"contentHash": "qdcDOgnFZY40+Q9876JUHnlHu7bosOHX8XISRoH94fwk6hgaeQGSgfZd8srWRZNt5bV9ZW2TljcegDNxsf+96A==",
"dependencies": {
"System.Buffers": "4.6.0",
"System.Numerics.Vectors": "4.6.0",
"System.Runtime.CompilerServices.Unsafe": "6.1.0"
"System.Buffers": "4.6.1",
"System.Numerics.Vectors": "4.6.1",
"System.Runtime.CompilerServices.Unsafe": "6.1.2"
}
},
"System.Text.Encoding.CodePages": {
@@ -269,11 +274,6 @@
"resolved": "1.1.0",
"contentHash": "kz0PEW2lhqygehI/d6XsPCQzD7ff7gUJaVGPVETX611eadGsA3A877GdSlU0LRVMCTH/+P3o2iDTak+S08V2+A=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net461": {
"type": "Transitive",
"resolved": "1.0.3",
"contentHash": "AmOJZwCqnOCNp6PPcf9joyogScWLtwy0M1WkqfEQ0M9nYwyDD7EX9ZjscKS5iYnyvteX7kzSKFCKt9I9dXA6mA=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
@@ -281,13 +281,13 @@
},
"System.Numerics.Vectors": {
"type": "Transitive",
"resolved": "4.6.0",
"contentHash": "t+SoieZsRuEyiw/J+qXUbolyO219tKQQI0+2/YI+Qv7YdGValA6WiuokrNKqjrTNsy5ABWU11bdKOzUdheteXg=="
"resolved": "4.6.1",
"contentHash": "sQxefTnhagrhoq2ReR0D/6K0zJcr9Hrd6kikeXsA1I8kOCboTavcUC4r7TSfpKFeE163uMuxZcyfO1mGO3EN8Q=="
},
"System.Runtime.CompilerServices.Unsafe": {
"type": "Transitive",
"resolved": "6.1.0",
"contentHash": "5o/HZxx6RVqYlhKSq8/zronDkALJZUT2Vz0hx43f0gwe8mwlM0y2nYlqdBwLMzr262Bwvpikeb/yEwkAa5PADg=="
"resolved": "6.1.2",
"contentHash": "2hBr6zdbIBTDE3EhK7NSVNdX58uTK6iHW/P/Axmm9sl1xoGSLqDvMtpecn226TNwHByFokYwJmt/aQQNlO5CRw=="
},
"System.Threading.Tasks.Extensions": {
"type": "Transitive",
@@ -296,18 +296,15 @@
"dependencies": {
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
},
"System.Buffers": {
"type": "CentralTransitive",
"requested": "[4.6.1, )",
"resolved": "4.6.1",
"contentHash": "N8GXpmiLMtljq7gwvyS+1QvKT/W2J8sNAvx+HVg4NGmsG/H+2k/y9QI23auLJRterrzCiDH+IWAw4V/GPwsMlw=="
}
},
"net6.0": {
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",
"requested": "[1.0.3, )",
"resolved": "1.0.3",
"contentHash": "vUc9Npcs14QsyOD01tnv/m8sQUnGTGOw1BCmKcv77LBJY7OxhJ+zJF7UD/sCL3lYNFuqmQEVlkfS4Quif6FyYg==",
"dependencies": {
"Microsoft.NETFramework.ReferenceAssemblies.net461": "1.0.3"
}
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
@@ -318,12 +315,6 @@
"Microsoft.SourceLink.Common": "8.0.0"
}
},
"System.Buffers": {
"type": "Direct",
"requested": "[4.6.0, )",
"resolved": "4.6.0",
"contentHash": "lN6tZi7Q46zFzAbRYXTIvfXcyvQQgxnY7Xm6C6xQ9784dEL1amjM6S6Iw4ZpsvesAKnRVsM4scrDQaDqSClkjA=="
},
"ZstdSharp.Port": {
"type": "Direct",
"requested": "[0.8.6, )",
@@ -335,11 +326,6 @@
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net461": {
"type": "Transitive",
"resolved": "1.0.3",
"contentHash": "AmOJZwCqnOCNp6PPcf9joyogScWLtwy0M1WkqfEQ0M9nYwyDD7EX9ZjscKS5iYnyvteX7kzSKFCKt9I9dXA6mA=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
@@ -353,15 +339,6 @@
"resolved": "8.0.17",
"contentHash": "x5/y4l8AtshpBOrCZdlE4txw8K3e3s9meBFeZeR3l8hbbku2V7kK6ojhXvrbjg1rk3G+JqL1BI26gtgc1ZrdUw=="
},
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",
"requested": "[1.0.3, )",
"resolved": "1.0.3",
"contentHash": "vUc9Npcs14QsyOD01tnv/m8sQUnGTGOw1BCmKcv77LBJY7OxhJ+zJF7UD/sCL3lYNFuqmQEVlkfS4Quif6FyYg==",
"dependencies": {
"Microsoft.NETFramework.ReferenceAssemblies.net461": "1.0.3"
}
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
@@ -372,12 +349,6 @@
"Microsoft.SourceLink.Common": "8.0.0"
}
},
"System.Buffers": {
"type": "Direct",
"requested": "[4.6.0, )",
"resolved": "4.6.0",
"contentHash": "lN6tZi7Q46zFzAbRYXTIvfXcyvQQgxnY7Xm6C6xQ9784dEL1amjM6S6Iw4ZpsvesAKnRVsM4scrDQaDqSClkjA=="
},
"ZstdSharp.Port": {
"type": "Direct",
"requested": "[0.8.6, )",
@@ -389,11 +360,6 @@
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net461": {
"type": "Transitive",
"resolved": "1.0.3",
"contentHash": "AmOJZwCqnOCNp6PPcf9joyogScWLtwy0M1WkqfEQ0M9nYwyDD7EX9ZjscKS5iYnyvteX7kzSKFCKt9I9dXA6mA=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",

View File

@@ -0,0 +1,49 @@
using System;
using JetBrains.Profiler.SelfApi;
namespace SharpCompress.Test;
public static class JetbrainsProfiler
{
private sealed class CpuClass : IDisposable
{
public CpuClass(string snapshotPath)
{
DotTrace.Init();
var config = new DotTrace.Config();
config.SaveToDir(snapshotPath);
DotTrace.Attach(config);
DotTrace.StartCollectingData();
}
public void Dispose()
{
DotTrace.StopCollectingData();
DotTrace.SaveData();
DotTrace.Detach();
}
}
private sealed class MemoryClass : IDisposable
{
public MemoryClass(string snapshotPath)
{
DotMemory.Init();
var config = new DotMemory.Config();
config.UseLogLevelVerbose();
config.SaveToDir(snapshotPath);
DotMemory.Attach(config);
DotMemory.GetSnapshot("Before");
}
public void Dispose()
{
DotMemory.GetSnapshot("After");
DotMemory.Detach();
}
}
public static IDisposable Cpu(string snapshotPath) => new CpuClass(snapshotPath);
public static IDisposable Memory(string snapshotPath) => new MemoryClass(snapshotPath);
}

View File

@@ -0,0 +1,280 @@
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Performance;
/// <summary>
/// A Stream implementation backed by a List of byte arrays that supports large position values.
/// This allows handling streams larger than the 2GB limit of a single byte array
/// </summary>
public class LargeMemoryStream : Stream
{
private readonly List<byte[]> _chunks;
private readonly int _chunkSize;
private long _position;
private long _length;
private bool _isDisposed;
/// <summary>
/// Initializes a new instance of the LargeMemoryStream class.
/// </summary>
/// <param name="chunkSize">The size of each chunk in the backing byte array list. Defaults to 1MB.</param>
public LargeMemoryStream(int chunkSize = 1024 * 1024)
{
if (chunkSize <= 0)
throw new ArgumentException("Chunk size must be greater than zero.", nameof(chunkSize));
_chunks = new List<byte[]>();
_chunkSize = chunkSize;
_position = 0;
_length = 0;
}
public override bool CanRead => true;
public override bool CanSeek => true;
public override bool CanWrite => true;
public override long Length
{
get
{
ThrowIfDisposed();
// Chunks are always allocated full-size, so the logical length is tracked explicitly
return _length;
}
}
public override long Position
{
get
{
ThrowIfDisposed();
return _position;
}
set
{
ThrowIfDisposed();
if (value < 0)
throw new ArgumentOutOfRangeException(
nameof(value),
"Position cannot be negative."
);
_position = value;
}
}
public override void Flush()
{
ThrowIfDisposed();
// No-op for in-memory stream
}
public override int Read(byte[] buffer, int offset, int count)
{
ThrowIfDisposed();
if (buffer == null)
throw new ArgumentNullException(nameof(buffer));
if (offset < 0 || count < 0 || offset + count > buffer.Length)
throw new ArgumentOutOfRangeException();
long length = Length;
if (_position >= length)
return 0;
int bytesToRead = (int)Math.Min(count, length - _position);
int bytesRead = 0;
while (bytesRead < bytesToRead)
{
long chunkIndex = _position / _chunkSize;
int chunkOffset = (int)(_position % _chunkSize);
if (chunkIndex >= _chunks.Count)
break;
byte[] chunk = _chunks[(int)chunkIndex];
int availableInChunk = chunk.Length - chunkOffset;
int bytesToCopyFromChunk = Math.Min(availableInChunk, bytesToRead - bytesRead);
Array.Copy(chunk, chunkOffset, buffer, offset + bytesRead, bytesToCopyFromChunk);
_position += bytesToCopyFromChunk;
bytesRead += bytesToCopyFromChunk;
}
return bytesRead;
}
public override void Write(byte[] buffer, int offset, int count)
{
ThrowIfDisposed();
if (buffer == null)
throw new ArgumentNullException(nameof(buffer));
if (offset < 0 || count < 0 || offset + count > buffer.Length)
throw new ArgumentOutOfRangeException();
int bytesWritten = 0;
while (bytesWritten < count)
{
long chunkIndex = _position / _chunkSize;
int chunkOffset = (int)(_position % _chunkSize);
// Ensure we have enough chunks
while (_chunks.Count <= chunkIndex)
{
_chunks.Add(new byte[_chunkSize]);
}
byte[] chunk = _chunks[(int)chunkIndex];
int availableInChunk = chunk.Length - chunkOffset;
int bytesToCopyToChunk = Math.Min(availableInChunk, count - bytesWritten);
Array.Copy(buffer, offset + bytesWritten, chunk, chunkOffset, bytesToCopyToChunk);
_position += bytesToCopyToChunk;
bytesWritten += bytesToCopyToChunk;
}
if (_position > _length)
{
_length = _position;
}
}
public override long Seek(long offset, SeekOrigin origin)
{
ThrowIfDisposed();
long newPosition = origin switch
{
SeekOrigin.Begin => offset,
SeekOrigin.Current => _position + offset,
SeekOrigin.End => Length + offset,
_ => throw new ArgumentOutOfRangeException(nameof(origin)),
};
if (newPosition < 0)
throw new ArgumentOutOfRangeException(
nameof(offset),
"Cannot seek before the beginning of the stream."
);
_position = newPosition;
return _position;
}
public override void SetLength(long value)
{
ThrowIfDisposed();
if (value < 0)
throw new ArgumentOutOfRangeException(nameof(value), "Length cannot be negative.");
long currentLength = Length;
if (value < currentLength)
{
// Truncate: drop whole chunks beyond the new length; chunks stay full-size
var neededChunks = (int)((value + _chunkSize - 1) / _chunkSize);
if (neededChunks < _chunks.Count)
{
_chunks.RemoveRange(neededChunks, _chunks.Count - neededChunks);
}
// Zero the tail of the boundary chunk so a later extension reads back as zeros
if (_chunks.Count > 0)
{
var tailOffset = (int)(value % _chunkSize);
if (tailOffset > 0)
{
Array.Clear(_chunks[^1], tailOffset, _chunkSize - tailOffset);
}
}
if (_position > value)
{
_position = value;
}
}
else if (value > currentLength)
{
// Extend with zeros: newly allocated chunks are already zero-filled
while ((long)_chunks.Count * _chunkSize < value)
{
_chunks.Add(new byte[_chunkSize]);
}
}
_length = value;
}
/// <summary>
/// Gets the number of chunks in the backing list.
/// </summary>
public int ChunkCount => _chunks.Count;
/// <summary>
/// Gets the size of each chunk in bytes.
/// </summary>
public int ChunkSize => _chunkSize;
/// <summary>
/// Converts the stream contents to a single byte array.
/// This may consume significant memory for large streams.
/// </summary>
public byte[] ToArray()
{
ThrowIfDisposed();
long length = Length;
byte[] result = new byte[length];
long currentPosition = _position;
try
{
_position = 0;
int totalRead = 0;
while (totalRead < length)
{
int bytesToRead = (int)Math.Min(length - totalRead, int.MaxValue);
int bytesRead = Read(result, totalRead, bytesToRead);
if (bytesRead == 0)
break;
totalRead += bytesRead;
}
}
finally
{
_position = currentPosition;
}
return result;
}
private void ThrowIfDisposed()
{
if (_isDisposed)
throw new ObjectDisposedException(GetType().Name);
}
protected override void Dispose(bool disposing)
{
if (!_isDisposed)
{
if (disposing)
{
_chunks.Clear();
}
_isDisposed = true;
}
base.Dispose(disposing);
}
}
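A quick behavioral sketch, with a deliberately tiny chunk size to force boundary crossings (values illustrative):

var ms = new LargeMemoryStream(chunkSize: 8);
ms.Write(new byte[20], 0, 20);    // spans three 8-byte chunks
Console.WriteLine(ms.Length);     // 20
Console.WriteLine(ms.ChunkCount); // 3
ms.Position = 6;
var buf = new byte[4];
ms.Read(buf, 0, 4);               // this read straddles the first chunk boundary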

View File

@@ -0,0 +1,54 @@
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Performance;
using SharpCompress.Readers;
using SharpCompress.Test;
var index = AppDomain.CurrentDomain.BaseDirectory.IndexOf(
"SharpCompress.Performance",
StringComparison.OrdinalIgnoreCase
);
var path = AppDomain.CurrentDomain.BaseDirectory.Substring(0, index);
var SOLUTION_BASE_PATH = Path.GetDirectoryName(path) ?? throw new ArgumentNullException();
var TEST_ARCHIVES_PATH = Path.Combine(SOLUTION_BASE_PATH, "TestArchives", "Archives");
//using var _ = JetbrainsProfiler.Memory("/Users/adam/temp/");
using (var __ = JetbrainsProfiler.Cpu("/Users/adam/temp/"))
{
var testArchives = new[]
{
"Rar.Audio_program.rar",
//"64bitstream.zip.7z",
//"TarWithSymlink.tar.gz"
};
var arcs = testArchives.Select(a => Path.Combine(TEST_ARCHIVES_PATH, a)).ToArray();
for (int i = 0; i < 50; i++)
{
using var found = ArchiveFactory.Open(arcs[0]);
foreach (var entry in found.Entries.Where(entry => !entry.IsDirectory))
{
Console.WriteLine($"Extracting {entry.Key}");
using var entryStream = entry.OpenEntryStream();
entryStream.CopyTo(Stream.Null);
}
/*using var found = ReaderFactory.Open(arcs[0]);
while (found.MoveToNextEntry())
{
var entry = found.Entry;
if (entry.IsDirectory)
continue;
Console.WriteLine($"Extracting {entry.Key}");
found.WriteEntryTo(Stream.Null);
}*/
}
Console.WriteLine("Still running...");
}
await Task.Delay(500);

View File

@@ -0,0 +1,10 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net8.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="JetBrains.Profiler.SelfApi" />
<ProjectReference Include="..\..\src\SharpCompress\SharpCompress.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,50 @@
{
"version": 2,
"dependencies": {
"net8.0": {
"JetBrains.Profiler.SelfApi": {
"type": "Direct",
"requested": "[2.5.14, )",
"resolved": "2.5.14",
"contentHash": "9+NcTe49B2M8/MOledSxKZkQKqavFf5xXZw4JL4bVu/KYiw6OOaD6cDQmNGSO18yUP/WoBXsXGKmZ9VOpmyadw==",
"dependencies": {
"JetBrains.HabitatDetector": "1.4.5",
"JetBrains.Profiler.Api": "1.4.10"
}
},
"JetBrains.FormatRipper": {
"type": "Transitive",
"resolved": "2.4.0",
"contentHash": "k5eGab1DArJH0k94ZO9oxDxg8go1KvR1oPGPzyVvfplEHetgrc2hGZ6Cken8fVsdS/Xp3hMnHd9L5MXb7JJM4A=="
},
"JetBrains.HabitatDetector": {
"type": "Transitive",
"resolved": "1.4.5",
"contentHash": "5kb1G32O8fmlS2QnJLycEnHbq9ukuDUHQll4mqOAPLEE1JEJcz12W6cTt1CMpQY3n/6R0jZAhmBvaJm2zixvow==",
"dependencies": {
"JetBrains.FormatRipper": "2.4.0"
}
},
"JetBrains.Profiler.Api": {
"type": "Transitive",
"resolved": "1.4.10",
"contentHash": "XBynPGDiWB6uWoiVwkki3uUsXqc66lRC1YX8LWYWc579ioJSB5OzZ8KsRK2q+eawj3OxrkeCsgXlb6mwBkCebQ==",
"dependencies": {
"JetBrains.HabitatDetector": "1.4.5"
}
},
"sharpcompress": {
"type": "Project",
"dependencies": {
"ZstdSharp.Port": "[0.8.6, )"
}
},
"ZstdSharp.Port": {
"type": "CentralTransitive",
"requested": "[0.8.6, )",
"resolved": "0.8.6",
"contentHash": "iP4jVLQoQmUjMU88g1WObiNr6YKZGvh4aOXn3yOJsHqZsflwRsxZPcIBvNXgjXO3vQKSLctXGLTpcBPLnWPS8A=="
}
}
}
}

View File

@@ -1,8 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
@@ -20,31 +16,32 @@ namespace SharpCompress.Test.Arc
}
[Fact]
public void Arc_Uncompressed_Read() => Read("Arc.uncompressed.arc", CompressionType.None);
public Task Arc_Uncompressed_Read() =>
ReadAsync("Arc.uncompressed.arc", CompressionType.None);
[Fact]
public void Arc_Squeezed_Read()
public async Task Arc_Squeezed_Read()
{
ProcessArchive("Arc.squeezed.arc");
await ProcessArchive("Arc.squeezed.arc");
}
[Fact]
public void Arc_Crunched_Read()
public async Task Arc_Crunched_Read()
{
ProcessArchive("Arc.crunched.arc");
await ProcessArchive("Arc.crunched.arc");
}
private void ProcessArchive(string archiveName)
private async Task ProcessArchive(string archiveName)
{
// Process a given archive by its name
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, archiveName)))
using (IReader reader = ArcReader.Open(stream))
{
while (reader.MoveToNextEntry())
while (await reader.MoveToNextEntryAsync())
{
if (!reader.Entry.IsDirectory)
{
reader.WriteEntryToDirectory(
await reader.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);

View File

@@ -2,6 +2,7 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.Compressors.Xz;
@@ -27,13 +28,16 @@ public class ArchiveTests : ReaderTests
}
}
protected void ArchiveStreamReadExtractAll(string testArchive, CompressionType compression)
protected async Task ArchiveStreamReadExtractAllAsync(
string testArchive,
CompressionType compression
)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
ArchiveStreamReadExtractAll(new[] { testArchive }, compression);
await ArchiveStreamReadExtractAllAsync(new[] { testArchive }, compression);
}
protected void ArchiveStreamReadExtractAll(
protected async Task ArchiveStreamReadExtractAllAsync(
IEnumerable<string> testArchives,
CompressionType compression
)
@@ -54,7 +58,7 @@ public class ArchiveTests : ReaderTests
Assert.True(archive.IsSolid);
using (var reader = archive.ExtractAllEntries())
{
UseReader(reader, compression);
await UseReaderAsync(reader, compression);
}
VerifyFiles();
@@ -65,7 +69,7 @@ public class ArchiveTests : ReaderTests
}
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
entry.WriteToDirectory(
await entry.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -83,36 +87,38 @@ public class ArchiveTests : ReaderTests
}
}
protected void ArchiveStreamRead(string testArchive, ReaderOptions? readerOptions = null) =>
ArchiveStreamRead(ArchiveFactory.AutoFactory, testArchive, readerOptions);
protected Task ArchiveStreamReadAsync(
string testArchive,
ReaderOptions? readerOptions = null
) => ArchiveStreamReadAsync(ArchiveFactory.AutoFactory, testArchive, readerOptions);
protected void ArchiveStreamRead(
protected Task ArchiveStreamReadAsync(
IArchiveFactory archiveFactory,
string testArchive,
ReaderOptions? readerOptions = null
)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
ArchiveStreamRead(archiveFactory, readerOptions, testArchive);
return ArchiveStreamReadAsync(archiveFactory, readerOptions, testArchive);
}
protected void ArchiveStreamRead(
protected Task ArchiveStreamReadAsync(
ReaderOptions? readerOptions = null,
params string[] testArchives
) => ArchiveStreamRead(ArchiveFactory.AutoFactory, readerOptions, testArchives);
) => ArchiveStreamReadAsync(ArchiveFactory.AutoFactory, readerOptions, testArchives);
protected void ArchiveStreamRead(
protected Task ArchiveStreamReadAsync(
IArchiveFactory archiveFactory,
ReaderOptions? readerOptions = null,
params string[] testArchives
) =>
ArchiveStreamRead(
ArchiveStreamReadAsync(
archiveFactory,
readerOptions,
testArchives.Select(x => Path.Combine(TEST_ARCHIVES_PATH, x))
);
protected void ArchiveStreamRead(
protected async Task ArchiveStreamReadAsync(
IArchiveFactory archiveFactory,
ReaderOptions? readerOptions,
IEnumerable<string> testArchives
@@ -133,7 +139,7 @@ public class ArchiveTests : ReaderTests
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
entry.WriteToDirectory(
await entry.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -151,16 +157,16 @@ public class ArchiveTests : ReaderTests
}
}
protected void ArchiveStreamMultiRead(
protected Task ArchiveStreamMultiReadAsync(
ReaderOptions? readerOptions = null,
params string[] testArchives
) =>
ArchiveStreamMultiRead(
ArchiveStreamMultiReadAsync(
readerOptions,
testArchives.Select(x => Path.Combine(TEST_ARCHIVES_PATH, x))
);
protected void ArchiveStreamMultiRead(
protected async Task ArchiveStreamMultiReadAsync(
ReaderOptions? readerOptions,
IEnumerable<string> testArchives
)
@@ -174,7 +180,7 @@ public class ArchiveTests : ReaderTests
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
entry.WriteToDirectory(
await entry.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -183,16 +189,16 @@ public class ArchiveTests : ReaderTests
VerifyFiles();
}
protected void ArchiveOpenStreamRead(
protected Task ArchiveOpenStreamReadAsync(
ReaderOptions? readerOptions = null,
params string[] testArchives
) =>
ArchiveOpenStreamRead(
ArchiveOpenStreamReadAsync(
readerOptions,
testArchives.Select(x => Path.Combine(TEST_ARCHIVES_PATH, x))
);
protected void ArchiveOpenStreamRead(
protected async Task ArchiveOpenStreamReadAsync(
ReaderOptions? readerOptions,
IEnumerable<string> testArchives
)
@@ -206,7 +212,7 @@ public class ArchiveTests : ReaderTests
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
entry.WriteToDirectory(
await entry.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -252,26 +258,20 @@ public class ArchiveTests : ReaderTests
}
}
protected void ArchiveExtractToDirectory(
string testArchive,
ReaderOptions? readerOptions = null
) => ArchiveExtractToDirectory(ArchiveFactory.AutoFactory, testArchive, readerOptions);
protected void ArchiveExtractToDirectory(
IArchiveFactory archiveFactory,
protected async Task ArchiveExtractToDirectoryAsync(
string testArchive,
ReaderOptions? readerOptions = null
)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using (var archive = archiveFactory.Open(new FileInfo(testArchive), readerOptions))
using (var archive = ArchiveFactory.Open(new FileInfo(testArchive), readerOptions))
{
archive.ExtractToDirectory(SCRATCH_FILES_PATH);
await archive.ExtractToDirectoryAsync(SCRATCH_FILES_PATH);
}
VerifyFiles();
}
protected void ArchiveFileRead(
protected async Task ArchiveFileReadAsync(
IArchiveFactory archiveFactory,
string testArchive,
ReaderOptions? readerOptions = null
@@ -282,7 +282,7 @@ public class ArchiveTests : ReaderTests
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
entry.WriteToDirectory(
await entry.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -291,8 +291,8 @@ public class ArchiveTests : ReaderTests
VerifyFiles();
}
protected void ArchiveFileRead(string testArchive, ReaderOptions? readerOptions = null) =>
ArchiveFileRead(ArchiveFactory.AutoFactory, testArchive, readerOptions);
protected Task ArchiveFileReadAsync(string testArchive, ReaderOptions? readerOptions = null) =>
ArchiveFileReadAsync(ArchiveFactory.AutoFactory, testArchive, readerOptions);
protected void ArchiveFileSkip(
string testArchive,
@@ -316,14 +316,14 @@ public class ArchiveTests : ReaderTests
/// <summary>
/// Demonstrate the ExtractionOptions.PreserveFileTime and ExtractionOptions.PreserveAttributes extract options
/// </summary>
protected void ArchiveFileReadEx(string testArchive)
protected async Task ArchiveFileReadExAsync(string testArchive)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using (var archive = ArchiveFactory.Open(testArchive))
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
entry.WriteToDirectory(
await entry.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions
{
@@ -338,17 +338,16 @@ public class ArchiveTests : ReaderTests
VerifyFilesEx();
}
protected void ArchiveDeltaDistanceRead(string testArchive)
protected async Task ArchiveDeltaDistanceReadAsync(string testArchive)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using var archive = ArchiveFactory.Open(testArchive);
using var reader = archive.ExtractAllEntries();
while (reader.MoveToNextEntry())
foreach (var entry in archive.Entries)
{
if (!reader.Entry.IsDirectory)
if (!entry.IsDirectory)
{
var memory = new MemoryStream();
reader.WriteEntryTo(memory);
await entry.WriteToAsync(memory);
memory.Position = 0;

View File

@@ -1,5 +1,6 @@
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Tar;
@@ -13,13 +14,13 @@ public class GZipArchiveTests : ArchiveTests
public GZipArchiveTests() => UseExtensionInsteadOfNameToVerify = true;
[Fact]
public void GZip_Archive_Generic()
public async Task GZip_Archive_Generic()
{
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")))
using (var archive = ArchiveFactory.Open(stream))
{
var entry = archive.Entries.First();
entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
await entry.WriteToFileAsync(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
var size = entry.Size;
var scratch = new FileInfo(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar"));
@@ -28,20 +29,20 @@ public class GZipArchiveTests : ArchiveTests
Assert.Equal(size, scratch.Length);
Assert.Equal(size, test.Length);
}
CompareArchivesByPath(
await CompareArchivesByPathAsync(
Path.Combine(SCRATCH_FILES_PATH, "Tar.tar"),
Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar")
);
}
[Fact]
public void GZip_Archive()
public async Task GZip_Archive()
{
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")))
using (var archive = GZipArchive.Open(stream))
{
var entry = archive.Entries.First();
entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
await entry.WriteToFileAsync(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
var size = entry.Size;
var scratch = new FileInfo(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar"));
@@ -50,7 +51,7 @@ public class GZipArchiveTests : ArchiveTests
Assert.Equal(size, scratch.Length);
Assert.Equal(size, test.Length);
}
CompareArchivesByPath(
await CompareArchivesByPathAsync(
Path.Combine(SCRATCH_FILES_PATH, "Tar.tar"),
Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar")
);

View File

@@ -1,4 +1,5 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers.GZip;
@@ -11,15 +12,15 @@ public class GZipReaderTests : ReaderTests
public GZipReaderTests() => UseExtensionInsteadOfNameToVerify = true;
[Fact]
public void GZip_Reader_Generic() => Read("Tar.tar.gz", CompressionType.GZip);
public Task GZip_Reader_Generic() => ReadAsync("Tar.tar.gz", CompressionType.GZip);
[Fact]
public void GZip_Reader_Generic2()
public async Task GZip_Reader_Generic2()
{
//read only as GZip item
using Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"));
using var reader = GZipReader.Open(new SharpCompressStream(stream));
while (reader.MoveToNextEntry()) // Crash here
while (await reader.MoveToNextEntryAsync()) // Crash here
{
Assert.NotEqual(0, reader.Entry.Size);
Assert.NotEqual(0, reader.Entry.Crc);

View File

@@ -1,4 +1,5 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Writers;
using SharpCompress.Writers.GZip;
@@ -12,7 +13,7 @@ public class GZipWriterTests : WriterTests
: base(ArchiveType.GZip) => UseExtensionInsteadOfNameToVerify = true;
[Fact]
public void GZip_Writer_Generic()
public async Task GZip_Writer_Generic()
{
using (
Stream stream = File.Open(
@@ -25,14 +26,14 @@ public class GZipWriterTests : WriterTests
{
writer.Write("Tar.tar", Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar"));
}
CompareArchivesByPath(
await CompareArchivesByPathAsync(
Path.Combine(SCRATCH_FILES_PATH, "Tar.tar.gz"),
Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")
);
}
[Fact]
public void GZip_Writer()
public async Task GZip_Writer()
{
using (
Stream stream = File.Open(
@@ -45,7 +46,7 @@ public class GZipWriterTests : WriterTests
{
writer.Write("Tar.tar", Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar"));
}
CompareArchivesByPath(
await CompareArchivesByPathAsync(
Path.Combine(SCRATCH_FILES_PATH, "Tar.tar.gz"),
Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")
);
@@ -60,7 +61,7 @@ public class GZipWriterTests : WriterTests
});
[Fact]
public void GZip_Writer_Entry_Path_With_Dir()
public async Task GZip_Writer_Entry_Path_With_Dir()
{
using (
Stream stream = File.Open(
@@ -74,7 +75,7 @@ public class GZipWriterTests : WriterTests
var path = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar");
writer.Write(path, path); //covers issue #532
}
CompareArchivesByPath(
await CompareArchivesByPathAsync(
Path.Combine(SCRATCH_FILES_PATH, "Tar.tar.gz"),
Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")
);

View File

@@ -1,5 +1,6 @@
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Rar;
using SharpCompress.Common;
@@ -12,57 +13,57 @@ namespace SharpCompress.Test.Rar;
public class RarArchiveTests : ArchiveTests
{
[Fact]
public void Rar_EncryptedFileAndHeader_Archive() =>
ReadRarPassword("Rar.encrypted_filesAndHeader.rar", "test");
public Task Rar_EncryptedFileAndHeader_Archive() =>
ReadRarPasswordAsync("Rar.encrypted_filesAndHeader.rar", "test");
[Fact]
public void Rar_EncryptedFileAndHeader_NoPasswordExceptionTest() =>
Assert.Throws(
public Task Rar_EncryptedFileAndHeader_NoPasswordExceptionTest() =>
Assert.ThrowsAsync(
typeof(CryptographicException),
() => ReadRarPassword("Rar.encrypted_filesAndHeader.rar", null)
async () => await ReadRarPasswordAsync("Rar.encrypted_filesAndHeader.rar", null)
);
[Fact]
public void Rar5_EncryptedFileAndHeader_Archive() =>
ReadRarPassword("Rar5.encrypted_filesAndHeader.rar", "test");
public Task Rar5_EncryptedFileAndHeader_Archive() =>
ReadRarPasswordAsync("Rar5.encrypted_filesAndHeader.rar", "test");
[Fact]
public void Rar5_EncryptedFileAndHeader_Archive_Err() =>
Assert.Throws(
public Task Rar5_EncryptedFileAndHeader_Archive_Err() =>
Assert.ThrowsAsync(
typeof(CryptographicException),
() => ReadRarPassword("Rar5.encrypted_filesAndHeader.rar", "failed")
async () => await ReadRarPasswordAsync("Rar5.encrypted_filesAndHeader.rar", "failed")
);
[Fact]
public void Rar5_EncryptedFileAndHeader_NoPasswordExceptionTest() =>
Assert.Throws(
public Task Rar5_EncryptedFileAndHeader_NoPasswordExceptionTest() =>
Assert.ThrowsAsync(
typeof(CryptographicException),
() => ReadRarPassword("Rar5.encrypted_filesAndHeader.rar", null)
async () => await ReadRarPasswordAsync("Rar5.encrypted_filesAndHeader.rar", null)
);
[Fact]
public void Rar_EncryptedFileOnly_Archive() =>
ReadRarPassword("Rar.encrypted_filesOnly.rar", "test");
public Task Rar_EncryptedFileOnly_Archive() =>
ReadRarPasswordAsync("Rar.encrypted_filesOnly.rar", "test");
[Fact]
public void Rar_EncryptedFileOnly_Archive_Err() =>
Assert.Throws(
public Task Rar_EncryptedFileOnly_Archive_Err() =>
Assert.ThrowsAsync(
typeof(CryptographicException),
() => ReadRarPassword("Rar5.encrypted_filesOnly.rar", "failed")
async () => await ReadRarPasswordAsync("Rar5.encrypted_filesOnly.rar", "failed")
);
[Fact]
public void Rar5_EncryptedFileOnly_Archive() =>
ReadRarPassword("Rar5.encrypted_filesOnly.rar", "test");
public Task Rar5_EncryptedFileOnly_Archive() =>
ReadRarPasswordAsync("Rar5.encrypted_filesOnly.rar", "test");
[Fact]
public void Rar_Encrypted_Archive() => ReadRarPassword("Rar.Encrypted.rar", "test");
public Task Rar_Encrypted_Archive() => ReadRarPasswordAsync("Rar.Encrypted.rar", "test");
[Fact]
public void Rar5_Encrypted_Archive() =>
ReadRarPassword("Rar5.encrypted_filesAndHeader.rar", "test");
public Task Rar5_Encrypted_Archive() =>
ReadRarPasswordAsync("Rar5.encrypted_filesAndHeader.rar", "test");
private void ReadRarPassword(string testArchive, string? password)
private async Task ReadRarPasswordAsync(string testArchive, string? password)
{
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, testArchive)))
using (
@@ -77,7 +78,7 @@ public class RarArchiveTests : ArchiveTests
if (!entry.IsDirectory)
{
Assert.Equal(CompressionType.Rar, entry.CompressionType);
entry.WriteToDirectory(
await entry.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -88,12 +89,12 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public void Rar_Multi_Archive_Encrypted() =>
Assert.Throws<InvalidFormatException>(() =>
ArchiveFileReadPassword("Rar.EncryptedParts.part01.rar", "test")
public Task Rar_Multi_Archive_Encrypted() =>
Assert.ThrowsAsync<InvalidFormatException>(async () =>
await ArchiveFileReadPasswordAsync("Rar.EncryptedParts.part01.rar", "test")
);
protected void ArchiveFileReadPassword(string archiveName, string password)
protected async Task ArchiveFileReadPasswordAsync(string archiveName, string password)
{
using (
var archive = RarArchive.Open(
@@ -104,7 +105,7 @@ public class RarArchiveTests : ArchiveTests
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
entry.WriteToDirectory(
await entry.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -114,28 +115,28 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public void Rar_None_ArchiveStreamRead() => ArchiveStreamRead("Rar.none.rar");
public Task Rar_None_ArchiveStreamRead() => ArchiveStreamReadAsync("Rar.none.rar");
[Fact]
public void Rar5_None_ArchiveStreamRead() => ArchiveStreamRead("Rar5.none.rar");
public Task Rar5_None_ArchiveStreamRead() => ArchiveStreamReadAsync("Rar5.none.rar");
[Fact]
public void Rar_ArchiveStreamRead() => ArchiveStreamRead("Rar.rar");
public Task Rar_ArchiveStreamRead() => ArchiveStreamReadAsync("Rar.rar");
[Fact]
public void Rar5_ArchiveStreamRead() => ArchiveStreamRead("Rar5.rar");
public Task Rar5_ArchiveStreamRead() => ArchiveStreamReadAsync("Rar5.rar");
[Fact]
public void Rar_test_invalid_exttime_ArchiveStreamRead() =>
public Task Rar_test_invalid_exttime_ArchiveStreamRead() =>
DoRar_test_invalid_exttime_ArchiveStreamRead("Rar.test_invalid_exttime.rar");
private void DoRar_test_invalid_exttime_ArchiveStreamRead(string filename)
private async Task DoRar_test_invalid_exttime_ArchiveStreamRead(string filename)
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename));
using var archive = ArchiveFactory.Open(stream);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
entry.WriteToDirectory(
await entry.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -143,14 +144,14 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public void Rar_Jpg_ArchiveStreamRead()
public async Task Rar_Jpg_ArchiveStreamRead()
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Rar.jpeg.jpg"));
using (var archive = RarArchive.Open(stream, new ReaderOptions { LookForHeader = true }))
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
entry.WriteToDirectory(
await entry.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -160,12 +161,12 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public void Rar_IsSolidArchiveCheck() => DoRar_IsSolidArchiveCheck("Rar.rar");
public Task Rar_IsSolidArchiveCheck() => DoRar_IsSolidArchiveCheck("Rar.rar");
[Fact]
public void Rar5_IsSolidArchiveCheck() => DoRar_IsSolidArchiveCheck("Rar5.rar");
public Task Rar5_IsSolidArchiveCheck() => DoRar_IsSolidArchiveCheck("Rar5.rar");
private void DoRar_IsSolidArchiveCheck(string filename)
private async Task DoRar_IsSolidArchiveCheck(string filename)
{
using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename)))
{
@@ -173,7 +174,7 @@ public class RarArchiveTests : ArchiveTests
Assert.False(archive.IsSolid);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
entry.WriteToDirectory(
await entry.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -183,10 +184,10 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public void Rar_IsSolidEntryStreamCheck() => DoRar_IsSolidEntryStreamCheck("Rar.solid.rar");
public Task Rar_IsSolidEntryStreamCheck() => DoRar_IsSolidEntryStreamCheck("Rar.solid.rar");
//Extract the 2nd file in a solid archive to check that the first file is skipped properly
private void DoRar_IsSolidEntryStreamCheck(string filename)
private async Task DoRar_IsSolidEntryStreamCheck(string filename)
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename));
using var archive = RarArchive.Open(stream);
@@ -202,8 +203,8 @@ public class RarArchiveTests : ArchiveTests
{
using (var crcStream = new CrcCheckStream((uint)entry.Crc)) //use the 7zip CRC stream for convenience (required a bug fix)
{
using var eStream = entry.OpenEntryStream(); //bug fix in RarStream to report the correct Position
eStream.CopyTo(crcStream);
using var eStream = await entry.OpenEntryStreamAsync(); //bug fix in RarStream to report the correct Position
await eStream.CopyToAsync(crcStream);
} //throws if not valid
if (entry == testEntry)
{
@@ -213,22 +214,22 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public void Rar_Solid_ArchiveStreamRead() => ArchiveStreamRead("Rar.solid.rar");
public Task Rar_Solid_ArchiveStreamRead() => ArchiveStreamReadAsync("Rar.solid.rar");
[Fact]
public void Rar5_Solid_ArchiveStreamRead() => ArchiveStreamRead("Rar5.solid.rar");
public Task Rar5_Solid_ArchiveStreamRead() => ArchiveStreamReadAsync("Rar5.solid.rar");
[Fact]
public void Rar_Solid_StreamRead_Extract_All() =>
ArchiveStreamReadExtractAll("Rar.solid.rar", CompressionType.Rar);
public Task Rar_Solid_StreamRead_Extract_All() =>
ArchiveStreamReadExtractAllAsync("Rar.solid.rar", CompressionType.Rar);
[Fact]
public void Rar5_Solid_StreamRead_Extract_All() =>
ArchiveStreamReadExtractAll("Rar5.solid.rar", CompressionType.Rar);
public Task Rar5_Solid_StreamRead_Extract_All() =>
ArchiveStreamReadExtractAllAsync("Rar5.solid.rar", CompressionType.Rar);
[Fact]
public void Rar_Multi_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamRead(
public Task Rar_Multi_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamReadAsync(
[
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
@@ -241,8 +242,8 @@ public class RarArchiveTests : ArchiveTests
);
[Fact]
public void Rar5_Multi_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamRead(
public Task Rar5_Multi_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamReadAsync(
[
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
@@ -254,7 +255,7 @@ public class RarArchiveTests : ArchiveTests
false
);
private void DoRar_Multi_ArchiveStreamRead(string[] archives, bool isSolid)
private async Task DoRar_Multi_ArchiveStreamReadAsync(string[] archives, bool isSolid)
{
using var archive = RarArchive.Open(
archives.Select(s => Path.Combine(TEST_ARCHIVES_PATH, s)).Select(File.OpenRead)
@@ -262,7 +263,7 @@ public class RarArchiveTests : ArchiveTests
Assert.Equal(archive.IsSolid, isSolid);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
entry.WriteToDirectory(
await entry.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -270,8 +271,8 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public void Rar5_MultiSolid_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamRead(
public Task Rar5_MultiSolid_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamReadAsync(
[
"Rar.multi.solid.part01.rar",
"Rar.multi.solid.part02.rar",
@@ -284,16 +285,16 @@ public class RarArchiveTests : ArchiveTests
);
[Fact]
public void RarNoneArchiveFileRead() => ArchiveFileRead("Rar.none.rar");
public Task RarNoneArchiveFileRead() => ArchiveFileReadAsync("Rar.none.rar");
[Fact]
public void Rar5NoneArchiveFileRead() => ArchiveFileRead("Rar5.none.rar");
public Task Rar5NoneArchiveFileRead() => ArchiveFileReadAsync("Rar5.none.rar");
[Fact]
public void Rar_ArchiveFileRead() => ArchiveFileRead("Rar.rar");
public Task Rar_ArchiveFileRead() => ArchiveFileReadAsync("Rar.rar");
[Fact]
public void Rar5_ArchiveFileRead() => ArchiveFileRead("Rar5.rar");
public Task Rar5_ArchiveFileRead() => ArchiveFileReadAsync("Rar5.rar");
[Fact]
public void Rar_ArchiveFileRead_HasDirectories() =>
@@ -312,7 +313,7 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public void Rar_Jpg_ArchiveFileRead()
public async Task Rar_Jpg_ArchiveFileRead()
{
using (
var archive = RarArchive.Open(
@@ -323,7 +324,7 @@ public class RarArchiveTests : ArchiveTests
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
entry.WriteToDirectory(
await entry.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -333,14 +334,14 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public void Rar_Solid_ArchiveFileRead() => ArchiveFileRead("Rar.solid.rar");
public Task Rar_Solid_ArchiveFileRead() => ArchiveFileReadAsync("Rar.solid.rar");
[Fact]
public void Rar5_Solid_ArchiveFileRead() => ArchiveFileRead("Rar5.solid.rar");
public Task Rar5_Solid_ArchiveFileRead() => ArchiveFileReadAsync("Rar5.solid.rar");
[Fact]
public void Rar2_Multi_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamRead(
public Task Rar2_Multi_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamReadAsync(
[
"Rar2.multi.rar",
"Rar2.multi.r00",
@@ -354,17 +355,17 @@ public class RarArchiveTests : ArchiveTests
);
[Fact]
public void Rar2_Multi_ArchiveFileRead() => ArchiveFileRead("Rar2.multi.rar"); //r00, r01...
public Task Rar2_Multi_ArchiveFileRead() => ArchiveFileReadAsync("Rar2.multi.rar"); //r00, r01...
[Fact]
public void Rar2_ArchiveFileRead() => ArchiveFileRead("Rar2.rar");
public Task Rar2_ArchiveFileRead() => ArchiveFileReadAsync("Rar2.rar");
[Fact]
public void Rar15_ArchiveFileRead()
public async Task Rar15_ArchiveFileRead()
{
UseExtensionInsteadOfNameToVerify = true;
UseCaseInsensitiveToVerify = true;
ArchiveFileRead("Rar15.rar");
await ArchiveFileReadAsync("Rar15.rar");
}
[Fact]
@@ -408,10 +409,10 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public void Rar4_Multi_ArchiveFileRead() => ArchiveFileRead("Rar4.multi.part01.rar");
public Task Rar4_Multi_ArchiveFileRead() => ArchiveFileReadAsync("Rar4.multi.part01.rar");
[Fact]
public void Rar4_ArchiveFileRead() => ArchiveFileRead("Rar4.rar");
public Task Rar4_ArchiveFileRead() => ArchiveFileReadAsync("Rar4.rar");
[Fact]
public void Rar_GetPartsSplit() =>
@@ -461,8 +462,8 @@ public class RarArchiveTests : ArchiveTests
);
[Fact]
public void Rar4_Multi_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamRead(
public Task Rar4_Multi_ArchiveStreamRead() =>
DoRar_Multi_ArchiveStreamReadAsync(
[
"Rar4.multi.part01.rar",
"Rar4.multi.part02.rar",
@@ -477,8 +478,8 @@ public class RarArchiveTests : ArchiveTests
//no extension: tests that the lib identifies the archive by content, not extension
[Fact]
public void Rar4_Split_ArchiveStreamRead() =>
ArchiveStreamMultiRead(
public Task Rar4_Split_ArchiveStreamRead() =>
ArchiveStreamMultiReadAsync(
null,
[
"Rar4.split.001",
@@ -492,7 +493,7 @@ public class RarArchiveTests : ArchiveTests
//will detect and load other files
[Fact]
public void Rar4_Multi_ArchiveFirstFileRead() => ArchiveFileRead("Rar4.multi.part01.rar");
public Task Rar4_Multi_ArchiveFirstFileRead() => ArchiveFileReadAsync("Rar4.multi.part01.rar");
//"Rar4.multi.part02.rar",
//"Rar4.multi.part03.rar",
@@ -502,7 +503,7 @@ public class RarArchiveTests : ArchiveTests
//"Rar4.multi.part07.rar"
//will detect and load other files
[Fact]
public void Rar4_Split_ArchiveFirstFileRead() => ArchiveFileRead("Rar4.split.001");
public Task Rar4_Split_ArchiveFirstFileRead() => ArchiveFileReadAsync("Rar4.split.001");
//"Rar4.split.002",
//"Rar4.split.003",
@@ -511,8 +512,8 @@ public class RarArchiveTests : ArchiveTests
//"Rar4.split.006"
//will detect and load other files
[Fact]
public void Rar4_Split_ArchiveStreamFirstFileRead() =>
ArchiveStreamMultiRead(
public Task Rar4_Split_ArchiveStreamFirstFileRead() =>
ArchiveStreamMultiReadAsync(
null,
[
"Rar4.split.001",
@@ -526,8 +527,8 @@ public class RarArchiveTests : ArchiveTests
//open with ArchiveFactory.Open and stream
[Fact]
public void Rar4_Split_ArchiveOpen() =>
ArchiveOpenStreamRead(
public Task Rar4_Split_ArchiveOpen() =>
ArchiveOpenStreamReadAsync(
null,
"Rar4.split.001",
"Rar4.split.002",
@@ -539,8 +540,8 @@ public class RarArchiveTests : ArchiveTests
//open with ArchiveFactory.Open and stream
[Fact]
public void Rar4_Multi_ArchiveOpen() =>
ArchiveOpenStreamRead(
public Task Rar4_Multi_ArchiveOpen() =>
ArchiveOpenStreamReadAsync(
null,
"Rar4.multi.part01.rar",
"Rar4.multi.part02.rar",
@@ -570,10 +571,10 @@ public class RarArchiveTests : ArchiveTests
);
[Fact]
public void Rar_Multi_ArchiveFileRead() => ArchiveFileRead("Rar.multi.part01.rar");
public Task Rar_Multi_ArchiveFileRead() => ArchiveFileReadAsync("Rar.multi.part01.rar");
[Fact]
public void Rar5_Multi_ArchiveFileRead() => ArchiveFileRead("Rar5.multi.part01.rar");
public Task Rar5_Multi_ArchiveFileRead() => ArchiveFileReadAsync("Rar5.multi.part01.rar");
[Fact]
public void Rar_IsFirstVolume_True() => DoRar_IsFirstVolume_True("Rar.multi.part01.rar");
@@ -602,7 +603,7 @@ public class RarArchiveTests : ArchiveTests
}
[Fact]
public void Rar5_CRC_Blake2_Archive() => ArchiveFileRead("Rar5.crc_blake2.rar");
public Task Rar5_CRC_Blake2_Archive() => ArchiveFileReadAsync("Rar5.crc_blake2.rar");
[Fact]
void Rar_Iterate_Archive() =>

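The conversion applied throughout RarArchiveTests is mechanical: each synchronous void [Fact] becomes a Task-returning fact that delegates to the corresponding Async helper. xUnit awaits Task-returning test methods natively, so only the return type and the helper name change. A minimal sketch of one such conversion (fixture name taken from the diff above):

// before
[Fact]
public void Rar_ArchiveFileRead() => ArchiveFileRead("Rar.rar");

// after
[Fact]
public Task Rar_ArchiveFileRead() => ArchiveFileReadAsync("Rar.rar");
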
View File

@@ -1,4 +1,4 @@
using System;
/*using System;
using System.Collections;
using System.IO;
using System.Linq;
@@ -223,7 +223,7 @@ public class RarReaderTests : ReaderTests
var destinationFileName = Path.Combine(destdir, file);
using var fs = File.OpenWrite(destinationFileName);
entryStream.TransferTo(fs);
entryStream.CopyTo(fs);
}
}
}
@@ -407,13 +407,19 @@ public class RarReaderTests : ReaderTests
Path.Combine("exe", "test.exe"),
}
);
using var archive = RarArchive.Open(
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part01.rar")
using var reader = RarReader.Open(
[
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part01.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part02.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part03.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part04.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part05.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part06.rar"),
]
);
using var reader = archive.ExtractAllEntries();
while (reader.MoveToNextEntry())
{
Assert.Equal(expectedOrder.Pop(), reader.Entry.Key);
}
}
}
}*/
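
The last hunk above replaces RarArchive.Open on the first part (plus ExtractAllEntries) with RarReader.Open over the full ordered list of volume paths. A minimal sketch of that reader-side pattern, assuming the path-list overload shown in the diff (only two parts listed for brevity):

using var reader = RarReader.Open(
[
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part01.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part02.rar"),
]
);
while (reader.MoveToNextEntry())
{
// entries arrive in archive order, spanning volume boundaries
}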

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -11,7 +12,7 @@ namespace SharpCompress.Test;
public abstract class ReaderTests : TestBase
{
protected void Read(
protected async Task ReadAsync(
string testArchive,
CompressionType expectedCompression,
ReaderOptions? options = null
@@ -22,14 +23,14 @@ public abstract class ReaderTests : TestBase
options ??= new ReaderOptions() { BufferSize = 0x20000 }; //test a larger buffer size (needs a test rather than eyeballing debug logs :P)
options.LeaveStreamOpen = true;
ReadImpl(testArchive, expectedCompression, options);
await ReadAsyncImpl(testArchive, expectedCompression, options);
options.LeaveStreamOpen = false;
ReadImpl(testArchive, expectedCompression, options);
await ReadAsyncImpl(testArchive, expectedCompression, options);
VerifyFiles();
}
private void ReadImpl(
private async Task ReadAsyncImpl(
string testArchive,
CompressionType expectedCompression,
ReaderOptions options
@@ -45,7 +46,7 @@ public abstract class ReaderTests : TestBase
using var testStream = new TestStream(protectedStream);
using (var reader = ReaderFactory.Open(testStream, options))
{
UseReader(reader, expectedCompression);
await UseReaderAsync(reader, expectedCompression);
protectedStream.ThrowOnDispose = false;
Assert.False(testStream.IsDisposed, $"{nameof(testStream)} prematurely closed");
}
@@ -57,42 +58,18 @@ public abstract class ReaderTests : TestBase
Assert.True(options.LeaveStreamOpen != testStream.IsDisposed, message);
}
public void UseReader(IReader reader, CompressionType expectedCompression)
public async Task UseReaderAsync(IReader reader, CompressionType expectedCompression)
{
while (reader.MoveToNextEntry())
while (await reader.MoveToNextEntryAsync())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(expectedCompression, reader.Entry.CompressionType);
reader.WriteEntryToDirectory(
await reader.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
}
protected void Iterate(
string testArchive,
string fileOrder,
CompressionType expectedCompression,
ReaderOptions? options = null
)
{
if (!Environment.OSVersion.IsWindows())
{
fileOrder = fileOrder.Replace('\\', '/');
}
var expected = new Stack<string>(fileOrder.Split(' '));
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using var file = File.OpenRead(testArchive);
using var forward = new ForwardOnlyStream(file);
using var reader = ReaderFactory.Open(forward, options);
while (reader.MoveToNextEntry())
{
Assert.Equal(expectedCompression, reader.Entry.CompressionType);
Assert.Equal(expected.Pop(), reader.Entry.Key);
}
}
}
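
Note that ReadAsync above deliberately extracts the same archive twice, first with LeaveStreamOpen = true and then with it false, so the TestStream disposal assertions cover both ownership modes before VerifyFiles runs. A caller-side sketch, assuming the same conversion pattern as the archive tests:

[Fact]
public Task Tar_GZip_Reader() => ReadAsync("Tar.tar.gz", CompressionType.GZip);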

View File

@@ -1,6 +1,7 @@
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.SevenZip;
using SharpCompress.Common;
@@ -13,78 +14,93 @@ namespace SharpCompress.Test.SevenZip;
public class SevenZipArchiveTests : ArchiveTests
{
[Fact]
public void SevenZipArchive_Solid_StreamRead() => ArchiveStreamRead("7Zip.solid.7z");
public Task SevenZipArchive_Solid_StreamRead() => ArchiveStreamReadAsync("7Zip.solid.7z");
[Fact]
public void SevenZipArchive_NonSolid_StreamRead() => ArchiveStreamRead("7Zip.nonsolid.7z");
public Task SevenZipArchive_NonSolid_StreamRead() => ArchiveStreamReadAsync("7Zip.nonsolid.7z");
[Fact]
public void SevenZipArchive_LZMA_StreamRead() => ArchiveStreamRead("7Zip.LZMA.7z");
public Task SevenZipArchive_LZMA_StreamRead() => ArchiveStreamReadAsync("7Zip.LZMA.7z");
[Fact]
public void SevenZipArchive_LZMA_PathRead() => ArchiveFileRead("7Zip.LZMA.7z");
public Task SevenZipArchive_LZMA_PathRead() => ArchiveFileReadAsync("7Zip.LZMA.7z");
[Fact]
public void SevenZipArchive_LZMAAES_StreamRead() =>
ArchiveStreamRead("7Zip.LZMA.Aes.7z", new ReaderOptions { Password = "testpassword" });
public Task SevenZipArchive_LZMAAES_StreamRead() =>
ArchiveStreamReadAsync("7Zip.LZMA.Aes.7z", new ReaderOptions { Password = "testpassword" });
[Fact]
public void SevenZipArchive_LZMAAES_PathRead() =>
ArchiveFileRead("7Zip.LZMA.Aes.7z", new ReaderOptions { Password = "testpassword" });
public Task SevenZipArchive_LZMAAES_PathRead() =>
ArchiveFileReadAsync("7Zip.LZMA.Aes.7z", new ReaderOptions { Password = "testpassword" });
[Fact]
public void SevenZipArchive_LZMAAES_NoPasswordExceptionTest() =>
Assert.Throws(
public Task SevenZipArchive_LZMAAES_NoPasswordExceptionTest() =>
Assert.ThrowsAsync(
typeof(CryptographicException),
() => ArchiveFileRead("7Zip.LZMA.Aes.7z", new ReaderOptions { Password = null })
async () =>
await ArchiveFileReadAsync(
"7Zip.LZMA.Aes.7z",
new ReaderOptions { Password = null }
)
); //was failing with ArgumentNullException, not CryptographicException like rar
[Fact]
public void SevenZipArchive_PPMd_StreamRead() => ArchiveStreamRead("7Zip.PPMd.7z");
public Task SevenZipArchive_PPMd_StreamRead() => ArchiveStreamReadAsync("7Zip.PPMd.7z");
[Fact]
public void SevenZipArchive_PPMd_StreamRead_Extract_All() =>
ArchiveStreamReadExtractAll("7Zip.PPMd.7z", CompressionType.PPMd);
public Task SevenZipArchive_PPMd_StreamRead_Extract_All() =>
ArchiveStreamReadExtractAllAsync("7Zip.PPMd.7z", CompressionType.PPMd);
[Fact]
public void SevenZipArchive_PPMd_PathRead() => ArchiveFileRead("7Zip.PPMd.7z");
public Task SevenZipArchive_PPMd_PathRead() => ArchiveFileReadAsync("7Zip.PPMd.7z");
[Fact]
public void SevenZipArchive_LZMA2_StreamRead() => ArchiveStreamRead("7Zip.LZMA2.7z");
public Task SevenZipArchive_LZMA2_StreamRead() => ArchiveStreamReadAsync("7Zip.LZMA2.7z");
[Fact]
public void SevenZipArchive_LZMA2_PathRead() => ArchiveFileRead("7Zip.LZMA2.7z");
public Task SevenZipArchive_LZMA2_PathRead() => ArchiveFileReadAsync("7Zip.LZMA2.7z");
[Fact]
public void SevenZipArchive_LZMA2_EXE_StreamRead() =>
ArchiveStreamRead(new SevenZipFactory(), "7Zip.LZMA2.exe", new() { LookForHeader = true });
public Task SevenZipArchive_LZMA2_EXE_StreamRead() =>
ArchiveStreamReadAsync(
new SevenZipFactory(),
"7Zip.LZMA2.exe",
new() { LookForHeader = true }
);
[Fact]
public void SevenZipArchive_LZMA2_EXE_PathRead() =>
ArchiveFileRead(new SevenZipFactory(), "7Zip.LZMA2.exe", new() { LookForHeader = true });
public Task SevenZipArchive_LZMA2_EXE_PathRead() =>
ArchiveFileReadAsync(
new SevenZipFactory(),
"7Zip.LZMA2.exe",
new() { LookForHeader = true }
);
[Fact]
public void SevenZipArchive_LZMA2AES_StreamRead() =>
ArchiveStreamRead("7Zip.LZMA2.Aes.7z", new ReaderOptions { Password = "testpassword" });
public Task SevenZipArchive_LZMA2AES_StreamRead() =>
ArchiveStreamReadAsync(
"7Zip.LZMA2.Aes.7z",
new ReaderOptions { Password = "testpassword" }
);
[Fact]
public void SevenZipArchive_LZMA2AES_PathRead() =>
ArchiveFileRead("7Zip.LZMA2.Aes.7z", new ReaderOptions { Password = "testpassword" });
public Task SevenZipArchive_LZMA2AES_PathRead() =>
ArchiveFileReadAsync("7Zip.LZMA2.Aes.7z", new ReaderOptions { Password = "testpassword" });
[Fact]
public void SevenZipArchive_BZip2_StreamRead() => ArchiveStreamRead("7Zip.BZip2.7z");
public Task SevenZipArchive_BZip2_StreamRead() => ArchiveStreamReadAsync("7Zip.BZip2.7z");
[Fact]
public void SevenZipArchive_BZip2_PathRead() => ArchiveFileRead("7Zip.BZip2.7z");
public Task SevenZipArchive_BZip2_PathRead() => ArchiveFileReadAsync("7Zip.BZip2.7z");
[Fact]
public void SevenZipArchive_LZMA_Time_Attributes_PathRead() =>
ArchiveFileReadEx("7Zip.LZMA.7z");
public Task SevenZipArchive_LZMA_Time_Attributes_PathRead() =>
ArchiveFileReadExAsync("7Zip.LZMA.7z");
[Fact]
public void SevenZipArchive_BZip2_Split() =>
Assert.Throws<InvalidOperationException>(() =>
ArchiveStreamRead(
public Task SevenZipArchive_BZip2_Split() =>
Assert.ThrowsAsync<InvalidOperationException>(async () =>
await ArchiveStreamReadAsync(
null,
"Original.7z.001",
"Original.7z.002",
@@ -98,8 +114,8 @@ public class SevenZipArchiveTests : ArchiveTests
//Same archive as Original.7z.001 ... 007 but without the root directory 'Original\' in the archive - this caused the verify to fail
[Fact]
public void SevenZipArchive_BZip2_Split_Working() =>
ArchiveStreamMultiRead(
public Task SevenZipArchive_BZip2_Split_Working() =>
ArchiveStreamMultiReadAsync(
null,
"7Zip.BZip2.split.001",
"7Zip.BZip2.split.002",
@@ -112,8 +128,8 @@ public class SevenZipArchiveTests : ArchiveTests
//will detect and load other files
[Fact]
public void SevenZipArchive_BZip2_Split_FirstFileRead() =>
ArchiveFileRead("7Zip.BZip2.split.001");
public Task SevenZipArchive_BZip2_Split_FirstFileRead() =>
ArchiveFileReadAsync("7Zip.BZip2.split.001");
//"7Zip.BZip2.split.002",
//"7Zip.BZip2.split.003",
@@ -123,15 +139,15 @@ public class SevenZipArchiveTests : ArchiveTests
//"7Zip.BZip2.split.007"
[Fact]
public void SevenZipArchive_ZSTD_StreamRead() => ArchiveStreamRead("7Zip.ZSTD.7z");
public Task SevenZipArchive_ZSTD_StreamRead() => ArchiveStreamReadAsync("7Zip.ZSTD.7z");
[Fact]
public void SevenZipArchive_ZSTD_PathRead() => ArchiveFileRead("7Zip.ZSTD.7z");
public Task SevenZipArchive_ZSTD_PathRead() => ArchiveFileReadAsync("7Zip.ZSTD.7z");
[Fact]
public void SevenZipArchive_ZSTD_Split() =>
Assert.Throws<InvalidOperationException>(() =>
ArchiveStreamRead(
public Task SevenZipArchive_ZSTD_Split() =>
Assert.ThrowsAsync<InvalidOperationException>(async () =>
await ArchiveStreamReadAsync(
null,
"7Zip.ZSTD.Split.7z.001",
"7Zip.ZSTD.Split.7z.002",
@@ -143,53 +159,53 @@ public class SevenZipArchiveTests : ArchiveTests
);
[Fact]
public void SevenZipArchive_EOS_FileRead() => ArchiveFileRead("7Zip.eos.7z");
public Task SevenZipArchive_EOS_FileRead() => ArchiveFileReadAsync("7Zip.eos.7z");
[Fact]
public void SevenZipArchive_Delta_FileRead() => ArchiveFileRead("7Zip.delta.7z");
public Task SevenZipArchive_Delta_FileRead() => ArchiveFileReadAsync("7Zip.delta.7z");
[Fact]
public void SevenZipArchive_ARM_FileRead() => ArchiveFileRead("7Zip.ARM.7z");
public Task SevenZipArchive_ARM_FileRead() => ArchiveFileReadAsync("7Zip.ARM.7z");
[Fact]
public void SevenZipArchive_ARMT_FileRead() => ArchiveFileRead("7Zip.ARMT.7z");
public Task SevenZipArchive_ARMT_FileRead() => ArchiveFileReadAsync("7Zip.ARMT.7z");
[Fact]
public void SevenZipArchive_BCJ_FileRead() => ArchiveFileRead("7Zip.BCJ.7z");
public Task SevenZipArchive_BCJ_FileRead() => ArchiveFileReadAsync("7Zip.BCJ.7z");
[Fact]
public void SevenZipArchive_BCJ2_FileRead() => ArchiveFileRead("7Zip.BCJ2.7z");
public Task SevenZipArchive_BCJ2_FileRead() => ArchiveFileReadAsync("7Zip.BCJ2.7z");
[Fact]
public void SevenZipArchive_IA64_FileRead() => ArchiveFileRead("7Zip.IA64.7z");
public Task SevenZipArchive_IA64_FileRead() => ArchiveFileReadAsync("7Zip.IA64.7z");
[Fact]
public void SevenZipArchive_PPC_FileRead() => ArchiveFileRead("7Zip.PPC.7z");
public Task SevenZipArchive_PPC_FileRead() => ArchiveFileReadAsync("7Zip.PPC.7z");
[Fact]
public void SevenZipArchive_SPARC_FileRead() => ArchiveFileRead("7Zip.SPARC.7z");
public Task SevenZipArchive_SPARC_FileRead() => ArchiveFileReadAsync("7Zip.SPARC.7z");
[Fact]
public void SevenZipArchive_ARM64_FileRead() => ArchiveFileRead("7Zip.ARM64.7z");
public Task SevenZipArchive_ARM64_FileRead() => ArchiveFileReadAsync("7Zip.ARM64.7z");
[Fact]
public void SevenZipArchive_RISCV_FileRead() => ArchiveFileRead("7Zip.RISCV.7z");
public Task SevenZipArchive_RISCV_FileRead() => ArchiveFileReadAsync("7Zip.RISCV.7z");
[Fact]
public void SevenZipArchive_Filters_FileRead() => ArchiveFileRead("7Zip.Filters.7z");
public Task SevenZipArchive_Filters_FileRead() => ArchiveFileReadAsync("7Zip.Filters.7z");
[Fact]
public void SevenZipArchive_Delta_Distance() =>
ArchiveDeltaDistanceRead("7Zip.delta.distance.7z");
public Task SevenZipArchive_Delta_Distance() =>
ArchiveDeltaDistanceReadAsync("7Zip.delta.distance.7z");
[Fact]
public void SevenZipArchive_Tar_PathRead()
public async Task SevenZipArchive_Tar_PathRead()
{
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "7Zip.Tar.tar.7z")))
using (var archive = SevenZipArchive.Open(stream))
{
var entry = archive.Entries.First();
entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
await entry.WriteToFileAsync(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
var size = entry.Size;
var scratch = new FileInfo(Path.Combine(SCRATCH_FILES_PATH, "7Zip.Tar.tar"));
@@ -199,7 +215,7 @@ public class SevenZipArchiveTests : ArchiveTests
Assert.Equal(size, test.Length);
}
CompareArchivesByPath(
await CompareArchivesByPathAsync(
Path.Combine(SCRATCH_FILES_PATH, "7Zip.Tar.tar"),
Path.Combine(TEST_ARCHIVES_PATH, "7Zip.Tar.tar")
);
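
Note the shape of the converted exception tests: Assert.ThrowsAsync returns a Task that must itself be returned (or awaited) from the test; dropping it would let the test pass without ever observing the assertion. A minimal sketch of the pattern (split-part list elided):

[Fact]
public Task SevenZipArchive_ZSTD_Split() =>
Assert.ThrowsAsync<InvalidOperationException>(async () =>
await ArchiveStreamReadAsync(null, "7Zip.ZSTD.Split.7z.001")
);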

View File

@@ -9,6 +9,9 @@
<PropertyGroup Condition="'$(Configuration)|$(TargetFramework)|$(Platform)'=='Debug|net8.0|AnyCPU'">
<DefineConstants>$(DefineConstants);DEBUG_STREAMS</DefineConstants>
</PropertyGroup>
<PropertyGroup Condition="$([System.Runtime.InteropServices.RuntimeInformation]::IsOSPlatform($([System.Runtime.InteropServices.OSPlatform]::Windows)))">
<DefineConstants>$(DefineConstants);WINDOWS</DefineConstants>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\..\src\SharpCompress\SharpCompress.csproj" />
</ItemGroup>
@@ -17,7 +20,6 @@
<PackageReference Include="Microsoft.NET.Test.Sdk" />
<PackageReference Include="xunit.runner.visualstudio" />
<PackageReference Include="xunit" />
<PackageReference Include="Xunit.SkippableFact" />
<PackageReference Include="Microsoft.NETFramework.ReferenceAssemblies" PrivateAssets="All" />
</ItemGroup>
<ItemGroup Condition=" '$(VersionlessImplicitFrameworkDefine)' != 'NETFRAMEWORK' ">

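The new conditional PropertyGroup defines a WINDOWS compilation constant whenever the build machine is Windows, and Xunit.SkippableFact is removed in the same change, which suggests platform-specific tests are now compiled out rather than skipped at run time. A hedged sketch of how a test could consume the constant (the guarded bodies are illustrative):

#if WINDOWS
// Windows-only expectations, e.g. file-attribute comparisons
#else
// POSIX-only expectations, e.g. symlink round-trips
#endif
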
View File

@@ -16,283 +16,283 @@ namespace SharpCompress.Test.Tar;
public class TarArchiveTests : ArchiveTests
{
public TarArchiveTests() => UseExtensionInsteadOfNameToVerify = true;
[Fact]
public void TarArchiveStreamRead() => ArchiveStreamRead("Tar.tar");
[Fact]
public void TarArchivePathRead() => ArchiveFileRead("Tar.tar");
[Fact]
public void Tar_FileName_Exactly_100_Characters()
{
var archive = "Tar_FileName_Exactly_100_Characters.tar";
// create the 100 char filename
var filename =
"filename_with_exactly_100_characters_______________________________________________________________X";
// Step 1: create a tar file containing a file with the test name
using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH2_FILES_PATH, archive)))
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, CompressionType.None))
using (Stream inputStream = new MemoryStream())
/*
[Fact]
public void TarArchiveStreamRead() => ArchiveStreamRead("Tar.tar");
[Fact]
public void TarArchivePathRead() => ArchiveFileRead("Tar.tar");
[Fact]
public void Tar_FileName_Exactly_100_Characters()
{
var sw = new StreamWriter(inputStream);
sw.Write("dummy filecontent");
sw.Flush();
inputStream.Position = 0;
writer.Write(filename, inputStream, null);
}
// Step 2: check if the written tar file can be read correctly
var unmodified = Path.Combine(SCRATCH2_FILES_PATH, archive);
using (var archive2 = TarArchive.Open(unmodified))
{
Assert.Equal(1, archive2.Entries.Count);
Assert.Contains(filename, archive2.Entries.Select(entry => entry.Key));
foreach (var entry in archive2.Entries)
var archive = "Tar_FileName_Exactly_100_Characters.tar";
// create the 100 char filename
var filename =
"filename_with_exactly_100_characters_______________________________________________________________X";
// Step 1: create a tar file containing a file with the test name
using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH2_FILES_PATH, archive)))
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, CompressionType.None))
using (Stream inputStream = new MemoryStream())
{
Assert.Equal(
"dummy filecontent",
new StreamReader(entry.OpenEntryStream()).ReadLine()
);
var sw = new StreamWriter(inputStream);
sw.Write("dummy filecontent");
sw.Flush();
inputStream.Position = 0;
writer.Write(filename, inputStream, null);
}
}
}
[Fact]
public void Tar_NonUstarArchiveWithLongNameDoesNotSkipEntriesAfterTheLongOne()
{
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "very long filename.tar");
using var archive = TarArchive.Open(unmodified);
Assert.Equal(5, archive.Entries.Count);
Assert.Contains("very long filename/", archive.Entries.Select(entry => entry.Key));
Assert.Contains(
"very long filename/very long filename very long filename very long filename very long filename very long filename very long filename very long filename very long filename very long filename very long filename.jpg",
archive.Entries.Select(entry => entry.Key)
);
Assert.Contains("z_file 1.txt", archive.Entries.Select(entry => entry.Key));
Assert.Contains("z_file 2.txt", archive.Entries.Select(entry => entry.Key));
Assert.Contains("z_file 3.txt", archive.Entries.Select(entry => entry.Key));
}
[Fact]
public void Tar_VeryLongFilepathReadback()
{
var archive = "Tar_VeryLongFilepathReadback.tar";
// create a very long filename
var longFilename = "";
for (var i = 0; i < 600; i = longFilename.Length)
{
longFilename += i.ToString("D10") + "-";
}
longFilename += ".txt";
// Step 1: create a tar file containing a file with a long name
using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH2_FILES_PATH, archive)))
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, CompressionType.None))
using (Stream inputStream = new MemoryStream())
{
var sw = new StreamWriter(inputStream);
sw.Write("dummy filecontent");
sw.Flush();
inputStream.Position = 0;
writer.Write(longFilename, inputStream, null);
}
// Step 2: check if the written tar file can be read correctly
var unmodified = Path.Combine(SCRATCH2_FILES_PATH, archive);
using (var archive2 = TarArchive.Open(unmodified))
{
Assert.Equal(1, archive2.Entries.Count);
Assert.Contains(longFilename, archive2.Entries.Select(entry => entry.Key));
foreach (var entry in archive2.Entries)
// Step 2: check if the written tar file can be read correctly
var unmodified = Path.Combine(SCRATCH2_FILES_PATH, archive);
using (var archive2 = TarArchive.Open(unmodified))
{
Assert.Equal(
"dummy filecontent",
new StreamReader(entry.OpenEntryStream()).ReadLine()
);
}
}
}
[Fact]
public void Tar_UstarArchivePathReadLongName()
{
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "ustar with long names.tar");
using var archive = TarArchive.Open(unmodified);
Assert.Equal(6, archive.Entries.Count);
Assert.Contains("Directory/", archive.Entries.Select(entry => entry.Key));
Assert.Contains(
"Directory/Some file with veeeeeeeeeery loooooooooong name",
archive.Entries.Select(entry => entry.Key)
);
Assert.Contains(
"Directory/Directory with veeeeeeeeeery loooooooooong name/",
archive.Entries.Select(entry => entry.Key)
);
Assert.Contains(
"Directory/Directory with veeeeeeeeeery loooooooooong name/Some file with veeeeeeeeeery loooooooooong name",
archive.Entries.Select(entry => entry.Key)
);
Assert.Contains(
"Directory/Directory with veeeeeeeeeery loooooooooong name/Directory with veeeeeeeeeery loooooooooong name/",
archive.Entries.Select(entry => entry.Key)
);
Assert.Contains(
"Directory/Directory with veeeeeeeeeery loooooooooong name/Directory with veeeeeeeeeery loooooooooong name/Some file with veeeeeeeeeery loooooooooong name",
archive.Entries.Select(entry => entry.Key)
);
}
[Fact]
public void Tar_Create_New()
{
var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.tar");
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
// var aropt = new Ar
using (var archive = TarArchive.Create())
{
archive.AddAllFromDirectory(ORIGINAL_FILES_PATH);
var twopt = new TarWriterOptions(CompressionType.None, true);
twopt.ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(866) };
archive.SaveTo(scratchPath, twopt);
}
CompareArchivesByPath(unmodified, scratchPath);
}
[Fact]
public void Tar_Random_Write_Add()
{
var jpg = Path.Combine(ORIGINAL_FILES_PATH, "jpg", "test.jpg");
var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.mod.tar");
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.mod.tar");
var modified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
using (var archive = TarArchive.Open(unmodified))
{
archive.AddEntry("jpg\\test.jpg", jpg);
archive.SaveTo(scratchPath, CompressionType.None);
}
CompareArchivesByPath(modified, scratchPath);
}
[Fact]
public void Tar_Random_Write_Remove()
{
var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.mod.tar");
var modified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.mod.tar");
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
using (var archive = TarArchive.Open(unmodified))
{
var entry = archive.Entries.Single(x =>
x.Key.NotNull().EndsWith("jpg", StringComparison.OrdinalIgnoreCase)
);
archive.RemoveEntry(entry);
archive.SaveTo(scratchPath, CompressionType.None);
}
CompareArchivesByPath(modified, scratchPath);
}
[Fact]
public void Tar_Containing_Rar_Archive()
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.ContainsRar.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var archive = ArchiveFactory.Open(stream);
Assert.True(archive.Type == ArchiveType.Tar);
}
[Fact]
public void Tar_Empty_Archive()
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.Empty.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var archive = ArchiveFactory.Open(stream);
Assert.True(archive.Type == ArchiveType.Tar);
}
[Theory]
[InlineData(10)]
[InlineData(128)]
public void Tar_Japanese_Name(int length)
{
using var mstm = new MemoryStream();
var enc = new ArchiveEncoding { Default = Encoding.UTF8 };
var twopt = new TarWriterOptions(CompressionType.None, true);
twopt.ArchiveEncoding = enc;
var fname = new string((char)0x3042, length);
using (var tw = new TarWriter(mstm, twopt))
using (var input = new MemoryStream(new byte[32]))
{
tw.Write(fname, input, null);
}
using (var inputMemory = new MemoryStream(mstm.ToArray()))
{
var tropt = new ReaderOptions { ArchiveEncoding = enc };
using (var tr = TarReader.Open(inputMemory, tropt))
{
while (tr.MoveToNextEntry())
Assert.Equal(1, archive2.Entries.Count);
Assert.Contains(filename, archive2.Entries.Select(entry => entry.Key));
foreach (var entry in archive2.Entries)
{
Assert.Equal(fname, tr.Entry.Key);
Assert.Equal(
"dummy filecontent",
new StreamReader(entry.OpenEntryStream()).ReadLine()
);
}
}
}
}
[Fact]
public void Tar_Read_One_At_A_Time()
{
var archiveEncoding = new ArchiveEncoding { Default = Encoding.UTF8 };
var tarWriterOptions = new TarWriterOptions(CompressionType.None, true)
[Fact]
public void Tar_NonUstarArchiveWithLongNameDoesNotSkipEntriesAfterTheLongOne()
{
ArchiveEncoding = archiveEncoding,
};
var testBytes = Encoding.UTF8.GetBytes("This is a test.");
using var memoryStream = new MemoryStream();
using (var tarWriter = new TarWriter(memoryStream, tarWriterOptions))
using (var testFileStream = new MemoryStream(testBytes))
{
tarWriter.Write("test1.txt", testFileStream);
testFileStream.Position = 0;
tarWriter.Write("test2.txt", testFileStream);
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "very long filename.tar");
using var archive = TarArchive.Open(unmodified);
Assert.Equal(5, archive.Entries.Count);
Assert.Contains("very long filename/", archive.Entries.Select(entry => entry.Key));
Assert.Contains(
"very long filename/very long filename very long filename very long filename very long filename very long filename very long filename very long filename very long filename very long filename very long filename.jpg",
archive.Entries.Select(entry => entry.Key)
);
Assert.Contains("z_file 1.txt", archive.Entries.Select(entry => entry.Key));
Assert.Contains("z_file 2.txt", archive.Entries.Select(entry => entry.Key));
Assert.Contains("z_file 3.txt", archive.Entries.Select(entry => entry.Key));
}
memoryStream.Position = 0;
var numberOfEntries = 0;
using (var archiveFactory = TarArchive.Open(memoryStream))
[Fact]
public void Tar_VeryLongFilepathReadback()
{
foreach (var entry in archiveFactory.Entries)
var archive = "Tar_VeryLongFilepathReadback.tar";
// create a very long filename
var longFilename = "";
for (var i = 0; i < 600; i = longFilename.Length)
{
++numberOfEntries;
using var tarEntryStream = entry.OpenEntryStream();
using var testFileStream = new MemoryStream();
tarEntryStream.CopyTo(testFileStream);
Assert.Equal(testBytes.Length, testFileStream.Length);
longFilename += i.ToString("D10") + "-";
}
longFilename += ".txt";
// Step 1: create a tar file containing a file with a long name
using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH2_FILES_PATH, archive)))
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, CompressionType.None))
using (Stream inputStream = new MemoryStream())
{
var sw = new StreamWriter(inputStream);
sw.Write("dummy filecontent");
sw.Flush();
inputStream.Position = 0;
writer.Write(longFilename, inputStream, null);
}
// Step 2: check if the written tar file can be read correctly
var unmodified = Path.Combine(SCRATCH2_FILES_PATH, archive);
using (var archive2 = TarArchive.Open(unmodified))
{
Assert.Equal(1, archive2.Entries.Count);
Assert.Contains(longFilename, archive2.Entries.Select(entry => entry.Key));
foreach (var entry in archive2.Entries)
{
Assert.Equal(
"dummy filecontent",
new StreamReader(entry.OpenEntryStream()).ReadLine()
);
}
}
}
Assert.Equal(2, numberOfEntries);
}
[Fact]
public void Tar_Detect_Test()
{
var isTar = TarArchive.IsTarFile(Path.Combine(TEST_ARCHIVES_PATH, "false.positive.tar"));
Assert.False(isTar);
}
[Fact]
public void Tar_UstarArchivePathReadLongName()
{
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "ustar with long names.tar");
using var archive = TarArchive.Open(unmodified);
Assert.Equal(6, archive.Entries.Count);
Assert.Contains("Directory/", archive.Entries.Select(entry => entry.Key));
Assert.Contains(
"Directory/Some file with veeeeeeeeeery loooooooooong name",
archive.Entries.Select(entry => entry.Key)
);
Assert.Contains(
"Directory/Directory with veeeeeeeeeery loooooooooong name/",
archive.Entries.Select(entry => entry.Key)
);
Assert.Contains(
"Directory/Directory with veeeeeeeeeery loooooooooong name/Some file with veeeeeeeeeery loooooooooong name",
archive.Entries.Select(entry => entry.Key)
);
Assert.Contains(
"Directory/Directory with veeeeeeeeeery loooooooooong name/Directory with veeeeeeeeeery loooooooooong name/",
archive.Entries.Select(entry => entry.Key)
);
Assert.Contains(
"Directory/Directory with veeeeeeeeeery loooooooooong name/Directory with veeeeeeeeeery loooooooooong name/Some file with veeeeeeeeeery loooooooooong name",
archive.Entries.Select(entry => entry.Key)
);
}
[Fact]
public void Tar_Create_New()
{
var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.tar");
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
// var aropt = new Ar
using (var archive = TarArchive.Create())
{
archive.AddAllFromDirectory(ORIGINAL_FILES_PATH);
var twopt = new TarWriterOptions(CompressionType.None, true);
twopt.ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(866) };
archive.SaveTo(scratchPath, twopt);
}
CompareArchivesByPath(unmodified, scratchPath);
}
[Fact]
public void Tar_Random_Write_Add()
{
var jpg = Path.Combine(ORIGINAL_FILES_PATH, "jpg", "test.jpg");
var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.mod.tar");
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.mod.tar");
var modified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
using (var archive = TarArchive.Open(unmodified))
{
archive.AddEntry("jpg\\test.jpg", jpg);
archive.SaveTo(scratchPath, CompressionType.None);
}
CompareArchivesByPath(modified, scratchPath);
}
[Fact]
public void Tar_Random_Write_Remove()
{
var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.mod.tar");
var modified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.mod.tar");
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
using (var archive = TarArchive.Open(unmodified))
{
var entry = archive.Entries.Single(x =>
x.Key.NotNull().EndsWith("jpg", StringComparison.OrdinalIgnoreCase)
);
archive.RemoveEntry(entry);
archive.SaveTo(scratchPath, CompressionType.None);
}
CompareArchivesByPath(modified, scratchPath);
}
[Fact]
public void Tar_Containing_Rar_Archive()
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.ContainsRar.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var archive = ArchiveFactory.Open(stream);
Assert.True(archive.Type == ArchiveType.Tar);
}
[Fact]
public void Tar_Empty_Archive()
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.Empty.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var archive = ArchiveFactory.Open(stream);
Assert.True(archive.Type == ArchiveType.Tar);
}
[Theory]
[InlineData(10)]
[InlineData(128)]
public void Tar_Japanese_Name(int length)
{
using var mstm = new MemoryStream();
var enc = new ArchiveEncoding { Default = Encoding.UTF8 };
var twopt = new TarWriterOptions(CompressionType.None, true);
twopt.ArchiveEncoding = enc;
var fname = new string((char)0x3042, length);
using (var tw = new TarWriter(mstm, twopt))
using (var input = new MemoryStream(new byte[32]))
{
tw.Write(fname, input, null);
}
using (var inputMemory = new MemoryStream(mstm.ToArray()))
{
var tropt = new ReaderOptions { ArchiveEncoding = enc };
using (var tr = TarReader.Open(inputMemory, tropt))
{
while (tr.MoveToNextEntry())
{
Assert.Equal(fname, tr.Entry.Key);
}
}
}
}
[Fact]
public void Tar_Read_One_At_A_Time()
{
var archiveEncoding = new ArchiveEncoding { Default = Encoding.UTF8 };
var tarWriterOptions = new TarWriterOptions(CompressionType.None, true)
{
ArchiveEncoding = archiveEncoding,
};
var testBytes = Encoding.UTF8.GetBytes("This is a test.");
using var memoryStream = new MemoryStream();
using (var tarWriter = new TarWriter(memoryStream, tarWriterOptions))
using (var testFileStream = new MemoryStream(testBytes))
{
tarWriter.Write("test1.txt", testFileStream);
testFileStream.Position = 0;
tarWriter.Write("test2.txt", testFileStream);
}
memoryStream.Position = 0;
var numberOfEntries = 0;
using (var archiveFactory = TarArchive.Open(memoryStream))
{
foreach (var entry in archiveFactory.Entries)
{
++numberOfEntries;
using var tarEntryStream = entry.OpenEntryStream();
using var testFileStream = new MemoryStream();
tarEntryStream.CopyTo(testFileStream);
Assert.Equal(testBytes.Length, testFileStream.Length);
}
}
Assert.Equal(2, numberOfEntries);
}
[Fact]
public void Tar_Detect_Test()
{
var isTar = TarArchive.IsTarFile(Path.Combine(TEST_ARCHIVES_PATH, "false.positive.tar"));
Assert.False(isTar);
}*/
}
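
The entire TarArchiveTests body above is fenced in a /* ... */ block rather than converted, presumably pending the same async migration. Assuming the pattern used in the other archive test classes, the simple read tests would come back as:

[Fact]
public Task TarArchiveStreamRead() => ArchiveStreamReadAsync("Tar.tar");

[Fact]
public Task TarArchivePathRead() => ArchiveFileReadAsync("Tar.tar");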

View File

@@ -12,255 +12,255 @@ namespace SharpCompress.Test.Tar;
public class TarReaderTests : ReaderTests
{
public TarReaderTests() => UseExtensionInsteadOfNameToVerify = true;
[Fact]
public void Tar_Reader() => Read("Tar.tar", CompressionType.None);
[Fact]
public void Tar_Skip()
{
using Stream stream = new ForwardOnlyStream(
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar"))
);
using var reader = ReaderFactory.Open(stream);
var x = 0;
while (reader.MoveToNextEntry())
/*
[Fact]
public void Tar_Reader() => Read("Tar.tar", CompressionType.None);
[Fact]
public void Tar_Skip()
{
if (!reader.Entry.IsDirectory)
using Stream stream = new ForwardOnlyStream(
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar"))
);
using var reader = ReaderFactory.Open(stream);
var x = 0;
while (reader.MoveToNextEntry())
{
x++;
if (x % 2 == 0)
if (!reader.Entry.IsDirectory)
{
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
x++;
if (x % 2 == 0)
{
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
}
}
}
[Fact]
public void Tar_Z_Reader() => Read("Tar.tar.Z", CompressionType.Lzw);
[Fact]
public void Tar_BZip2_Reader() => Read("Tar.tar.bz2", CompressionType.BZip2);
[Fact]
public void Tar_GZip_Reader() => Read("Tar.tar.gz", CompressionType.GZip);
[Fact]
public void Tar_ZStandard_Reader() => Read("Tar.tar.zst", CompressionType.ZStandard);
[Fact]
public void Tar_LZip_Reader() => Read("Tar.tar.lz", CompressionType.LZip);
[Fact]
public void Tar_Xz_Reader() => Read("Tar.tar.xz", CompressionType.Xz);
[Fact]
public void Tar_GZip_OldGnu_Reader() => Read("Tar.oldgnu.tar.gz", CompressionType.GZip);
[Fact]
public void Tar_BZip2_Entry_Stream()
{
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2")))
using (var reader = TarReader.Open(stream))
[Fact]
public void Tar_Z_Reader() => Read("Tar.tar.Z", CompressionType.Lzw);
[Fact]
public void Tar_BZip2_Reader() => Read("Tar.tar.bz2", CompressionType.BZip2);
[Fact]
public void Tar_GZip_Reader() => Read("Tar.tar.gz", CompressionType.GZip);
[Fact]
public void Tar_ZStandard_Reader() => Read("Tar.tar.zst", CompressionType.ZStandard);
[Fact]
public void Tar_LZip_Reader() => Read("Tar.tar.lz", CompressionType.LZip);
[Fact]
public void Tar_Xz_Reader() => Read("Tar.tar.xz", CompressionType.Xz);
[Fact]
public void Tar_GZip_OldGnu_Reader() => Read("Tar.oldgnu.tar.gz", CompressionType.GZip);
[Fact]
public void Tar_BZip2_Entry_Stream()
{
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2")))
using (var reader = TarReader.Open(stream))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(CompressionType.BZip2, reader.Entry.CompressionType);
using var entryStream = reader.OpenEntryStream();
var file = Path.GetFileName(reader.Entry.Key);
var folder =
Path.GetDirectoryName(reader.Entry.Key)
?? throw new ArgumentNullException();
var destdir = Path.Combine(SCRATCH_FILES_PATH, folder);
if (!Directory.Exists(destdir))
{
Directory.CreateDirectory(destdir);
}
var destinationFileName = Path.Combine(destdir, file.NotNull());
using var fs = File.OpenWrite(destinationFileName);
entryStream.CopyTo(fs);
}
}
}
VerifyFiles();
}
[Fact]
public void Tar_LongNamesWithLongNameExtension()
{
var filePaths = new List<string>();
using (
Stream stream = File.OpenRead(
Path.Combine(TEST_ARCHIVES_PATH, "Tar.LongPathsWithLongNameExtension.tar")
)
)
using (var reader = TarReader.Open(stream))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
filePaths.Add(reader.Entry.Key.NotNull("Entry Key is null"));
}
}
}
Assert.Equal(3, filePaths.Count);
Assert.Contains("a.txt", filePaths);
Assert.Contains(
"wp-content/plugins/gravityformsextend/lib/Aws/Symfony/Component/ClassLoader/Tests/Fixtures/Apc/beta/Apc/ApcPrefixCollision/A/B/Bar.php",
filePaths
);
Assert.Contains(
"wp-content/plugins/gravityformsextend/lib/Aws/Symfony/Component/ClassLoader/Tests/Fixtures/Apc/beta/Apc/ApcPrefixCollision/A/B/Foo.php",
filePaths
);
}
[Fact]
public void Tar_BZip2_Skip_Entry_Stream()
{
using Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2"));
using var reader = TarReader.Open(stream);
var names = new List<string>();
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(CompressionType.BZip2, reader.Entry.CompressionType);
using var entryStream = reader.OpenEntryStream();
var file = Path.GetFileName(reader.Entry.Key);
var folder =
Path.GetDirectoryName(reader.Entry.Key)
?? throw new ArgumentNullException();
var destdir = Path.Combine(SCRATCH_FILES_PATH, folder);
if (!Directory.Exists(destdir))
{
Directory.CreateDirectory(destdir);
}
var destinationFileName = Path.Combine(destdir, file.NotNull());
using var fs = File.OpenWrite(destinationFileName);
entryStream.TransferTo(fs);
entryStream.SkipEntry();
names.Add(reader.Entry.Key.NotNull());
}
}
Assert.Equal(3, names.Count);
}
VerifyFiles();
}
[Fact]
public void Tar_LongNamesWithLongNameExtension()
{
var filePaths = new List<string>();
using (
Stream stream = File.OpenRead(
Path.Combine(TEST_ARCHIVES_PATH, "Tar.LongPathsWithLongNameExtension.tar")
)
)
using (var reader = TarReader.Open(stream))
[Fact]
public void Tar_Containing_Rar_Reader()
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.ContainsRar.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var reader = ReaderFactory.Open(stream);
Assert.True(reader.ArchiveType == ArchiveType.Tar);
}
[Fact]
public void Tar_With_TarGz_With_Flushed_EntryStream()
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.ContainsTarGz.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var reader = ReaderFactory.Open(stream);
Assert.True(reader.MoveToNextEntry());
Assert.Equal("inner.tar.gz", reader.Entry.Key);
using var entryStream = reader.OpenEntryStream();
using var flushingStream = new FlushOnDisposeStream(entryStream);
// Extract inner.tar.gz
using var innerReader = ReaderFactory.Open(flushingStream);
Assert.True(innerReader.MoveToNextEntry());
Assert.Equal("test", innerReader.Entry.Key);
}
[Fact]
public void Tar_Broken_Stream()
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var reader = ReaderFactory.Open(stream);
var memoryStream = new MemoryStream();
Assert.True(reader.MoveToNextEntry());
Assert.True(reader.MoveToNextEntry());
reader.WriteEntryTo(memoryStream);
stream.Close();
Assert.Throws<IncompleteArchiveException>(() => reader.MoveToNextEntry());
}
[Fact]
public void Tar_Corrupted()
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "TarCorrupted.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var reader = ReaderFactory.Open(stream);
var memoryStream = new MemoryStream();
Assert.True(reader.MoveToNextEntry());
Assert.True(reader.MoveToNextEntry());
reader.WriteEntryTo(memoryStream);
stream.Close();
Assert.Throws<IncompleteArchiveException>(() => reader.MoveToNextEntry());
}
#if !NETFRAMEWORK
[Fact]
public void Tar_GZip_With_Symlink_Entries()
{
var isWindows = System.Runtime.InteropServices.RuntimeInformation.IsOSPlatform(
System.Runtime.InteropServices.OSPlatform.Windows
);
using Stream stream = File.OpenRead(
Path.Combine(TEST_ARCHIVES_PATH, "TarWithSymlink.tar.gz")
);
using var reader = TarReader.Open(stream);
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
if (reader.Entry.IsDirectory)
{
filePaths.Add(reader.Entry.Key.NotNull("Entry Key is null"));
continue;
}
}
}
Assert.Equal(3, filePaths.Count);
Assert.Contains("a.txt", filePaths);
Assert.Contains(
"wp-content/plugins/gravityformsextend/lib/Aws/Symfony/Component/ClassLoader/Tests/Fixtures/Apc/beta/Apc/ApcPrefixCollision/A/B/Bar.php",
filePaths
);
Assert.Contains(
"wp-content/plugins/gravityformsextend/lib/Aws/Symfony/Component/ClassLoader/Tests/Fixtures/Apc/beta/Apc/ApcPrefixCollision/A/B/Foo.php",
filePaths
);
}
[Fact]
public void Tar_BZip2_Skip_Entry_Stream()
{
using Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2"));
using var reader = TarReader.Open(stream);
var names = new List<string>();
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(CompressionType.BZip2, reader.Entry.CompressionType);
using var entryStream = reader.OpenEntryStream();
entryStream.SkipEntry();
names.Add(reader.Entry.Key.NotNull());
}
}
Assert.Equal(3, names.Count);
}
[Fact]
public void Tar_Containing_Rar_Reader()
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.ContainsRar.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var reader = ReaderFactory.Open(stream);
Assert.True(reader.ArchiveType == ArchiveType.Tar);
}
[Fact]
public void Tar_With_TarGz_With_Flushed_EntryStream()
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.ContainsTarGz.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var reader = ReaderFactory.Open(stream);
Assert.True(reader.MoveToNextEntry());
Assert.Equal("inner.tar.gz", reader.Entry.Key);
using var entryStream = reader.OpenEntryStream();
using var flushingStream = new FlushOnDisposeStream(entryStream);
// Extract inner.tar.gz
using var innerReader = ReaderFactory.Open(flushingStream);
Assert.True(innerReader.MoveToNextEntry());
Assert.Equal("test", innerReader.Entry.Key);
}
[Fact]
public void Tar_Broken_Stream()
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var reader = ReaderFactory.Open(stream);
var memoryStream = new MemoryStream();
Assert.True(reader.MoveToNextEntry());
Assert.True(reader.MoveToNextEntry());
reader.WriteEntryTo(memoryStream);
stream.Close();
Assert.Throws<IncompleteArchiveException>(() => reader.MoveToNextEntry());
}
[Fact]
public void Tar_Corrupted()
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "TarCorrupted.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var reader = ReaderFactory.Open(stream);
var memoryStream = new MemoryStream();
Assert.True(reader.MoveToNextEntry());
Assert.True(reader.MoveToNextEntry());
reader.WriteEntryTo(memoryStream);
stream.Close();
Assert.Throws<IncompleteArchiveException>(() => reader.MoveToNextEntry());
}
#if !NETFRAMEWORK
[Fact]
public void Tar_GZip_With_Symlink_Entries()
{
var isWindows = System.Runtime.InteropServices.RuntimeInformation.IsOSPlatform(
System.Runtime.InteropServices.OSPlatform.Windows
);
using Stream stream = File.OpenRead(
Path.Combine(TEST_ARCHIVES_PATH, "TarWithSymlink.tar.gz")
);
using var reader = TarReader.Open(stream);
while (reader.MoveToNextEntry())
{
if (reader.Entry.IsDirectory)
{
continue;
}
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions
{
ExtractFullPath = true,
Overwrite = true,
WriteSymbolicLink = (sourcePath, targetPath) =>
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions
{
if (!isWindows)
ExtractFullPath = true,
Overwrite = true,
WriteSymbolicLink = (sourcePath, targetPath) =>
{
var link = new Mono.Unix.UnixSymbolicLinkInfo(sourcePath);
if (File.Exists(sourcePath))
if (!isWindows)
{
link.Delete(); // equivalent to ln -s -f
var link = new Mono.Unix.UnixSymbolicLinkInfo(sourcePath);
if (File.Exists(sourcePath))
{
link.Delete(); // equivalent to ln -s -f
}
link.CreateSymbolicLinkTo(targetPath);
}
link.CreateSymbolicLinkTo(targetPath);
}
},
}
);
if (!isWindows)
{
if (reader.Entry.LinkTarget != null)
{
var path = Path.Combine(SCRATCH_FILES_PATH, reader.Entry.Key.NotNull());
var link = new Mono.Unix.UnixSymbolicLinkInfo(path);
if (link.HasContents)
{
// need to convert the link to an absolute path for comparison
var target = reader.Entry.LinkTarget;
var realTarget = Path.GetFullPath(
Path.Combine($"{Path.GetDirectoryName(path)}", target)
);
Assert.Equal(realTarget, link.GetContents().ToString());
},
}
else
);
if (!isWindows)
{
if (reader.Entry.LinkTarget != null)
{
Assert.True(false, "Symlink has no target");
var path = Path.Combine(SCRATCH_FILES_PATH, reader.Entry.Key.NotNull());
var link = new Mono.Unix.UnixSymbolicLinkInfo(path);
if (link.HasContents)
{
// need to convert the link to an absolute path for comparison
var target = reader.Entry.LinkTarget;
var realTarget = Path.GetFullPath(
Path.Combine($"{Path.GetDirectoryName(path)}", target)
);
Assert.Equal(realTarget, link.GetContents().ToString());
}
else
{
Assert.True(false, "Symlink has no target");
}
}
}
}
}
}
#endif
#endif*/
}
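
Two entry-stream details in the commented-out reader tests are worth noting for the eventual async pass: the custom TransferTo helper is replaced by the BCL Stream.CopyTo, and a partially consumed entry stream must be skipped explicitly so the reader can advance. A minimal sketch:

using var entryStream = reader.OpenEntryStream();
entryStream.CopyTo(destination); // BCL copy, replacing the custom TransferTo
entryStream.SkipEntry(); // discard any remainder before the next MoveToNextEntry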

View File

@@ -1,5 +1,6 @@
using System.IO;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Writers.Tar;
using Xunit;
@@ -20,8 +21,8 @@ public class TarWriterTests : WriterTests
: base(ArchiveType.Tar) => UseExtensionInsteadOfNameToVerify = true;
[Fact]
public void Tar_Writer() =>
Write(
public Task Tar_Writer() =>
WriteAsync(
CompressionType.None,
"Tar.noEmptyDirs.tar",
"Tar.noEmptyDirs.tar",
@@ -29,8 +30,8 @@ public class TarWriterTests : WriterTests
);
[Fact]
public void Tar_BZip2_Writer() =>
Write(
public Task Tar_BZip2_Writer() =>
WriteAsync(
CompressionType.BZip2,
"Tar.noEmptyDirs.tar.bz2",
"Tar.noEmptyDirs.tar.bz2",
@@ -38,8 +39,8 @@ public class TarWriterTests : WriterTests
);
[Fact]
public void Tar_LZip_Writer() =>
Write(
public Task Tar_LZip_Writer() =>
WriteAsync(
CompressionType.LZip,
"Tar.noEmptyDirs.tar.lz",
"Tar.noEmptyDirs.tar.lz",
@@ -47,9 +48,13 @@ public class TarWriterTests : WriterTests
);
[Fact]
public void Tar_Rar_Write() =>
Assert.Throws<InvalidFormatException>(() =>
Write(CompressionType.Rar, "Zip.ppmd.noEmptyDirs.zip", "Zip.ppmd.noEmptyDirs.zip")
public Task Tar_Rar_Write() =>
Assert.ThrowsAsync<InvalidFormatException>(async () =>
await WriteAsync(
CompressionType.Rar,
"Zip.ppmd.noEmptyDirs.zip",
"Zip.ppmd.noEmptyDirs.zip"
)
);
[Theory]

View File

@@ -1,9 +1,9 @@
global using SharpCompress.Helpers;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Readers;
using Xunit;
@@ -203,7 +203,11 @@ public class TestBase : IDisposable
Assert.Equal(fi1.Attributes, fi2.Attributes);
}
protected void CompareArchivesByPath(string file1, string file2, Encoding? encoding = null)
protected async Task CompareArchivesByPathAsync(
string file1,
string file2,
Encoding? encoding = null
)
{
var readerOptions = new ReaderOptions { LeaveStreamOpen = false };
readerOptions.ArchiveEncoding.Default = encoding ?? Encoding.Default;
@@ -214,13 +218,13 @@ public class TestBase : IDisposable
using (var archive1 = ReaderFactory.Open(File.OpenRead(file1), readerOptions))
using (var archive2 = ReaderFactory.Open(File.OpenRead(file2), readerOptions))
{
while (archive1.MoveToNextEntry())
while (await archive1.MoveToNextEntryAsync())
{
Assert.True(archive2.MoveToNextEntry());
Assert.True(await archive2.MoveToNextEntryAsync());
archive1Entries.Add(archive1.Entry.Key.NotNull());
archive2Entries.Add(archive2.Entry.Key.NotNull());
}
Assert.False(archive2.MoveToNextEntry());
Assert.False(await archive2.MoveToNextEntryAsync());
}
archive1Entries.Sort();
archive2Entries.Sort();

View File

@@ -0,0 +1,754 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using Xunit;
namespace SharpCompress.Test;
public class UtilityTests
{
#region URShift Tests
[Fact]
public void URShift_Int_PositiveNumber_ShiftsCorrectly()
{
var result = Utility.URShift(16, 2);
Assert.Equal(4, result);
}
[Fact]
public void URShift_Int_NegativeNumber_PerformsUnsignedShift()
{
// -1 in binary is all 1s (0xFFFFFFFF), shifted right by 1 should be 0x7FFFFFFF
var result = Utility.URShift(-1, 1);
Assert.Equal(int.MaxValue, result);
}
[Fact]
public void URShift_Int_Zero_ReturnsZero()
{
var result = Utility.URShift(0, 5);
Assert.Equal(0, result);
}
[Fact]
public void URShift_Long_PositiveNumber_ShiftsCorrectly()
{
var result = Utility.URShift(32L, 3);
Assert.Equal(4L, result);
}
[Fact]
public void URShift_Long_NegativeNumber_PerformsUnsignedShift()
{
var result = Utility.URShift(-1L, 1);
Assert.Equal(long.MaxValue, result);
}
[Fact]
public void URShift_Long_Zero_ReturnsZero()
{
var result = Utility.URShift(0L, 10);
Assert.Equal(0L, result);
}
#endregion
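// For reference, a minimal sketch of the unsigned-right-shift semantics these
// tests pin down (assumed equivalent, not the library's actual implementation;
// C# only gained a built-in >>> operator in C# 11, hence the helper):
private static int URShiftSketch(int value, int shift) => (int)((uint)value >> shift);
// e.g. URShiftSketch(-1, 1) == int.MaxValue, matching the assertion above.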
#region ReadFully Tests
[Fact]
public void ReadFully_ByteArray_ReadsExactlyRequiredBytes()
{
var data = new byte[] { 1, 2, 3, 4, 5 };
using var stream = new MemoryStream(data);
var buffer = new byte[5];
var result = stream.ReadFully(buffer);
Assert.True(result);
Assert.Equal(data, buffer);
}
[Fact]
public void ReadFully_ByteArray_ReturnsFalseWhenNotEnoughData()
{
var data = new byte[] { 1, 2, 3 };
using var stream = new MemoryStream(data);
var buffer = new byte[5];
var result = stream.ReadFully(buffer);
Assert.False(result);
}
[Fact]
public void ReadFully_ByteArray_EmptyStream_ReturnsFalse()
{
using var stream = new MemoryStream();
var buffer = new byte[5];
var result = stream.ReadFully(buffer);
Assert.False(result);
}
[Fact]
public void ReadFully_ByteArray_EmptyBuffer_ReturnsTrue()
{
var data = new byte[] { 1, 2, 3 };
using var stream = new MemoryStream(data);
var buffer = new byte[0];
var result = stream.ReadFully(buffer);
Assert.True(result);
}
[Fact]
public void ReadFully_Span_ReadsExactlyRequiredBytes()
{
var data = new byte[] { 1, 2, 3, 4, 5 };
using var stream = new MemoryStream(data);
Span<byte> buffer = new byte[5];
var result = stream.ReadFully(buffer);
Assert.True(result);
Assert.Equal(data, buffer.ToArray());
}
[Fact]
public void ReadFully_Span_ReturnsFalseWhenNotEnoughData()
{
var data = new byte[] { 1, 2, 3 };
using var stream = new MemoryStream(data);
Span<byte> buffer = new byte[5];
var result = stream.ReadFully(buffer);
Assert.False(result);
}
[Fact]
public void ReadFully_Span_EmptyStream_ReturnsFalse()
{
using var stream = new MemoryStream();
Span<byte> buffer = new byte[5];
var result = stream.ReadFully(buffer);
Assert.False(result);
}
[Fact]
public void ReadFully_Span_EmptyBuffer_ReturnsTrue()
{
var data = new byte[] { 1, 2, 3 };
using var stream = new MemoryStream(data);
Span<byte> buffer = new byte[0];
var result = stream.ReadFully(buffer);
Assert.True(result);
}
#endregion
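// ReadFully's contract as pinned down above: loop over Stream.Read until the
// buffer fills, returning false on a short read; an empty buffer trivially
// succeeds. A sketch (assumed, not the library's actual implementation):
private static bool ReadFullySketch(Stream stream, byte[] buffer)
{
var total = 0;
while (total < buffer.Length)
{
var read = stream.Read(buffer, total, buffer.Length - total);
if (read <= 0)
{
return false; // stream ended before the buffer filled
}
total += read;
}
return true;
}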
#region Skip Tests
[Fact]
public void Skip_SeekableStream_UsesSeek()
{
var data = new byte[] { 1, 2, 3, 4, 5 };
using var stream = new MemoryStream(data);
stream.Skip(3);
Assert.Equal(3, stream.Position);
}
[Fact]
public void Skip_SeekableStream_SkipsCorrectAmount()
{
var data = new byte[] { 1, 2, 3, 4, 5 };
using var stream = new MemoryStream(data);
stream.Skip(2);
var buffer = new byte[2];
stream.Read(buffer);
Assert.Equal(new byte[] { 3, 4 }, buffer);
}
[Fact]
public void Skip_NonSeekableStream_SkipsCorrectAmount()
{
var data = new byte[] { 1, 2, 3, 4, 5 };
using var seekableStream = new MemoryStream(data);
using var nonSeekableStream = new NonSeekableStream(seekableStream);
nonSeekableStream.Skip(2);
var buffer = new byte[2];
nonSeekableStream.Read(buffer);
Assert.Equal(new byte[] { 3, 4 }, buffer);
}
[Fact]
public void Skip_NonSeekableStream_SkipsBeyondStreamEnd()
{
var data = new byte[] { 1, 2, 3 };
using var seekableStream = new MemoryStream(data);
using var nonSeekableStream = new NonSeekableStream(seekableStream);
// Should not throw, just skip what's available
nonSeekableStream.Skip(10);
Assert.Equal(-1, nonSeekableStream.ReadByte());
}
[Fact]
public void Skip_Parameterless_SkipsEntireStream()
{
var data = new byte[] { 1, 2, 3, 4, 5 };
using var stream = new MemoryStream(data);
stream.Skip();
Assert.Equal(-1, stream.ReadByte());
}
[Fact]
public void Skip_Zero_DoesNotMove()
{
var data = new byte[] { 1, 2, 3, 4, 5 };
using var stream = new MemoryStream(data);
stream.Position = 2;
stream.Skip(0);
Assert.Equal(2, stream.Position);
}
#endregion
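// Skip's contract per these tests: seek when CanSeek, otherwise read and
// discard; skipping past end-of-stream stops quietly at EOF. A sketch
// (assumed, not the actual implementation):
private static void SkipSketch(Stream stream, long count)
{
if (stream.CanSeek)
{
stream.Seek(count, SeekOrigin.Current);
return;
}
var scratch = new byte[81920];
while (count > 0)
{
var read = stream.Read(scratch, 0, (int)Math.Min(count, scratch.Length));
if (read <= 0)
{
return; // EOF before count bytes were skipped
}
count -= read;
}
}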
#region SetSize Tests
[Fact]
public void SetSize_GrowsList_AddsZeroBytes()
{
var list = new List<byte> { 1, 2, 3 };
Utility.SetSize(list, 5);
Assert.Equal(5, list.Count);
Assert.Equal(new byte[] { 1, 2, 3, 0, 0 }, list);
}
[Fact]
public void SetSize_ShrinksListByOne()
{
var list = new List<byte> { 1, 2, 3, 4, 5 };
Utility.SetSize(list, 3);
Assert.Equal(3, list.Count);
Assert.Equal(new byte[] { 1, 2, 3 }, list);
}
[Fact]
public void SetSize_ToZero_ClearsAllItems()
{
var list = new List<byte> { 1, 2, 3 };
Utility.SetSize(list, 0);
Assert.Empty(list);
}
[Fact]
public void SetSize_SameSize_NoChange()
{
var list = new List<byte> { 1, 2, 3 };
Utility.SetSize(list, 3);
Assert.Equal(3, list.Count);
Assert.Equal(new byte[] { 1, 2, 3 }, list);
}
#endregion
#region ForEach Tests
[Fact]
public void ForEach_ExecutesActionForEachItem()
{
var items = new[] { 1, 2, 3, 4, 5 };
var results = new List<int>();
items.ForEach(x => results.Add(x));
Assert.Equal(items, results);
}
[Fact]
public void ForEach_EmptyCollection_NoExecutions()
{
var items = Array.Empty<int>();
var count = 0;
items.ForEach(x => count++);
Assert.Equal(0, count);
}
#endregion
#region AsEnumerable Tests
[Fact]
public void AsEnumerable_SingleItem_YieldsItem()
{
var item = 42;
var result = item.AsEnumerable().ToList();
Assert.Single(result);
Assert.Equal(42, result[0]);
}
[Fact]
public void AsEnumerable_String_YieldsString()
{
var item = "test";
var result = item.AsEnumerable().ToList();
Assert.Single(result);
Assert.Equal("test", result[0]);
}
#endregion
#region DosDateToDateTime Tests
[Fact]
public void DosDateToDateTime_ValidDate_ConvertsCorrectly()
{
// DOS date format: year (7 bits) | month (4 bits) | day (5 bits)
// DOS time format: hour (5 bits) | minute (6 bits) | second (5 bits, in 2-second increments)
// This represents 2020-01-15 10:30:20 exactly (the seconds field stores 10, i.e. 20 / 2)
ushort dosDate = (ushort)(((2020 - 1980) << 9) | (1 << 5) | 15); // 2020-01-15
ushort dosTime = (ushort)((10 << 11) | (30 << 5) | 10); // 10:30:20
var result = Utility.DosDateToDateTime(dosDate, dosTime);
Assert.Equal(2020, result.Year);
Assert.Equal(1, result.Month);
Assert.Equal(15, result.Day);
Assert.Equal(10, result.Hour);
Assert.Equal(30, result.Minute);
Assert.Equal(20, result.Second);
}
[Fact]
public void DosDateToDateTime_InvalidDate_DefaultsTo1980_01_01()
{
ushort dosDate = ushort.MaxValue;
ushort dosTime = (ushort)((10 << 11) | (30 << 5) | 10);
var result = Utility.DosDateToDateTime(dosDate, dosTime);
Assert.Equal(1980, result.Year);
Assert.Equal(1, result.Month);
Assert.Equal(1, result.Day);
}
[Fact]
public void DosDateToDateTime_InvalidTime_DefaultsToMidnight()
{
ushort dosDate = (ushort)(((2020 - 1980) << 9) | (1 << 5) | 15);
ushort dosTime = ushort.MaxValue;
var result = Utility.DosDateToDateTime(dosDate, dosTime);
Assert.Equal(0, result.Hour);
Assert.Equal(0, result.Minute);
Assert.Equal(0, result.Second);
}
[Fact]
public void DosDateToDateTime_FromUint_ConvertsCorrectly()
{
ushort dosDate = (ushort)(((2020 - 1980) << 9) | (6 << 5) | 20); // 2020-06-20
ushort dosTime = (ushort)((14 << 11) | (45 << 5) | 15); // 14:45:30
uint combined = (uint)(dosDate << 16) | dosTime;
var result = Utility.DosDateToDateTime(combined);
Assert.Equal(2020, result.Year);
Assert.Equal(6, result.Month);
Assert.Equal(20, result.Day);
Assert.Equal(14, result.Hour);
Assert.Equal(45, result.Minute);
}
#endregion
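// A sketch of the bit unpacking these tests rely on, assuming the layout in the
// comments above; out-of-range fields would explain the 1980-01-01 and midnight
// fallbacks (illustrative only, not the library's code):
private static DateTime DecodeDosDateTime(ushort dosDate, ushort dosTime)
{
var year = 1980 + ((dosDate >> 9) & 0x7F); // 7 bits
var month = (dosDate >> 5) & 0x0F; // 4 bits
var day = dosDate & 0x1F; // 5 bits
var hour = (dosTime >> 11) & 0x1F; // 5 bits
var minute = (dosTime >> 5) & 0x3F; // 6 bits
var second = (dosTime & 0x1F) * 2; // 5 bits, stored in 2-second units
return new DateTime(year, month, day, hour, minute, second);
}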
#region DateTimeToDosTime Tests
[Fact]
public void DateTimeToDosTime_ValidDateTime_ConvertsCorrectly()
{
// DOS timestamps are always local time
var dt = new DateTime(2020, 6, 15, 14, 30, 20, DateTimeKind.Local);
var result = Utility.DateTimeToDosTime(dt);
// Verify we can convert back
var reversed = Utility.DosDateToDateTime(result);
Assert.Equal(2020, reversed.Year);
Assert.Equal(6, reversed.Month);
Assert.Equal(15, reversed.Day);
Assert.Equal(14, reversed.Hour);
Assert.Equal(30, reversed.Minute);
// Seconds are rounded down to nearest even number in DOS format
Assert.True(reversed.Second == 20 || reversed.Second == 18);
}
[Fact]
public void DateTimeToDosTime_NullDateTime_ReturnsZero()
{
DateTime? dt = null;
var result = Utility.DateTimeToDosTime(dt);
Assert.Equal(0u, result);
}
#endregion
#region UnixTimeToDateTime Tests
[Fact]
public void UnixTimeToDateTime_Zero_Returns1970_01_01()
{
var result = Utility.UnixTimeToDateTime(0);
Assert.Equal(1970, result.Year);
Assert.Equal(1, result.Month);
Assert.Equal(1, result.Day);
Assert.Equal(0, result.Hour);
Assert.Equal(0, result.Minute);
Assert.Equal(0, result.Second);
}
[Fact]
public void UnixTimeToDateTime_ValidTimestamp_ConvertsCorrectly()
{
// January 1, 2000 00:00:00 UTC is 946684800 seconds after epoch
var result = Utility.UnixTimeToDateTime(946684800);
Assert.Equal(2000, result.Year);
Assert.Equal(1, result.Month);
Assert.Equal(1, result.Day);
}
[Fact]
public void UnixTimeToDateTime_NegativeTimestamp_ReturnsBeforeEpoch()
{
// -86400 is one day before epoch
var result = Utility.UnixTimeToDateTime(-86400);
Assert.Equal(1969, result.Year);
Assert.Equal(12, result.Month);
Assert.Equal(31, result.Day);
}
#endregion
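// Unix time counts seconds from the 1970-01-01T00:00:00Z epoch, so the conversion
// under test should be equivalent to this one-liner; negative values land before
// the epoch, matching the last test (sketch only):
private static DateTime FromUnixSeconds(long seconds) =>
new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc).AddSeconds(seconds);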
#region TransferTo Tests
[Fact]
public void TransferTo_WithMaxLength_TransfersCorrectAmount()
{
var sourceData = new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
using var source = new MemoryStream(sourceData);
using var destination = new MemoryStream();
var transferred = source.TransferTo(destination, 5);
Assert.Equal(5, transferred);
Assert.Equal(new byte[] { 1, 2, 3, 4, 5 }, destination.ToArray());
}
[Fact]
public void TransferTo_SourceSmallerThanMax_TransfersAll()
{
var sourceData = new byte[] { 1, 2, 3 };
using var source = new MemoryStream(sourceData);
using var destination = new MemoryStream();
var transferred = source.TransferTo(destination, 100);
Assert.Equal(3, transferred);
Assert.Equal(sourceData, destination.ToArray());
}
[Fact]
public void TransferTo_EmptySource_TransfersNothing()
{
using var source = new MemoryStream();
using var destination = new MemoryStream();
var transferred = source.TransferTo(destination, 100);
Assert.Equal(0, transferred);
Assert.Empty(destination.ToArray());
}
#endregion
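// TransferTo with a max length behaves like a bounded copy: each read is capped
// by the bytes remaining, and a short source just transfers what it has. A sketch
// (hypothetical helper, not the library's code):
private static long BoundedCopy(Stream source, Stream destination, long maxLength)
{
var buffer = new byte[81920];
long total = 0;
while (total < maxLength)
{
var read = source.Read(buffer, 0, (int)Math.Min(buffer.Length, maxLength - total));
if (read == 0)
{
break; // source exhausted before reaching maxLength
}
destination.Write(buffer, 0, read);
total += read;
}
return total;
}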
#region SwapUINT32 Tests
[Fact]
public void SwapUINT32_SimpleValue_SwapsEndianness()
{
uint value = 0x12345678;
var result = Utility.SwapUINT32(value);
Assert.Equal(0x78563412u, result);
}
[Fact]
public void SwapUINT32_Zero_ReturnsZero()
{
var result = Utility.SwapUINT32(0);
Assert.Equal(0u, result);
}
[Fact]
public void SwapUINT32_MaxValue_SwapsCorrectly()
{
var result = Utility.SwapUINT32(uint.MaxValue);
Assert.Equal(uint.MaxValue, result);
}
[Fact]
public void SwapUINT32_Involution_SwappingTwiceReturnsOriginal()
{
uint value = 0x12345678;
var result = Utility.SwapUINT32(Utility.SwapUINT32(value));
Assert.Equal(value, result);
}
#endregion
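// A 32-bit endianness swap is four masks and shifts; applying it twice is the
// identity, which the involution test above verifies. Illustrative version:
private static uint SwapBytes(uint value) =>
((value & 0x000000FFu) << 24)
| ((value & 0x0000FF00u) << 8)
| ((value & 0x00FF0000u) >> 8)
| ((value & 0xFF000000u) >> 24);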
#region SetLittleUInt32 Tests
[Fact]
public void SetLittleUInt32_InsertsValueCorrectly()
{
byte[] buffer = new byte[10];
uint value = 0x12345678;
Utility.SetLittleUInt32(ref buffer, value, 2);
Assert.Equal(0x78, buffer[2]);
Assert.Equal(0x56, buffer[3]);
Assert.Equal(0x34, buffer[4]);
Assert.Equal(0x12, buffer[5]);
}
[Fact]
public void SetLittleUInt32_AtOffset_InsertsBehindOffset()
{
byte[] buffer = new byte[10];
uint value = 0xDEADBEEF;
Utility.SetLittleUInt32(ref buffer, value, 5);
Assert.Equal(0xEF, buffer[5]);
Assert.Equal(0xBE, buffer[6]);
Assert.Equal(0xAD, buffer[7]);
Assert.Equal(0xDE, buffer[8]);
}
#endregion
#region SetBigUInt32 Tests
[Fact]
public void SetBigUInt32_InsertsValueCorrectly()
{
byte[] buffer = new byte[10];
uint value = 0x12345678;
Utility.SetBigUInt32(ref buffer, value, 2);
Assert.Equal(0x12, buffer[2]);
Assert.Equal(0x34, buffer[3]);
Assert.Equal(0x56, buffer[4]);
Assert.Equal(0x78, buffer[5]);
}
[Fact]
public void SetBigUInt32_AtOffset_InsertsBehindOffset()
{
byte[] buffer = new byte[10];
uint value = 0xDEADBEEF;
Utility.SetBigUInt32(ref buffer, value, 5);
Assert.Equal(0xDE, buffer[5]);
Assert.Equal(0xAD, buffer[6]);
Assert.Equal(0xBE, buffer[7]);
Assert.Equal(0xEF, buffer[8]);
}
#endregion
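// The two writers differ only in byte order at the offset. Manual equivalents of
// what the assertions above expect (sketches; BinaryPrimitives offers the same
// operations on newer targets):
private static void WriteUInt32LittleEndianSketch(byte[] buffer, uint value, int offset)
{
buffer[offset] = (byte)value;
buffer[offset + 1] = (byte)(value >> 8);
buffer[offset + 2] = (byte)(value >> 16);
buffer[offset + 3] = (byte)(value >> 24);
}
private static void WriteUInt32BigEndianSketch(byte[] buffer, uint value, int offset)
{
buffer[offset] = (byte)(value >> 24);
buffer[offset + 1] = (byte)(value >> 16);
buffer[offset + 2] = (byte)(value >> 8);
buffer[offset + 3] = (byte)value;
}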
#region ReplaceInvalidFileNameChars Tests
#if WINDOWS
[Theory]
[InlineData("valid_filename.txt", "valid_filename.txt")]
[InlineData("file<name>test.txt", "file_name_test.txt")]
[InlineData("<>:\"|?*", "_______")]
public void ReplaceInvalidFileNameChars_Windows(string fileName, string expected)
{
var result = Utility.ReplaceInvalidFileNameChars(fileName);
Assert.Equal(expected, result);
}
#else
[Theory]
[InlineData("valid_filename.txt", "valid_filename.txt")]
[InlineData("file<name>test.txt", "file<name>test.txt")]
[InlineData("<>:\"|?*", "<>:\"|?*")]
public void ReplaceInvalidFileNameChars_NonWindows(string fileName, string expected)
{
var result = Utility.ReplaceInvalidFileNameChars(fileName);
Assert.Equal(expected, result);
}
#endif
#endregion
#region ToReadOnly Tests
[Fact]
public void ToReadOnly_IList_ReturnsReadOnlyCollection()
{
var list = new List<int> { 1, 2, 3, 4, 5 };
var result = list.ToReadOnly();
Assert.Equal(5, result.Count);
Assert.Equal(1, result[0]);
Assert.Equal(5, result[4]);
}
[Fact]
public void ToReadOnly_EmptyList_ReturnsEmptyReadOnlyCollection()
{
var list = new List<int>();
var result = list.ToReadOnly();
Assert.Empty(result);
}
#endregion
#region TrimNulls Tests
[Fact]
public void TrimNulls_StringWithNulls_ReplacesAndTrims()
{
var input = " hello\0world\0 ";
var result = Utility.TrimNulls(input);
Assert.Equal("hello world", result);
}
[Fact]
public void TrimNulls_StringWithoutNulls_TrimsWhitespace()
{
var input = " hello world ";
var result = Utility.TrimNulls(input);
Assert.Equal("hello world", result);
}
[Fact]
public void TrimNulls_OnlyNulls_ReturnsEmpty()
{
var input = "\0\0\0";
var result = Utility.TrimNulls(input);
Assert.Empty(result);
}
#endregion
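// All three TrimNulls cases above are consistent with replacing NUL characters
// with spaces and then trimming the ends (sketch, not necessarily the real code):
private static string TrimNullsSketch(string value) => value.Replace('\0', ' ').Trim();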
}
/// <summary>
/// Helper class for testing non-seekable streams
/// </summary>
internal class NonSeekableStream : Stream
{
private readonly Stream _inner;
public NonSeekableStream(Stream inner)
{
_inner = inner;
}
public override bool CanRead => _inner.CanRead;
public override bool CanSeek => false; // Force non-seekable
public override bool CanWrite => _inner.CanWrite;
public override long Length => _inner.Length;
public override long Position
{
get => _inner.Position;
set => throw new NotSupportedException("Stream is not seekable");
}
public override void Flush() => _inner.Flush();
public override int Read(byte[] buffer, int offset, int count) =>
_inner.Read(buffer, offset, count);
public override long Seek(long offset, SeekOrigin origin) =>
throw new NotSupportedException("Stream is not seekable");
public override void SetLength(long value) =>
throw new NotSupportedException("Stream is not seekable");
public override void Write(byte[] buffer, int offset, int count) =>
_inner.Write(buffer, offset, count);
protected override void Dispose(bool disposing)
{
if (disposing)
{
_inner.Dispose();
}
base.Dispose(disposing);
}
}

View File

@@ -1,5 +1,6 @@
using System.IO;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -13,7 +14,7 @@ public class WriterTests : TestBase
protected WriterTests(ArchiveType type) => _type = type;
protected void Write(
protected async Task WriteAsync(
CompressionType compressionType,
string archive,
string archiveToVerifyAgainst,
@@ -29,7 +30,8 @@ public class WriterTests : TestBase
using var writer = WriterFactory.Open(stream, _type, writerOptions);
writer.WriteAll(ORIGINAL_FILES_PATH, "*", SearchOption.AllDirectories);
}
CompareArchivesByPath(
await CompareArchivesByPathAsync(
Path.Combine(SCRATCH2_FILES_PATH, archive),
Path.Combine(TEST_ARCHIVES_PATH, archiveToVerifyAgainst)
);
@@ -44,7 +46,7 @@ public class WriterTests : TestBase
SharpCompressStream.Create(stream, leaveOpen: true),
readerOptions
);
reader.WriteAllToDirectory(
await reader.WriteAllToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true }
);

View File

@@ -1,6 +1,7 @@
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.Common.Zip;
@@ -23,67 +24,67 @@ public class Zip64Tests : WriterTests
private const long FOUR_GB_LIMIT = ((long)uint.MaxValue) + 1;
[Trait("format", "zip64")]
public void Zip64_Single_Large_File() =>
public Task Zip64_Single_Large_File() =>
// One single file, requires zip64
RunSingleTest(1, FOUR_GB_LIMIT, setZip64: true, forwardOnly: false);
RunSingleTestAsync(1, FOUR_GB_LIMIT, setZip64: true, forwardOnly: false);
[Trait("format", "zip64")]
public void Zip64_Two_Large_Files() =>
public Task Zip64_Two_Large_Files() =>
// Two large files, requires zip64
RunSingleTest(2, FOUR_GB_LIMIT, setZip64: true, forwardOnly: false);
RunSingleTestAsync(2, FOUR_GB_LIMIT, setZip64: true, forwardOnly: false);
[Trait("format", "zip64")]
public void Zip64_Two_Small_files() =>
public Task Zip64_Two_Small_files() =>
// Multiple files, does not require zip64
RunSingleTest(2, FOUR_GB_LIMIT / 2, setZip64: false, forwardOnly: false);
RunSingleTestAsync(2, FOUR_GB_LIMIT / 2, setZip64: false, forwardOnly: false);
[Trait("format", "zip64")]
public void Zip64_Two_Small_files_stream() =>
public Task Zip64_Two_Small_files_stream() =>
// Multiple files, does not require zip64, and works with streams
RunSingleTest(2, FOUR_GB_LIMIT / 2, setZip64: false, forwardOnly: true);
RunSingleTestAsync(2, FOUR_GB_LIMIT / 2, setZip64: false, forwardOnly: true);
[Trait("format", "zip64")]
public void Zip64_Two_Small_Files_Zip64() =>
public Task Zip64_Two_Small_Files_Zip64() =>
// Multiple files, use zip64 even though it is not required
RunSingleTest(2, FOUR_GB_LIMIT / 2, setZip64: true, forwardOnly: false);
RunSingleTestAsync(2, FOUR_GB_LIMIT / 2, setZip64: true, forwardOnly: false);
[Trait("format", "zip64")]
public void Zip64_Single_Large_File_Fail()
public async Task Zip64_Single_Large_File_Fail()
{
try
{
// One single file, should fail
RunSingleTest(1, FOUR_GB_LIMIT, setZip64: false, forwardOnly: false);
await RunSingleTestAsync(1, FOUR_GB_LIMIT, setZip64: false, forwardOnly: false);
throw new InvalidOperationException("Test did not fail?");
}
catch (NotSupportedException) { }
}
[Trait("zip64", "true")]
public void Zip64_Single_Large_File_Zip64_Streaming_Fail()
public async Task Zip64_Single_Large_File_Zip64_Streaming_Fail()
{
try
{
// One single file, should fail (fast) with zip64
RunSingleTest(1, FOUR_GB_LIMIT, setZip64: true, forwardOnly: true);
await RunSingleTestAsync(1, FOUR_GB_LIMIT, setZip64: true, forwardOnly: true);
throw new InvalidOperationException("Test did not fail?");
}
catch (NotSupportedException) { }
}
[Trait("zip64", "true")]
public void Zip64_Single_Large_File_Streaming_Fail()
public async Task Zip64_Single_Large_File_Streaming_Fail()
{
try
{
// One single file, should fail once the write discovers the problem
RunSingleTest(1, FOUR_GB_LIMIT, setZip64: false, forwardOnly: true);
await RunSingleTestAsync(1, FOUR_GB_LIMIT, setZip64: false, forwardOnly: true);
throw new InvalidOperationException("Test did not fail?");
}
catch (NotSupportedException) { }
}
public void RunSingleTest(
public async Task RunSingleTestAsync(
long files,
long filesize,
bool setZip64,
@@ -104,7 +105,7 @@ public class Zip64Tests : WriterTests
CreateZipArchive(filename, files, filesize, writeChunkSize, setZip64, forwardOnly);
}
var resForward = ReadForwardOnly(filename);
var resForward = await ReadForwardOnlyAsync(filename);
if (resForward.Item1 != files)
{
throw new InvalidOperationException(
@@ -168,7 +169,7 @@ public class Zip64Tests : WriterTests
}
}
public Tuple<long, long> ReadForwardOnly(string filename)
public async Task<Tuple<long, long>> ReadForwardOnlyAsync(string filename)
{
long count = 0;
long size = 0;
@@ -176,9 +177,9 @@ public class Zip64Tests : WriterTests
using (var fs = File.OpenRead(filename))
using (var rd = ZipReader.Open(fs, new ReaderOptions { LookForHeader = false }))
{
while (rd.MoveToNextEntry())
while (await rd.MoveToNextEntryAsync())
{
using (rd.OpenEntryStream()) { }
using (await rd.OpenEntryStreamAsync()) { }
count++;
if (prev != null)

File diff suppressed because it is too large

View File

@@ -17,219 +17,219 @@ namespace SharpCompress.Test.Zip;
public class ZipTypesLevelsWithCrcRatioTests : ArchiveTests
{
public ZipTypesLevelsWithCrcRatioTests() => UseExtensionInsteadOfNameToVerify = true;
[Theory]
[InlineData(CompressionType.Deflate, 1, 1, 0.11f)] // was 0.8f, actual 0.104
[InlineData(CompressionType.Deflate, 3, 1, 0.08f)] // was 0.8f, actual 0.078
[InlineData(CompressionType.Deflate, 6, 1, 0.05f)] // was 0.8f, actual ~0.042
[InlineData(CompressionType.Deflate, 9, 1, 0.04f)] // was 0.7f, actual 0.038
[InlineData(CompressionType.ZStandard, 1, 1, 0.025f)] // was 0.8f, actual 0.023
[InlineData(CompressionType.ZStandard, 3, 1, 0.015f)] // was 0.7f, actual 0.013
[InlineData(CompressionType.ZStandard, 9, 1, 0.006f)] // was 0.7f, actual 0.005
[InlineData(CompressionType.ZStandard, 22, 1, 0.005f)] // was 0.7f, actual 0.004
[InlineData(CompressionType.BZip2, 0, 1, 0.035f)] // was 0.8f, actual 0.033
[InlineData(CompressionType.LZMA, 0, 1, 0.005f)] // was 0.8f, actual 0.004
[InlineData(CompressionType.None, 0, 1, 1.001f)] // was 1.1f, actual 1.000
[InlineData(CompressionType.Deflate, 6, 2, 0.045f)] // was 0.8f, actual 0.042
[InlineData(CompressionType.ZStandard, 3, 2, 0.012f)] // was 0.7f, actual 0.010
[InlineData(CompressionType.BZip2, 0, 2, 0.035f)] // was 0.8f, actual 0.032
[InlineData(CompressionType.Deflate, 9, 3, 0.04f)] // was 0.7f, actual 0.038
[InlineData(CompressionType.ZStandard, 9, 3, 0.003f)] // was 0.7f, actual 0.002
public void Zip_Create_Archive_With_3_Files_Crc32_Test(
CompressionType compressionType,
int compressionLevel,
int sizeMb,
float expectedRatio
)
{
const int OneMiB = 1024 * 1024;
var baseSize = sizeMb * OneMiB;
// Generate test content for files with sizes based on the sizeMb parameter
var file1Data = TestPseudoTextStream.Create(baseSize);
var file2Data = TestPseudoTextStream.Create(baseSize * 2);
var file3Data = TestPseudoTextStream.Create(baseSize * 3);
var expectedFiles = new Dictionary<string, (byte[] data, uint crc)>
/*
[Theory]
[InlineData(CompressionType.Deflate, 1, 1, 0.11f)] // was 0.8f, actual 0.104
[InlineData(CompressionType.Deflate, 3, 1, 0.08f)] // was 0.8f, actual 0.078
[InlineData(CompressionType.Deflate, 6, 1, 0.05f)] // was 0.8f, actual ~0.042
[InlineData(CompressionType.Deflate, 9, 1, 0.04f)] // was 0.7f, actual 0.038
[InlineData(CompressionType.ZStandard, 1, 1, 0.025f)] // was 0.8f, actual 0.023
[InlineData(CompressionType.ZStandard, 3, 1, 0.015f)] // was 0.7f, actual 0.013
[InlineData(CompressionType.ZStandard, 9, 1, 0.006f)] // was 0.7f, actual 0.005
[InlineData(CompressionType.ZStandard, 22, 1, 0.005f)] // was 0.7f, actual 0.004
[InlineData(CompressionType.BZip2, 0, 1, 0.035f)] // was 0.8f, actual 0.033
[InlineData(CompressionType.LZMA, 0, 1, 0.005f)] // was 0.8f, actual 0.004
[InlineData(CompressionType.None, 0, 1, 1.001f)] // was 1.1f, actual 1.000
[InlineData(CompressionType.Deflate, 6, 2, 0.045f)] // was 0.8f, actual 0.042
[InlineData(CompressionType.ZStandard, 3, 2, 0.012f)] // was 0.7f, actual 0.010
[InlineData(CompressionType.BZip2, 0, 2, 0.035f)] // was 0.8f, actual 0.032
[InlineData(CompressionType.Deflate, 9, 3, 0.04f)] // was 0.7f, actual 0.038
[InlineData(CompressionType.ZStandard, 9, 3, 0.003f)] // was 0.7f, actual 0.002
public void Zip_Create_Archive_With_3_Files_Crc32_Test(
CompressionType compressionType,
int compressionLevel,
int sizeMb,
float expectedRatio
)
{
[$"file1_{sizeMb}MiB.txt"] = (file1Data, CalculateCrc32(file1Data)),
[$"data/file2_{sizeMb * 2}MiB.txt"] = (file2Data, CalculateCrc32(file2Data)),
[$"deep/nested/file3_{sizeMb * 3}MiB.txt"] = (file3Data, CalculateCrc32(file3Data)),
};
// Create zip archive in memory
using var zipStream = new MemoryStream();
using (var writer = CreateWriterWithLevel(zipStream, compressionType, compressionLevel))
{
writer.Write($"file1_{sizeMb}MiB.txt", new MemoryStream(file1Data));
writer.Write($"data/file2_{sizeMb * 2}MiB.txt", new MemoryStream(file2Data));
writer.Write($"deep/nested/file3_{sizeMb * 3}MiB.txt", new MemoryStream(file3Data));
}
// Calculate and output actual compression ratio
var originalSize = file1Data.Length + file2Data.Length + file3Data.Length;
var actualRatio = (double)zipStream.Length / originalSize;
//Debug.WriteLine($"Zip_Create_Archive_With_3_Files_Crc32_Test: {compressionType} Level={compressionLevel} Size={sizeMb}MB Expected={expectedRatio:F3} Actual={actualRatio:F3}");
// Verify compression occurred (except for None compression type)
if (compressionType != CompressionType.None)
{
Assert.True(
zipStream.Length < originalSize,
$"Compression failed: compressed={zipStream.Length}, original={originalSize}"
const int OneMiB = 1024 * 1024;
var baseSize = sizeMb * OneMiB;
// Generate test content for files with sizes based on the sizeMb parameter
var file1Data = TestPseudoTextStream.Create(baseSize);
var file2Data = TestPseudoTextStream.Create(baseSize * 2);
var file3Data = TestPseudoTextStream.Create(baseSize * 3);
var expectedFiles = new Dictionary<string, (byte[] data, uint crc)>
{
[$"file1_{sizeMb}MiB.txt"] = (file1Data, CalculateCrc32(file1Data)),
[$"data/file2_{sizeMb * 2}MiB.txt"] = (file2Data, CalculateCrc32(file2Data)),
[$"deep/nested/file3_{sizeMb * 3}MiB.txt"] = (file3Data, CalculateCrc32(file3Data)),
};
// Create zip archive in memory
using var zipStream = new MemoryStream();
using (var writer = CreateWriterWithLevel(zipStream, compressionType, compressionLevel))
{
writer.Write($"file1_{sizeMb}MiB.txt", new MemoryStream(file1Data));
writer.Write($"data/file2_{sizeMb * 2}MiB.txt", new MemoryStream(file2Data));
writer.Write($"deep/nested/file3_{sizeMb * 3}MiB.txt", new MemoryStream(file3Data));
}
// Calculate and output actual compression ratio
var originalSize = file1Data.Length + file2Data.Length + file3Data.Length;
var actualRatio = (double)zipStream.Length / originalSize;
//Debug.WriteLine($"Zip_Create_Archive_With_3_Files_Crc32_Test: {compressionType} Level={compressionLevel} Size={sizeMb}MB Expected={expectedRatio:F3} Actual={actualRatio:F3}");
// Verify compression occurred (except for None compression type)
if (compressionType != CompressionType.None)
{
Assert.True(
zipStream.Length < originalSize,
$"Compression failed: compressed={zipStream.Length}, original={originalSize}"
);
}
// Verify compression ratio
VerifyCompressionRatio(
originalSize,
zipStream.Length,
expectedRatio,
$"{compressionType} level {compressionLevel}"
);
// Verify archive content and CRC32
VerifyArchiveContent(zipStream, expectedFiles);
// Verify compression type is correctly set
VerifyCompressionType(zipStream, compressionType);
}
// Verify compression ratio
VerifyCompressionRatio(
originalSize,
zipStream.Length,
expectedRatio,
$"{compressionType} level {compressionLevel}"
);
// Verify archive content and CRC32
VerifyArchiveContent(zipStream, expectedFiles);
// Verify compression type is correctly set
VerifyCompressionType(zipStream, compressionType);
}
[Theory]
[InlineData(CompressionType.Deflate, 1, 4, 0.11f)] // was 0.8, actual 0.105
[InlineData(CompressionType.Deflate, 3, 4, 0.08f)] // was 0.8, actual 0.077
[InlineData(CompressionType.Deflate, 6, 4, 0.045f)] // was 0.8, actual 0.042
[InlineData(CompressionType.Deflate, 9, 4, 0.04f)] // was 0.8, actual 0.037
[InlineData(CompressionType.ZStandard, 1, 4, 0.025f)] // was 0.8, actual 0.022
[InlineData(CompressionType.ZStandard, 3, 4, 0.012f)] // was 0.8, actual 0.010
[InlineData(CompressionType.ZStandard, 9, 4, 0.003f)] // was 0.8, actual 0.002
[InlineData(CompressionType.ZStandard, 22, 4, 0.003f)] // was 0.8, actual 0.002
[InlineData(CompressionType.BZip2, 0, 4, 0.035f)] // was 0.8, actual 0.032
[InlineData(CompressionType.LZMA, 0, 4, 0.003f)] // was 0.8, actual 0.002
public void Zip_WriterFactory_Crc32_Test(
CompressionType compressionType,
int compressionLevel,
int sizeMb,
float expectedRatio
)
{
var fileSize = sizeMb * 1024 * 1024;
var testData = TestPseudoTextStream.Create(fileSize);
var expectedCrc = CalculateCrc32(testData);
// Create archive with specified compression level
using var zipStream = new MemoryStream();
var writerOptions = new ZipWriterOptions(compressionType)
[Theory]
[InlineData(CompressionType.Deflate, 1, 4, 0.11f)] // was 0.8, actual 0.105
[InlineData(CompressionType.Deflate, 3, 4, 0.08f)] // was 0.8, actual 0.077
[InlineData(CompressionType.Deflate, 6, 4, 0.045f)] // was 0.8, actual 0.042
[InlineData(CompressionType.Deflate, 9, 4, 0.04f)] // was 0.8, actual 0.037
[InlineData(CompressionType.ZStandard, 1, 4, 0.025f)] // was 0.8, actual 0.022
[InlineData(CompressionType.ZStandard, 3, 4, 0.012f)] // was 0.8, actual 0.010
[InlineData(CompressionType.ZStandard, 9, 4, 0.003f)] // was 0.8, actual 0.002
[InlineData(CompressionType.ZStandard, 22, 4, 0.003f)] // was 0.8, actual 0.002
[InlineData(CompressionType.BZip2, 0, 4, 0.035f)] // was 0.8, actual 0.032
[InlineData(CompressionType.LZMA, 0, 4, 0.003f)] // was 0.8, actual 0.002
public void Zip_WriterFactory_Crc32_Test(
CompressionType compressionType,
int compressionLevel,
int sizeMb,
float expectedRatio
)
{
CompressionLevel = compressionLevel,
};
using (var writer = WriterFactory.Open(zipStream, ArchiveType.Zip, writerOptions))
{
writer.Write(
$"{compressionType}_level_{compressionLevel}_{sizeMb}MiB.txt",
new MemoryStream(testData)
var fileSize = sizeMb * 1024 * 1024;
var testData = TestPseudoTextStream.Create(fileSize);
var expectedCrc = CalculateCrc32(testData);
// Create archive with specified compression level
using var zipStream = new MemoryStream();
var writerOptions = new ZipWriterOptions(compressionType)
{
CompressionLevel = compressionLevel,
};
using (var writer = WriterFactory.Open(zipStream, ArchiveType.Zip, writerOptions))
{
writer.Write(
$"{compressionType}_level_{compressionLevel}_{sizeMb}MiB.txt",
new MemoryStream(testData)
);
}
// Calculate and output actual compression ratio
var actualRatio = (double)zipStream.Length / testData.Length;
//Debug.WriteLine($"Zip_WriterFactory_Crc32_Test: {compressionType} Level={compressionLevel} Size={sizeMb}MB Expected={expectedRatio:F3} Actual={actualRatio:F3}");
VerifyCompressionRatio(
testData.Length,
zipStream.Length,
expectedRatio,
$"{compressionType} level {compressionLevel}"
);
}
// Calculate and output actual compression ratio
var actualRatio = (double)zipStream.Length / testData.Length;
//Debug.WriteLine($"Zip_WriterFactory_Crc32_Test: {compressionType} Level={compressionLevel} Size={sizeMb}MB Expected={expectedRatio:F3} Actual={actualRatio:F3}");
VerifyCompressionRatio(
testData.Length,
zipStream.Length,
expectedRatio,
$"{compressionType} level {compressionLevel}"
);
// Verify the archive
zipStream.Position = 0;
using var archive = ZipArchive.Open(zipStream);
var entry = archive.Entries.Single(e => !e.IsDirectory);
using var entryStream = entry.OpenEntryStream();
using var extractedStream = new MemoryStream();
entryStream.CopyTo(extractedStream);
var extractedData = extractedStream.ToArray();
var actualCrc = CalculateCrc32(extractedData);
Assert.Equal(compressionType, entry.CompressionType);
Assert.Equal(expectedCrc, actualCrc);
Assert.Equal(testData.Length, extractedData.Length);
Assert.Equal(testData, extractedData);
}
[Theory]
[InlineData(CompressionType.Deflate, 1, 2, 0.11f)] // was 0.8, actual 0.104
[InlineData(CompressionType.Deflate, 3, 2, 0.08f)] // was 0.8, actual 0.077
[InlineData(CompressionType.Deflate, 6, 2, 0.045f)] // was 0.8, actual 0.042
[InlineData(CompressionType.Deflate, 9, 2, 0.04f)] // was 0.7, actual 0.038
[InlineData(CompressionType.ZStandard, 1, 2, 0.025f)] // was 0.8, actual 0.023
[InlineData(CompressionType.ZStandard, 3, 2, 0.015f)] // was 0.7, actual 0.012
[InlineData(CompressionType.ZStandard, 9, 2, 0.006f)] // was 0.7, actual 0.005
[InlineData(CompressionType.ZStandard, 22, 2, 0.005f)] // was 0.7, actual 0.004
[InlineData(CompressionType.BZip2, 0, 2, 0.035f)] // was 0.8, actual 0.032
[InlineData(CompressionType.LZMA, 0, 2, 0.005f)] // was 0.8, actual 0.004
public void Zip_ZipArchiveOpen_Crc32_Test(
CompressionType compressionType,
int compressionLevel,
int sizeMb,
float expectedRatio
)
{
var fileSize = sizeMb * 1024 * 1024;
var testData = TestPseudoTextStream.Create(fileSize);
var expectedCrc = CalculateCrc32(testData);
// Create archive with specified compression and level
using var zipStream = new MemoryStream();
using (var writer = CreateWriterWithLevel(zipStream, compressionType, compressionLevel))
{
writer.Write(
$"{compressionType}_{compressionLevel}_{sizeMb}MiB.txt",
new MemoryStream(testData)
);
}
// Calculate and output actual compression ratio
var actualRatio = (double)zipStream.Length / testData.Length;
//Debug.WriteLine($"Zip_ZipArchiveOpen_Crc32_Test: {compressionType} Level={compressionLevel} Size={sizeMb}MB Expected={expectedRatio:F3} Actual={actualRatio:F3}");
// Verify the archive
zipStream.Position = 0;
using var archive = ZipArchive.Open(zipStream);
var entry = archive.Entries.Single(e => !e.IsDirectory);
using var entryStream = entry.OpenEntryStream();
using var extractedStream = new MemoryStream();
entryStream.CopyTo(extractedStream);
var extractedData = extractedStream.ToArray();
var actualCrc = CalculateCrc32(extractedData);
Assert.Equal(compressionType, entry.CompressionType);
Assert.Equal(expectedCrc, actualCrc);
Assert.Equal(testData.Length, extractedData.Length);
// For smaller files, verify full content; for larger, spot check
if (testData.Length <= sizeMb * 2)
{
// Verify the archive
zipStream.Position = 0;
using var archive = ZipArchive.Open(zipStream);
var entry = archive.Entries.Single(e => !e.IsDirectory);
using var entryStream = entry.OpenEntryStream();
using var extractedStream = new MemoryStream();
entryStream.CopyTo(extractedStream);
var extractedData = extractedStream.ToArray();
var actualCrc = CalculateCrc32(extractedData);
Assert.Equal(compressionType, entry.CompressionType);
Assert.Equal(expectedCrc, actualCrc);
Assert.Equal(testData.Length, extractedData.Length);
Assert.Equal(testData, extractedData);
}
else
[Theory]
[InlineData(CompressionType.Deflate, 1, 2, 0.11f)] // was 0.8, actual 0.104
[InlineData(CompressionType.Deflate, 3, 2, 0.08f)] // was 0.8, actual 0.077
[InlineData(CompressionType.Deflate, 6, 2, 0.045f)] // was 0.8, actual 0.042
[InlineData(CompressionType.Deflate, 9, 2, 0.04f)] // was 0.7, actual 0.038
[InlineData(CompressionType.ZStandard, 1, 2, 0.025f)] // was 0.8, actual 0.023
[InlineData(CompressionType.ZStandard, 3, 2, 0.015f)] // was 0.7, actual 0.012
[InlineData(CompressionType.ZStandard, 9, 2, 0.006f)] // was 0.7, actual 0.005
[InlineData(CompressionType.ZStandard, 22, 2, 0.005f)] // was 0.7, actual 0.004
[InlineData(CompressionType.BZip2, 0, 2, 0.035f)] // was 0.8, actual 0.032
[InlineData(CompressionType.LZMA, 0, 2, 0.005f)] // was 0.8, actual 0.004
public void Zip_ZipArchiveOpen_Crc32_Test(
CompressionType compressionType,
int compressionLevel,
int sizeMb,
float expectedRatio
)
{
VerifyDataSpotCheck(testData, extractedData);
}
VerifyCompressionRatio(
testData.Length,
zipStream.Length,
expectedRatio,
$"{compressionType} Level {compressionLevel}"
);
}
var fileSize = sizeMb * 1024 * 1024;
var testData = TestPseudoTextStream.Create(fileSize);
var expectedCrc = CalculateCrc32(testData);
// Create archive with specified compression and level
using var zipStream = new MemoryStream();
using (var writer = CreateWriterWithLevel(zipStream, compressionType, compressionLevel))
{
writer.Write(
$"{compressionType}_{compressionLevel}_{sizeMb}MiB.txt",
new MemoryStream(testData)
);
}
// Calculate and output actual compression ratio
var actualRatio = (double)zipStream.Length / testData.Length;
//Debug.WriteLine($"Zip_ZipArchiveOpen_Crc32_Test: {compressionType} Level={compressionLevel} Size={sizeMb}MB Expected={expectedRatio:F3} Actual={actualRatio:F3}");
// Verify the archive
zipStream.Position = 0;
using var archive = ZipArchive.Open(zipStream);
var entry = archive.Entries.Single(e => !e.IsDirectory);
using var entryStream = entry.OpenEntryStream();
using var extractedStream = new MemoryStream();
entryStream.CopyTo(extractedStream);
var extractedData = extractedStream.ToArray();
var actualCrc = CalculateCrc32(extractedData);
Assert.Equal(compressionType, entry.CompressionType);
Assert.Equal(expectedCrc, actualCrc);
Assert.Equal(testData.Length, extractedData.Length);
// For smaller files, verify full content; for larger, spot check
if (testData.Length <= sizeMb * 2)
{
Assert.Equal(testData, extractedData);
}
else
{
VerifyDataSpotCheck(testData, extractedData);
}
VerifyCompressionRatio(
testData.Length,
zipStream.Length,
expectedRatio,
$"{compressionType} Level {compressionLevel}"
);
}*/
}

View File

@@ -1,5 +1,6 @@
using System;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.IO;
@@ -11,65 +12,66 @@ using Xunit;
namespace SharpCompress.Test.Zip;
public class ZipReaderTests : ReaderTests
{
public ZipReaderTests() => UseExtensionInsteadOfNameToVerify = true;
[Fact]
public void Issue_269_Double_Skip()
public async Task Issue_269_Double_Skip()
{
var path = Path.Combine(TEST_ARCHIVES_PATH, "PrePostHeaders.zip");
using Stream stream = new ForwardOnlyStream(File.OpenRead(path));
using var reader = ReaderFactory.Open(stream);
var count = 0;
while (reader.MoveToNextEntry())
while (await reader.MoveToNextEntryAsync())
{
count++;
if (!reader.Entry.IsDirectory)
{
if (count % 2 != 0)
{
reader.WriteEntryTo(Stream.Null);
await reader.WriteEntryToAsync(Stream.Null);
}
}
}
}
[Fact]
public void Zip_Zip64_Streamed_Read() => Read("Zip.zip64.zip", CompressionType.Deflate);
public Task Zip_Zip64_Streamed_Read() => ReadAsync("Zip.zip64.zip", CompressionType.Deflate);
[Fact]
public void Zip_ZipX_Streamed_Read() => Read("Zip.zipx", CompressionType.LZMA);
public Task Zip_ZipX_Streamed_Read() => ReadAsync("Zip.zipx", CompressionType.LZMA);
[Fact]
public void Zip_BZip2_Streamed_Read() => Read("Zip.bzip2.dd.zip", CompressionType.BZip2);
public Task Zip_BZip2_Streamed_Read() => ReadAsync("Zip.bzip2.dd.zip", CompressionType.BZip2);
[Fact]
public void Zip_BZip2_Read() => Read("Zip.bzip2.zip", CompressionType.BZip2);
public Task Zip_BZip2_Read() => ReadAsync("Zip.bzip2.zip", CompressionType.BZip2);
[Fact]
public void Zip_Deflate_Streamed2_Read() =>
Read("Zip.deflate.dd-.zip", CompressionType.Deflate);
public Task Zip_Deflate_Streamed2_Read() =>
ReadAsync("Zip.deflate.dd-.zip", CompressionType.Deflate);
[Fact]
public void Zip_Deflate_Streamed_Read() => Read("Zip.deflate.dd.zip", CompressionType.Deflate);
public Task Zip_Deflate_Streamed_Read() => ReadAsync("Zip.deflate.dd.zip", CompressionType.Deflate);
[Fact]
public void Zip_Deflate_Streamed_Skip()
public async Task Zip_Deflate_Streamed_Skip()
{
using Stream stream = new ForwardOnlyStream(
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip"))
);
using var reader = ReaderFactory.Open(stream);
var x = 0;
while (reader.MoveToNextEntry())
while (await reader.MoveToNextEntryAsync())
{
if (!reader.Entry.IsDirectory)
{
x++;
if (x % 2 == 0)
{
reader.WriteEntryToDirectory(
await reader.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -79,44 +81,44 @@ public class ZipReaderTests : ReaderTests
}
[Fact]
public void Zip_Deflate_Read() => Read("Zip.deflate.zip", CompressionType.Deflate);
public Task Zip_Deflate_Read() => ReadAsync("Zip.deflate.zip", CompressionType.Deflate);
[Fact]
public void Zip_Deflate64_Read() => Read("Zip.deflate64.zip", CompressionType.Deflate64);
public Task Zip_Deflate64_Read() => ReadAsync("Zip.deflate64.zip", CompressionType.Deflate64);
[Fact]
public void Zip_LZMA_Streamed_Read() => Read("Zip.lzma.dd.zip", CompressionType.LZMA);
public Task Zip_LZMA_Streamed_Read() => ReadAsync("Zip.lzma.dd.zip", CompressionType.LZMA);
[Fact]
public void Zip_LZMA_Read() => Read("Zip.lzma.zip", CompressionType.LZMA);
public Task Zip_LZMA_Read() => ReadAsync("Zip.lzma.zip", CompressionType.LZMA);
[Fact]
public void Zip_PPMd_Streamed_Read() => Read("Zip.ppmd.dd.zip", CompressionType.PPMd);
public Task Zip_PPMd_Streamed_Read() => ReadAsync("Zip.ppmd.dd.zip", CompressionType.PPMd);
[Fact]
public void Zip_PPMd_Read() => Read("Zip.ppmd.zip", CompressionType.PPMd);
public Task Zip_PPMd_Read() => ReadAsync("Zip.ppmd.zip", CompressionType.PPMd);
[Fact]
public void Zip_None_Read() => Read("Zip.none.zip", CompressionType.None);
public Task Zip_None_Read() => ReadAsync("Zip.none.zip", CompressionType.None);
[Fact]
public void Zip_Deflate_NoEmptyDirs_Read() =>
Read("Zip.deflate.noEmptyDirs.zip", CompressionType.Deflate);
public Task Zip_Deflate_NoEmptyDirs_Read() =>
ReadAsync("Zip.deflate.noEmptyDirs.zip", CompressionType.Deflate);
[Fact]
public void Zip_BZip2_PkwareEncryption_Read()
public async Task Zip_BZip2_PkwareEncryption_Read()
{
using (
Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.bzip2.pkware.zip"))
)
using (var reader = ZipReader.Open(stream, new ReaderOptions { Password = "test" }))
{
while (reader.MoveToNextEntry())
while (await reader.MoveToNextEntryAsync())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(CompressionType.BZip2, reader.Entry.CompressionType);
reader.WriteEntryToDirectory(
await reader.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -127,18 +129,18 @@ public class ZipReaderTests : ReaderTests
}
[Fact]
public void Zip_Reader_Disposal_Test()
public async Task Zip_Reader_Disposal_Test()
{
using var stream = new TestStream(
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip"))
);
using (var reader = ReaderFactory.Open(stream))
{
while (reader.MoveToNextEntry())
while (await reader.MoveToNextEntryAsync())
{
if (!reader.Entry.IsDirectory)
{
reader.WriteEntryToDirectory(
await reader.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -149,17 +151,17 @@ public class ZipReaderTests : ReaderTests
}
[Fact]
public void Zip_Reader_Disposal_Test2()
public async Task Zip_Reader_Disposal_Test2()
{
using var stream = new TestStream(
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip"))
);
var reader = ReaderFactory.Open(stream);
while (reader.MoveToNextEntry())
while (await reader.MoveToNextEntryAsync())
{
if (!reader.Entry.IsDirectory)
{
reader.WriteEntryToDirectory(
await reader.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -169,8 +171,8 @@ public class ZipReaderTests : ReaderTests
}
[Fact]
public void Zip_LZMA_WinzipAES_Read() =>
Assert.Throws<NotSupportedException>(() =>
public Task Zip_LZMA_WinzipAES_Read() =>
Assert.ThrowsAsync<NotSupportedException>(async () =>
{
using (
Stream stream = File.OpenRead(
@@ -179,12 +181,12 @@ public class ZipReaderTests : ReaderTests
)
using (var reader = ZipReader.Open(stream, new ReaderOptions { Password = "test" }))
{
while (reader.MoveToNextEntry())
while (await reader.MoveToNextEntryAsync())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(CompressionType.Unknown, reader.Entry.CompressionType);
reader.WriteEntryToDirectory(
await reader.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -195,7 +197,7 @@ public class ZipReaderTests : ReaderTests
});
[Fact]
public void Zip_Deflate_WinzipAES_Read()
public async Task Zip_Deflate_WinzipAES_Read()
{
using (
Stream stream = File.OpenRead(
@@ -204,12 +206,12 @@ public class ZipReaderTests : ReaderTests
)
using (var reader = ZipReader.Open(stream, new ReaderOptions { Password = "test" }))
{
while (reader.MoveToNextEntry())
while (await reader.MoveToNextEntryAsync())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(CompressionType.Unknown, reader.Entry.CompressionType);
reader.WriteEntryToDirectory(
await reader.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -220,18 +222,18 @@ public class ZipReaderTests : ReaderTests
}
[Fact]
public void Zip_Deflate_ZipCrypto_Read()
public async Task Zip_Deflate_ZipCrypto_Read()
{
var count = 0;
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "zipcrypto.zip")))
using (var reader = ZipReader.Open(stream, new ReaderOptions { Password = "test" }))
{
while (reader.MoveToNextEntry())
while (await reader.MoveToNextEntryAsync())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(CompressionType.None, reader.Entry.CompressionType);
reader.WriteEntryToDirectory(
await reader.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
@@ -243,7 +245,7 @@ public class ZipReaderTests : ReaderTests
}
[Fact]
public void TestSharpCompressWithEmptyStream()
public async Task TestSharpCompressWithEmptyStream()
{
var expected = new[]
{
@@ -267,9 +269,9 @@ public class ZipReaderTests : ReaderTests
SharpCompressStream.Create(stream, leaveOpen: true, throwOnDispose: true)
);
var i = 0;
while (zipReader.MoveToNextEntry())
while (await zipReader.MoveToNextEntryAsync())
{
using (var entry = zipReader.OpenEntryStream())
using (var entry = await zipReader.OpenEntryStreamAsync())
{
var tempStream = new MemoryStream();
const int bufSize = 0x1000;
@@ -288,7 +290,7 @@ public class ZipReaderTests : ReaderTests
}
[Fact]
public void Zip_None_Issue86_Streamed_Read()
public async Task Zip_None_Issue86_Streamed_Read()
{
var keys = new[] { "Empty1", "Empty2", "Dir1/", "Dir2/", "Fake1", "Fake2", "Internal.zip" };
@@ -298,7 +300,7 @@ public class ZipReaderTests : ReaderTests
using var reader = ZipReader.Open(stream);
foreach (var key in keys)
{
reader.MoveToNextEntry();
await reader.MoveToNextEntryAsync();
Assert.Equal(reader.Entry.Key, key);
@@ -308,11 +310,11 @@ public class ZipReaderTests : ReaderTests
}
}
Assert.False(reader.MoveToNextEntry());
Assert.False(await reader.MoveToNextEntryAsync());
}
[Fact]
public void Zip_ReaderMoveToNextEntry()
public async Task Zip_ReaderMoveToNextEntryAsync()
{
var keys = new[] { "version", "sizehint", "data/0/metadata", "data/0/records" };
@@ -320,59 +322,67 @@ public class ZipReaderTests : ReaderTests
using var reader = ZipReader.Open(fileStream);
foreach (var key in keys)
{
reader.MoveToNextEntry();
await reader.MoveToNextEntryAsync();
Assert.Equal(reader.Entry.Key, key);
}
}
[Fact]
public void Issue_685()
public async Task Issue_685()
{
var count = 0;
using var fileStream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Issue_685.zip"));
using var reader = ZipReader.Open(fileStream);
while (reader.MoveToNextEntry())
while (await reader.MoveToNextEntryAsync())
{
count++;
reader.OpenEntryStream().Dispose(); // Uncomment for workaround
var stream = await reader.OpenEntryStreamAsync();
#if !NETSTANDARD2_0 && !NETFRAMEWORK
await stream.DisposeAsync(); // Uncomment for workaround
#else
stream.Dispose();
#endif
}
Assert.Equal(4, count);
}
[Fact]
public void Zip_ReaderFactory_Uncompressed_Read_All()
public async Task Zip_ReaderFactory_Uncompressed_Read_All()
{
var zipPath = Path.Combine(TEST_ARCHIVES_PATH, "Zip.uncompressed.zip");
using var stream = File.OpenRead(zipPath);
using var reader = ReaderFactory.Open(stream);
while (reader.MoveToNextEntry())
while (await reader.MoveToNextEntryAsync())
{
var target = new MemoryStream();
reader.OpenEntryStream().CopyTo(target);
await (await reader.OpenEntryStreamAsync()).CopyToAsync(target);
}
}
[Fact]
public void Zip_ReaderFactory_Uncompressed_Skip_All()
public async Task Zip_ReaderFactory_Uncompressed_Skip_All()
{
var zipPath = Path.Combine(TEST_ARCHIVES_PATH, "Zip.uncompressed.zip");
using var stream = File.OpenRead(zipPath);
using var reader = ReaderFactory.Open(stream);
while (reader.MoveToNextEntry()) { }
while (await reader.MoveToNextEntryAsync()) { }
}
// This test uses a large 7-Zip file containing a zip file inside it to test zip64 support.
// We probably shouldn't allow ExtractAllEntries here, but it works for now.
[Fact]
public void Zip_Uncompressed_64bit()
public async Task Zip_Uncompressed_64bit()
{
var zipPath = Path.Combine(TEST_ARCHIVES_PATH, "64bitstream.zip.7z");
using var stream = File.OpenRead(zipPath);
var archive = ArchiveFactory.Open(stream);
var reader = archive.ExtractAllEntries();
reader.MoveToNextEntry();
var zipReader = ZipReader.Open(reader.OpenEntryStream());
await reader.MoveToNextEntryAsync();
var zipReader = ZipReader.Open(await reader.OpenEntryStreamAsync());
var x = 0;
while (zipReader.MoveToNextEntry())
while (await zipReader.MoveToNextEntryAsync())
{
x++;
}
@@ -383,17 +393,17 @@ public class ZipReaderTests : ReaderTests
[Fact]
public void Zip_Uncompressed_Encrypted_Read()
public async Task Zip_Uncompressed_Encrypted_Read()
{
using var archive = ArchiveFactory.Open(
using var reader = ReaderFactory.Open(
Path.Combine(TEST_ARCHIVES_PATH, "Zip.none.encrypted.zip"),
new ReaderOptions { Password = "test" }
);
using var reader = archive.ExtractAllEntries();
reader.MoveToNextEntry();
await reader.MoveToNextEntryAsync();
Assert.Equal("first.txt", reader.Entry.Key);
Assert.Equal(199, reader.Entry.Size);
reader.OpenEntryStream().Dispose();
reader.MoveToNextEntry();
(await reader.OpenEntryStreamAsync()).Dispose();
await reader.MoveToNextEntryAsync();
Assert.Equal("second.txt", reader.Entry.Key);
Assert.Equal(197, reader.Entry.Size);
}
}
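The diffs above all converge on the same async extraction loop. A hedged sketch of that pattern, run inside an async method and using only the MoveToNextEntryAsync and WriteEntryToDirectoryAsync members visible in the changed lines (the archive path and output directory are placeholders):

using var stream = File.OpenRead("archive.zip"); // placeholder path
using var reader = ZipReader.Open(stream);
while (await reader.MoveToNextEntryAsync())
{
if (!reader.Entry.IsDirectory)
{
await reader.WriteEntryToDirectoryAsync(
"output", // placeholder directory
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}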

View File

@@ -1,4 +1,5 @@
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common;
using Xunit;
@@ -10,8 +11,8 @@ public class ZipWriterTests : WriterTests
: base(ArchiveType.Zip) { }
[Fact]
public void Zip_Deflate_Write() =>
Write(
public Task Zip_Deflate_Write() =>
WriteAsync(
CompressionType.Deflate,
"Zip.deflate.noEmptyDirs.zip",
"Zip.deflate.noEmptyDirs.zip",
@@ -19,8 +20,8 @@ public class ZipWriterTests : WriterTests
);
[Fact]
public void Zip_BZip2_Write() =>
Write(
public Task Zip_BZip2_Write() =>
WriteAsync(
CompressionType.BZip2,
"Zip.bzip2.noEmptyDirs.zip",
"Zip.bzip2.noEmptyDirs.zip",
@@ -28,8 +29,8 @@ public class ZipWriterTests : WriterTests
);
[Fact]
public void Zip_None_Write() =>
Write(
public Task Zip_None_Write() =>
WriteAsync(
CompressionType.None,
"Zip.none.noEmptyDirs.zip",
"Zip.none.noEmptyDirs.zip",
@@ -37,8 +38,8 @@ public class ZipWriterTests : WriterTests
);
[Fact]
public void Zip_LZMA_Write() =>
Write(
public Task Zip_LZMA_Write() =>
WriteAsync(
CompressionType.LZMA,
"Zip.lzma.noEmptyDirs.zip",
"Zip.lzma.noEmptyDirs.zip",
@@ -46,8 +47,8 @@ public class ZipWriterTests : WriterTests
);
[Fact]
public void Zip_PPMd_Write() =>
Write(
public Task Zip_PPMd_Write() =>
WriteAsync(
CompressionType.PPMd,
"Zip.ppmd.noEmptyDirs.zip",
"Zip.ppmd.noEmptyDirs.zip",
@@ -55,8 +56,12 @@ public class ZipWriterTests : WriterTests
);
[Fact]
public void Zip_Rar_Write() =>
Assert.Throws<InvalidFormatException>(() =>
Write(CompressionType.Rar, "Zip.ppmd.noEmptyDirs.zip", "Zip.ppmd.noEmptyDirs.zip")
public Task Zip_Rar_Write() =>
Assert.ThrowsAsync<InvalidFormatException>(async () =>
await WriteAsync(
CompressionType.Rar,
"Zip.ppmd.noEmptyDirs.zip",
"Zip.ppmd.noEmptyDirs.zip"
)
);
}

View File

@@ -4,20 +4,20 @@
".NETFramework,Version=v4.8": {
"AwesomeAssertions": {
"type": "Direct",
"requested": "[9.2.0, )",
"resolved": "9.2.0",
"contentHash": "RCkuFyKmesmZR74XLOzYvTpG/IbHfBeFapFTMvFskPzEK4z3YrVmHB2FIFJ0DhwjuIDdPL/hc8zS40IwMAN0BA==",
"requested": "[9.2.1, )",
"resolved": "9.2.1",
"contentHash": "lbwhyQNXxxEGx4oCbFqNfFy2DsywsvNhN6qoOjY4wwhMgI2L9+YrxjyF/M0io99yrvWV1Cjj12LP2QGcC43Uhw==",
"dependencies": {
"System.Threading.Tasks.Extensions": "4.5.4"
}
},
"Microsoft.NET.Test.Sdk": {
"type": "Direct",
"requested": "[17.13.0, )",
"resolved": "17.13.0",
"contentHash": "W19wCPizaIC9Zh47w8wWI/yxuqR7/dtABwOrc8r2jX/8mUNxM2vw4fXDh+DJTeogxV+KzKwg5jNNGQVwf3LXyA==",
"requested": "[18.0.0, )",
"resolved": "18.0.0",
"contentHash": "bvxj2Asb7nT+tqOFFerrhQeEjUYLwx0Poi0Rznu63WbqN+A4uDn1t5NWXfAOOQsF6lpmK6N2v+Vvgso7KWZS7g==",
"dependencies": {
"Microsoft.CodeCoverage": "17.13.0"
"Microsoft.CodeCoverage": "18.0.0"
}
},
"Microsoft.NETFramework.ReferenceAssemblies": {
@@ -49,20 +49,10 @@
"Microsoft.TestPlatform.ObjectModel": "17.13.0"
}
},
"Xunit.SkippableFact": {
"type": "Direct",
"requested": "[1.5.23, )",
"resolved": "1.5.23",
"contentHash": "JlKobLTlsGcuJ8OtoodxL63bUagHSVBnF+oQ2GgnkwNqK+XYjeYyhQasULi5Ebx1MNDGNbOMplQYr89mR+nItQ==",
"dependencies": {
"Validation": "2.5.51",
"xunit.extensibility.execution": "2.4.0"
}
},
"Microsoft.CodeCoverage": {
"type": "Transitive",
"resolved": "17.13.0",
"contentHash": "9LIUy0y+DvUmEPtbRDw6Bay3rzwqFV8P4efTrK4CZhQle3M/QwLPjISghfcolmEGAPWxuJi6m98ZEfk4VR4Lfg=="
"resolved": "18.0.0",
"contentHash": "DFPhMrsIofgJ1DDU3ModqqRArDm15/bNl4ecmcuBspZkZ4ONYnCC0R8U27WzK7cYv6r8l6Q/fRmvg7cb+I/dJA=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net48": {
"type": "Transitive",
@@ -84,8 +74,8 @@
},
"System.Numerics.Vectors": {
"type": "Transitive",
"resolved": "4.5.0",
"contentHash": "QQTlPTl06J/iiDbJCiepZ4H//BVraReU4O4EoRw1U02H5TLUIT7xn3GnDp9AXPSlJUDyFs4uWjWafNX6WrAojQ=="
"resolved": "4.6.1",
"contentHash": "sQxefTnhagrhoq2ReR0D/6K0zJcr9Hrd6kikeXsA1I8kOCboTavcUC4r7TSfpKFeE163uMuxZcyfO1mGO3EN8Q=="
},
"System.Reflection.Metadata": {
"type": "Transitive",
@@ -97,8 +87,8 @@
},
"System.Runtime.CompilerServices.Unsafe": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "/iUeP3tq1S0XdNNoMz5C9twLSrM/TH+qElHkXWaPvuNOt+99G75NrV0OS2EqHx5wMN7popYjpc8oTjC1y16DLg=="
"resolved": "6.1.2",
"contentHash": "2hBr6zdbIBTDE3EhK7NSVNdX58uTK6iHW/P/Axmm9sl1xoGSLqDvMtpecn226TNwHByFokYwJmt/aQQNlO5CRw=="
},
"System.Threading.Tasks.Extensions": {
"type": "Transitive",
@@ -108,11 +98,6 @@
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
},
"Validation": {
"type": "Transitive",
"resolved": "2.5.51",
"contentHash": "g/Aug7PVWaenlJ0QUyt/mEetngkQNsMCuNeRVXbcJED1nZS7JcK+GTU4kz3jcQ7bFuKfi8PF4ExXH7XSFNuSLQ=="
},
"xunit.abstractions": {
"type": "Transitive",
"resolved": "2.0.3",
@@ -156,34 +141,47 @@
"sharpcompress": {
"type": "Project",
"dependencies": {
"System.Buffers": "[4.6.0, )",
"Microsoft.Bcl.AsyncInterfaces": "[8.0.0, )",
"System.Buffers": "[4.6.1, )",
"System.Memory": "[4.6.3, )",
"System.Text.Encoding.CodePages": "[8.0.0, )",
"ZstdSharp.Port": "[0.8.6, )"
}
},
"Microsoft.Bcl.AsyncInterfaces": {
"type": "CentralTransitive",
"requested": "[8.0.0, )",
"resolved": "5.0.0",
"contentHash": "W8DPQjkMScOMTtJbPwmPyj9c3zYSFGawDW3jwlBOOsnY+EzZFLgNQ/UMkK35JmkNOVPdCyPr2Tw7Vv9N+KA3ZQ==",
"resolved": "8.0.0",
"contentHash": "3WA9q9yVqJp222P3x1wYIGDAkpjAku0TMUaaQV22g6L67AI0LdOIrVS7Ht2vJfLHGSPVuqN94vIr15qn+HEkHw==",
"dependencies": {
"System.Threading.Tasks.Extensions": "4.5.4"
}
},
"System.Buffers": {
"type": "CentralTransitive",
"requested": "[4.6.0, )",
"resolved": "4.6.0",
"contentHash": "lN6tZi7Q46zFzAbRYXTIvfXcyvQQgxnY7Xm6C6xQ9784dEL1amjM6S6Iw4ZpsvesAKnRVsM4scrDQaDqSClkjA=="
"requested": "[4.6.1, )",
"resolved": "4.6.1",
"contentHash": "N8GXpmiLMtljq7gwvyS+1QvKT/W2J8sNAvx+HVg4NGmsG/H+2k/y9QI23auLJRterrzCiDH+IWAw4V/GPwsMlw=="
},
"System.Memory": {
"type": "CentralTransitive",
"requested": "[4.6.0, )",
"resolved": "4.5.5",
"contentHash": "XIWiDvKPXaTveaB7HVganDlOCRoj03l+jrwNvcge/t8vhGYKvqV+dMv6G4SAX2NoNmN0wZfVPTAlFwZcZvVOUw==",
"requested": "[4.6.3, )",
"resolved": "4.6.3",
"contentHash": "qdcDOgnFZY40+Q9876JUHnlHu7bosOHX8XISRoH94fwk6hgaeQGSgfZd8srWRZNt5bV9ZW2TljcegDNxsf+96A==",
"dependencies": {
"System.Buffers": "4.5.1",
"System.Numerics.Vectors": "4.5.0",
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
"System.Buffers": "4.6.1",
"System.Numerics.Vectors": "4.6.1",
"System.Runtime.CompilerServices.Unsafe": "6.1.2"
}
},
"System.Text.Encoding.CodePages": {
"type": "CentralTransitive",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "OZIsVplFGaVY90G2SbpgU7EnCoOO5pw1t4ic21dBF3/1omrJFpAGoNAVpPyMVOC90/hvgkGG3VFqR13YgZMQfg==",
"dependencies": {
"System.Memory": "4.5.5",
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
}
},
"ZstdSharp.Port": {
@@ -201,18 +199,18 @@
"net8.0": {
"AwesomeAssertions": {
"type": "Direct",
"requested": "[9.2.0, )",
"resolved": "9.2.0",
"contentHash": "RCkuFyKmesmZR74XLOzYvTpG/IbHfBeFapFTMvFskPzEK4z3YrVmHB2FIFJ0DhwjuIDdPL/hc8zS40IwMAN0BA=="
"requested": "[9.2.1, )",
"resolved": "9.2.1",
"contentHash": "lbwhyQNXxxEGx4oCbFqNfFy2DsywsvNhN6qoOjY4wwhMgI2L9+YrxjyF/M0io99yrvWV1Cjj12LP2QGcC43Uhw=="
},
"Microsoft.NET.Test.Sdk": {
"type": "Direct",
"requested": "[17.13.0, )",
"resolved": "17.13.0",
"contentHash": "W19wCPizaIC9Zh47w8wWI/yxuqR7/dtABwOrc8r2jX/8mUNxM2vw4fXDh+DJTeogxV+KzKwg5jNNGQVwf3LXyA==",
"requested": "[18.0.0, )",
"resolved": "18.0.0",
"contentHash": "bvxj2Asb7nT+tqOFFerrhQeEjUYLwx0Poi0Rznu63WbqN+A4uDn1t5NWXfAOOQsF6lpmK6N2v+Vvgso7KWZS7g==",
"dependencies": {
"Microsoft.CodeCoverage": "17.13.0",
"Microsoft.TestPlatform.TestHost": "17.13.0"
"Microsoft.CodeCoverage": "18.0.0",
"Microsoft.TestPlatform.TestHost": "18.0.0"
}
},
"Microsoft.NETFramework.ReferenceAssemblies": {
@@ -247,20 +245,10 @@
"resolved": "3.1.5",
"contentHash": "tKi7dSTwP4m5m9eXPM2Ime4Kn7xNf4x4zT9sdLO/G4hZVnQCRiMTWoSZqI/pYTVeI27oPPqHBKYI/DjJ9GsYgA=="
},
"Xunit.SkippableFact": {
"type": "Direct",
"requested": "[1.5.23, )",
"resolved": "1.5.23",
"contentHash": "JlKobLTlsGcuJ8OtoodxL63bUagHSVBnF+oQ2GgnkwNqK+XYjeYyhQasULi5Ebx1MNDGNbOMplQYr89mR+nItQ==",
"dependencies": {
"Validation": "2.5.51",
"xunit.extensibility.execution": "2.4.0"
}
},
"Microsoft.CodeCoverage": {
"type": "Transitive",
"resolved": "17.13.0",
"contentHash": "9LIUy0y+DvUmEPtbRDw6Bay3rzwqFV8P4efTrK4CZhQle3M/QwLPjISghfcolmEGAPWxuJi6m98ZEfk4VR4Lfg=="
"resolved": "18.0.0",
"contentHash": "DFPhMrsIofgJ1DDU3ModqqRArDm15/bNl4ecmcuBspZkZ4ONYnCC0R8U27WzK7cYv6r8l6Q/fRmvg7cb+I/dJA=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net461": {
"type": "Transitive",
@@ -269,35 +257,38 @@
},
"Microsoft.TestPlatform.ObjectModel": {
"type": "Transitive",
"resolved": "17.13.0",
"contentHash": "bt0E0Dx+iqW97o4A59RCmUmz/5NarJ7LRL+jXbSHod72ibL5XdNm1Ke+UO5tFhBG4VwHLcSjqq9BUSblGNWamw==",
"resolved": "18.0.0",
"contentHash": "Al/a99ymb8UdEEh6DKNiaoFn5i8fvX5PdM9LfU9Z/Q8NJrlyHHzF+LRHLbR+t89gRsJ2fFMpwYxgEn3eH1BQwA==",
"dependencies": {
"System.Reflection.Metadata": "1.6.0"
"System.Reflection.Metadata": "8.0.0"
}
},
"Microsoft.TestPlatform.TestHost": {
"type": "Transitive",
"resolved": "17.13.0",
"contentHash": "9GGw08Dc3AXspjekdyTdZ/wYWFlxbgcF0s7BKxzVX+hzAwpifDOdxM+ceVaaJSQOwqt3jtuNlHn3XTpKUS9x9Q==",
"resolved": "18.0.0",
"contentHash": "aAxE8Thr9ZHGrljOYaDeLJqitQi75iE4xeEFn6CEGFirlHSn1KwpKPniuEn6zCLZ90Z3XqNlrC3ZJTuvBov45w==",
"dependencies": {
"Microsoft.TestPlatform.ObjectModel": "17.13.0",
"Newtonsoft.Json": "13.0.1"
"Microsoft.TestPlatform.ObjectModel": "18.0.0",
"Newtonsoft.Json": "13.0.3"
}
},
"Newtonsoft.Json": {
"type": "Transitive",
"resolved": "13.0.1",
"contentHash": "ppPFpBcvxdsfUonNcvITKqLl3bqxWbDCZIzDWHzjpdAHRFfZe0Dw9HmA0+za13IdyrgJwpkDTDA9fHaxOrt20A=="
"resolved": "13.0.3",
"contentHash": "HrC5BXdl00IP9zeV+0Z848QWPAoCr9P3bDEZguI+gkLcBKAOxix/tLEAAHC+UvDNPv4a2d18lOReHMOagPa+zQ=="
},
"System.Collections.Immutable": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "AurL6Y5BA1WotzlEvVaIDpqzpIPvYnnldxru8oXJU2yFxFUy3+pNXjXd1ymO+RA0rq0+590Q8gaz2l3Sr7fmqg=="
},
"System.Reflection.Metadata": {
"type": "Transitive",
"resolved": "1.6.0",
"contentHash": "COC1aiAJjCoA5GBF+QKL2uLqEBew4JsCkQmoHKbN3TlOZKa2fKLz5CpiRQKDz0RsAOEGsVKqOD5bomsXq/4STQ=="
},
"Validation": {
"type": "Transitive",
"resolved": "2.5.51",
"contentHash": "g/Aug7PVWaenlJ0QUyt/mEetngkQNsMCuNeRVXbcJED1nZS7JcK+GTU4kz3jcQ7bFuKfi8PF4ExXH7XSFNuSLQ=="
"resolved": "8.0.0",
"contentHash": "ptvgrFh7PvWI8bcVqG5rsA/weWM09EnthFHR5SCnS6IN+P4mj6rE1lBDC4U8HL9/57htKAqy4KQ3bBj84cfYyQ==",
"dependencies": {
"System.Collections.Immutable": "8.0.0"
}
},
"xunit.abstractions": {
"type": "Transitive",
@@ -342,16 +333,9 @@
"sharpcompress": {
"type": "Project",
"dependencies": {
"System.Buffers": "[4.6.0, )",
"ZstdSharp.Port": "[0.8.6, )"
}
},
"System.Buffers": {
"type": "CentralTransitive",
"requested": "[4.6.0, )",
"resolved": "4.6.0",
"contentHash": "lN6tZi7Q46zFzAbRYXTIvfXcyvQQgxnY7Xm6C6xQ9784dEL1amjM6S6Iw4ZpsvesAKnRVsM4scrDQaDqSClkjA=="
},
"ZstdSharp.Port": {
"type": "CentralTransitive",
"requested": "[0.8.6, )",