Compare commits

...

65 Commits

Author SHA1 Message Date
Adam Hathcock
73e9b61fcc try a new agent config 2025-10-25 16:17:42 +01:00
Adam Hathcock
699bc5f34b chore: add Copilot coding agent config and CI workflow 2025-10-25 16:05:09 +01:00
Adam Hathcock
9eed8e842c Merge pull request #972 from TwanVanDongen/master
Handle vendor-specific and malformed ZIP extra fields safely
2025-10-25 13:53:10 +01:00
Twan van Dongen
6d652a12ee And again forgot to apply CSharpier
Adds bounds checks to prevent exceptions when extra fields are truncated or non-standard (e.g., 0x4341 "AC"/ARC0). Stops parsing gracefully, allowing other fields to be processed.
2025-10-24 17:18:37 +02:00
Adam Hathcock
e043e06656 Merge pull request #969 from adamhathcock/adam/perf
Add JB perf testing project.
2025-10-23 14:34:43 +01:00
Adam Hathcock
14b52599f4 Update src/SharpCompress/Compressors/Rar/UnpackV1/Unpack.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-23 14:20:54 +01:00
Adam Hathcock
e3e2c0c567 Update tests/SharpCompress.Performance/LargeMemoryStream.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-23 14:19:16 +01:00
Adam Hathcock
4fc5d60f03 reduce visibility 2025-10-23 14:16:39 +01:00
Adam Hathcock
c37a9e0f82 Merge remote-tracking branch 'origin/adam/perf' into adam/perf 2025-10-23 13:50:31 +01:00
Adam Hathcock
fed17ebb96 fmt 2025-10-23 13:50:07 +01:00
Adam Hathcock
eeac678872 More usage of pool and better copy 2025-10-23 13:49:54 +01:00
Adam Hathcock
f9ed0f2df9 Update tests/SharpCompress.Performance/Program.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-23 11:47:42 +01:00
Adam Hathcock
0ddbacac85 Update src/SharpCompress/Compressors/Rar/UnpackV1/UnpackUtility.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-23 11:47:27 +01:00
Adam Hathcock
f0d28aa5cf fmt 2025-10-23 11:43:38 +01:00
Adam Hathcock
cc84f6fee4 more making rar faster 2025-10-23 11:43:21 +01:00
Adam Hathcock
00e6eef369 used AI to optimize some copys and shifting 2025-10-23 11:18:50 +01:00
Adam Hathcock
1ae71907bc don't need to clear everything 2025-10-23 10:53:54 +01:00
Adam Hathcock
3ff688fba2 clear and null check 2025-10-23 10:48:18 +01:00
Adam Hathcock
bb59b3d456 add pool to LZMA out window 2025-10-23 09:54:52 +01:00
Adam Hathcock
186ea74ada add some fixes for rar to pool memory 2025-10-23 09:40:15 +01:00
Adam Hathcock
c108f2dcf3 add perf testing project using JB memory and cpu 2025-10-23 09:39:57 +01:00
Adam Hathcock
4cca232d83 Merge pull request #959 from adamhathcock/adam/xz-wrapped-often
Removed wrappers that weren't needed (probably)
2025-10-22 11:54:47 +01:00
Adam Hathcock
1db511e9cb Merge branch 'master' into adam/xz-wrapped-often 2025-10-22 11:51:46 +01:00
Adam Hathcock
76afa7d3bf Merge pull request #968 from adamhathcock/adam/rework-deps
rework dependencies to be correct for frameworks and update
2025-10-22 11:51:30 +01:00
Adam Hathcock
3513f7b1cd Update src/SharpCompress/SharpCompress.csproj
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-22 10:51:12 +01:00
Adam Hathcock
4531fe39e6 Merge branch 'master' into adam/rework-deps 2025-10-22 10:48:16 +01:00
Adam Hathcock
8d276a85bc rework dependencies to be correct for frameworks and update 2025-10-22 10:47:43 +01:00
Adam Hathcock
5f0d042bc3 Merge pull request #967 from adamhathcock/adam/reduce-custom-utilities
Reduce custom utilities for arrays/bytes
2025-10-22 10:41:10 +01:00
Adam Hathcock
408f07e3c4 Merge branch 'master' into adam/reduce-custom-utilities 2025-10-22 10:38:01 +01:00
Adam Hathcock
d1a540c90c use windows instead of skippable fact 2025-10-22 10:32:47 +01:00
Adam Hathcock
00df8e930e add windows only compile constant 2025-10-22 10:30:40 +01:00
Adam Hathcock
3b768b1b77 Merge pull request #961 from adamhathcock/dependabot/nuget/AwesomeAssertions-9.2.1
Bump AwesomeAssertions from 9.2.0 to 9.2.1
2025-10-22 10:25:01 +01:00
Adam Hathcock
42a7ececa0 Merge branch 'master' into adam/xz-wrapped-often 2025-10-22 10:22:36 +01:00
Adam Hathcock
e8867de049 Merge branch 'master' into dependabot/nuget/AwesomeAssertions-9.2.1 2025-10-22 10:21:59 +01:00
Adam Hathcock
a1dfa3dfa3 xplat tests for path characters 2025-10-22 10:21:22 +01:00
Adam Hathcock
83917d4f79 Merge remote-tracking branch 'origin/master' into adam/reduce-custom-utilities 2025-10-22 10:17:20 +01:00
Adam Hathcock
513cd4f905 some AI suggestions 2025-10-22 10:16:45 +01:00
Adam Hathcock
eda0309df3 Merge pull request #966 from adamhathcock/adam/reduce-stackalloc
Remove a dynamically created stackalloc
2025-10-22 10:13:14 +01:00
Adam Hathcock
74e27c028e fix the span length 2025-10-22 10:10:07 +01:00
Adam Hathcock
36c06c4089 ugh, this is used because it shadows a field 2025-10-22 09:32:19 +01:00
Adam Hathcock
249b8a9cdd add AI generated tests 2025-10-22 09:28:07 +01:00
Adam Hathcock
62bee15f00 fmt 2025-10-22 09:19:30 +01:00
Adam Hathcock
d8797b69e4 remove do while 2025-10-22 09:19:09 +01:00
Adam Hathcock
084fe72b02 Consolidate not null 2025-10-22 09:17:13 +01:00
Adam Hathcock
c823acaa3f optimize ReadFully and Skip 2025-10-22 09:10:16 +01:00
Adam Hathcock
e0d6cd9cb7 Try to reduce custom functions for array/byte management 2025-10-22 09:00:21 +01:00
Adam Hathcock
01021e102b remove some extra stackallocs 2025-10-22 08:36:03 +01:00
Adam Hathcock
6de738ff17 reduce dynamic stackallocs in unpackv1 2025-10-22 08:32:19 +01:00
Adam Hathcock
c0612547eb Merge pull request #964 from adamhathcock/adam/extract-all-solid-only
Only allow extract all on archives that are solid (some rars and 7zip only)
2025-10-21 14:08:23 +01:00
Adam Hathcock
e960907698 Update src/SharpCompress/Archives/AbstractArchive.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-21 13:55:56 +01:00
Adam Hathcock
84e03b1b27 Allow 7zip files of all sizes? 2025-10-21 10:28:58 +01:00
Adam Hathcock
f1a80da34b fix tests that use extract all wrongly 2025-10-21 09:56:29 +01:00
Adam Hathcock
5a5a55e556 fmt 2025-10-21 09:22:35 +01:00
Adam Hathcock
e1f132b45b Only allow extract all on archives that are solid (some rars and 7zip only) 2025-10-21 09:21:46 +01:00
dependabot[bot]
087011aede Bump AwesomeAssertions from 9.2.0 to 9.2.1
---
updated-dependencies:
- dependency-name: AwesomeAssertions
  dependency-version: 9.2.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-10-20 10:44:17 +00:00
Adam Hathcock
1430bf9b31 fmt 2025-10-15 09:54:13 +01:00
Adam Hathcock
4e5de817ef Removed too many wrappers
# Conflicts:
#	src/SharpCompress/Compressors/Xz/XZIndex.cs
2025-10-15 09:53:46 +01:00
Adam Hathcock
5d6b94f8c3 Merge pull request #952 from adamhathcock/dependabot/github_actions/actions/checkout-5
Bump actions/checkout from 4 to 5
2025-10-14 08:25:53 +01:00
Adam Hathcock
8dfbe56f42 Merge branch 'master' into dependabot/github_actions/actions/checkout-5 2025-10-14 08:23:18 +01:00
Adam Hathcock
df79d983d7 Merge pull request #957 from adamhathcock/dependabot/github_actions/actions/setup-dotnet-5
Bump actions/setup-dotnet from 4 to 5
2025-10-14 08:22:47 +01:00
dependabot[bot]
6c23a28826 Bump actions/setup-dotnet from 4 to 5
Bumps [actions/setup-dotnet](https://github.com/actions/setup-dotnet) from 4 to 5.
- [Release notes](https://github.com/actions/setup-dotnet/releases)
- [Commits](https://github.com/actions/setup-dotnet/compare/v4...v5)

---
updated-dependencies:
- dependency-name: actions/setup-dotnet
  dependency-version: '5'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-10-13 16:21:25 +00:00
dependabot[bot]
f72289570a Bump actions/checkout from 4 to 5
Bumps [actions/checkout](https://github.com/actions/checkout) from 4 to 5.
- [Release notes](https://github.com/actions/checkout/releases)
- [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md)
- [Commits](https://github.com/actions/checkout/compare/v4...v5)

---
updated-dependencies:
- dependency-name: actions/checkout
  dependency-version: '5'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-10-13 16:12:51 +00:00
Adam Hathcock
51bc9dc20e Merge pull request #950 from adamhathcock/adamhathcock-patch-1
Configure Dependabot for NuGet updates
2025-10-13 16:52:07 +01:00
Adam Hathcock
e45ac6bfa9 Update .github/dependabot.yml
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-13 16:48:55 +01:00
Adam Hathcock
44d1cbdb0c Configure Dependabot for NuGet updates
Added NuGet package ecosystem configuration for Dependabot.
2025-10-13 16:47:46 +01:00
57 changed files with 1941 additions and 840 deletions

.github/COPILOT_AGENT_README.md vendored Normal file (+13)
View File

@@ -0,0 +1,13 @@
# Copilot Coding Agent Configuration
This repository includes a minimal opt-in configuration and CI workflow to allow the GitHub Copilot coding agent to open and validate PRs.
- .copilot-agent.yml: opt-in config for automated agents
- .github/workflows/dotnetcore.yml: CI runs on PRs touching the solution, source, or tests to validate changes
- AGENTS.yml: general information for this project
Maintainers can adjust the allowed paths or disable the agent by editing or removing .copilot-agent.yml.
Notes:
- Do not change any other files in the repository.
- If build/test paths are different, update the workflow accordingly; this workflow targets SharpCompress.sln and the SharpCompress.Tests test project.

.github/agents/copilot-agent.yml vendored Normal file (+7)
View File

@@ -0,0 +1,7 @@
enabled: true
agent:
name: copilot-coding-agent
allow:
- paths: ["src/**/*", "tests/**/*", "README.md", "AGENTS.md"]
actions: ["create", "modify"]
require_review_before_merge: true

View File

@@ -1,6 +1,13 @@
version: 2
updates:
- package-ecosystem: "github-actions" # search for actions - there are other options available
directory: "/" # search in .github/workflows under root `/`
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly" # check for action update every week
interval: "weekly"
- package-ecosystem: "nuget"
directory: "/" # change to "/src/YourProject" if .csproj files are in subfolders
schedule:
interval: "weekly"
open-pull-requests-limit: 5
# optional: target-branch: "master"

View File

@@ -14,8 +14,8 @@ jobs:
os: [windows-latest, ubuntu-latest]
steps:
- uses: actions/checkout@v4
- uses: actions/setup-dotnet@v4
- uses: actions/checkout@v5
- uses: actions/setup-dotnet@v5
with:
dotnet-version: 8.0.x
- run: dotnet run --project build/build.csproj

View File

@@ -1,18 +1,18 @@
<Project>
<ItemGroup>
<PackageVersion Include="Bullseye" Version="6.0.0" />
<PackageVersion Include="AwesomeAssertions" Version="9.2.0" />
<PackageVersion Include="AwesomeAssertions" Version="9.2.1" />
<PackageVersion Include="Glob" Version="1.1.9" />
<PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.14" />
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="8.0.0" />
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="17.13.0" />
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="18.0.0" />
<PackageVersion Include="Mono.Posix.NETStandard" Version="1.0.0" />
<PackageVersion Include="SimpleExec" Version="12.0.0" />
<PackageVersion Include="System.Buffers" Version="4.6.0" />
<PackageVersion Include="System.Memory" Version="4.6.0" />
<PackageVersion Include="System.Buffers" Version="4.6.1" />
<PackageVersion Include="System.Memory" Version="4.6.3" />
<PackageVersion Include="System.Text.Encoding.CodePages" Version="8.0.0" />
<PackageVersion Include="xunit" Version="2.9.3" />
<PackageVersion Include="xunit.runner.visualstudio" Version="3.1.5" />
<PackageVersion Include="xunit.SkippableFact" Version="1.5.23" />
<PackageVersion Include="ZstdSharp.Port" Version="0.8.6" />
<PackageVersion Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
<PackageVersion Include="Microsoft.NETFramework.ReferenceAssemblies" Version="1.0.3" />

View File

@@ -23,6 +23,8 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{CDB425
.github\workflows\dotnetcore.yml = .github\workflows\dotnetcore.yml
EndProjectSection
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SharpCompress.Performance", "tests\SharpCompress.Performance\SharpCompress.Performance.csproj", "{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -41,6 +43,10 @@ Global
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Debug|Any CPU.Build.0 = Debug|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Release|Any CPU.ActiveCfg = Release|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Release|Any CPU.Build.0 = Release|Any CPU
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Debug|Any CPU.Build.0 = Debug|Any CPU
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Release|Any CPU.ActiveCfg = Release|Any CPU
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@@ -48,5 +54,6 @@ Global
GlobalSection(NestedProjects) = preSolution
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998} = {3C5BE746-03E5-4895-9988-0B57F162F86C}
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F} = {0F0901FF-E8D9-426A-B5A2-17C7F47C1529}
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17} = {0F0901FF-E8D9-426A-B5A2-17C7F47C1529}
EndGlobalSection
EndGlobal

View File

@@ -144,6 +144,12 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
/// <returns></returns>
public IReader ExtractAllEntries()
{
if (!IsSolid && Type != ArchiveType.SevenZip)
{
throw new InvalidOperationException(
"ExtractAllEntries can only be used on solid archives or 7Zip archives (which require random access)."
);
}
((IArchiveExtractionListener)this).EnsureEntriesLoaded();
return CreateReaderForSolidExtraction();
}
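
The guard above changes the contract of ExtractAllEntries: it now throws unless the archive is solid or 7Zip. A minimal sketch of how calling code might branch on that, using only members that already appear in this diff (IsSolid, Type, ExtractAllEntries, Entries, WriteTo, WriteEntryTo); the helper name and destination handling are illustrative, not part of the change.

using System.IO;
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.Readers;

static void ExtractAll(IArchive archive, string destination)
{
    if (archive.IsSolid || archive.Type == ArchiveType.SevenZip)
    {
        // Solid (and 7Zip) archives go through the sequential reader path.
        using var reader = archive.ExtractAllEntries();
        while (reader.MoveToNextEntry())
        {
            if (reader.Entry.IsDirectory)
            {
                continue;
            }
            using var fs = File.OpenWrite(Path.Combine(destination, reader.Entry.Key!));
            reader.WriteEntryTo(fs);
        }
    }
    else
    {
        // Everything else supports random access, so iterate the entries directly.
        foreach (var entry in archive.Entries)
        {
            if (entry.IsDirectory)
            {
                continue;
            }
            using var fs = File.OpenWrite(Path.Combine(destination, entry.Key!));
            entry.WriteTo(fs);
        }
    }
}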

View File

@@ -45,7 +45,7 @@ public static class ArchiveFactory
/// <param name="options"></param>
public static IArchive Open(string filePath, ReaderOptions? options = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), options);
}
@@ -68,7 +68,7 @@ public static class ArchiveFactory
/// <param name="options"></param>
public static IArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? options = null)
{
fileInfos.CheckNotNull(nameof(fileInfos));
fileInfos.NotNull(nameof(fileInfos));
var filesArray = fileInfos.ToArray();
if (filesArray.Length == 0)
{
@@ -81,7 +81,7 @@ public static class ArchiveFactory
return Open(fileInfo, options);
}
fileInfo.CheckNotNull(nameof(fileInfo));
fileInfo.NotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
return FindFactory<IMultiArchiveFactory>(fileInfo).Open(filesArray, options);
@@ -94,7 +94,7 @@ public static class ArchiveFactory
/// <param name="options"></param>
public static IArchive Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
{
streams.CheckNotNull(nameof(streams));
streams.NotNull(nameof(streams));
var streamsArray = streams.ToArray();
if (streamsArray.Length == 0)
{
@@ -107,7 +107,7 @@ public static class ArchiveFactory
return Open(firstStream, options);
}
firstStream.CheckNotNull(nameof(firstStream));
firstStream.NotNull(nameof(firstStream));
options ??= new ReaderOptions();
return FindFactory<IMultiArchiveFactory>(firstStream).Open(streamsArray, options);
@@ -129,7 +129,7 @@ public static class ArchiveFactory
private static T FindFactory<T>(FileInfo finfo)
where T : IFactory
{
finfo.CheckNotNull(nameof(finfo));
finfo.NotNull(nameof(finfo));
using Stream stream = finfo.OpenRead();
return FindFactory<T>(stream);
}
@@ -137,7 +137,7 @@ public static class ArchiveFactory
private static T FindFactory<T>(Stream stream)
where T : IFactory
{
stream.CheckNotNull(nameof(stream));
stream.NotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException("Stream should be readable and seekable");
@@ -172,7 +172,7 @@ public static class ArchiveFactory
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
filePath.NotNullOrEmpty(nameof(filePath));
using Stream s = File.OpenRead(filePath);
return IsArchive(s, out type, bufferSize);
}
@@ -184,7 +184,7 @@ public static class ArchiveFactory
)
{
type = null;
stream.CheckNotNull(nameof(stream));
stream.NotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
@@ -215,7 +215,7 @@ public static class ArchiveFactory
/// <returns></returns>
public static IEnumerable<string> GetFileParts(string part1)
{
part1.CheckNotNullOrEmpty(nameof(part1));
part1.NotNullOrEmpty(nameof(part1));
return GetFileParts(new FileInfo(part1)).Select(a => a.FullName);
}
@@ -226,7 +226,7 @@ public static class ArchiveFactory
/// <returns></returns>
public static IEnumerable<FileInfo> GetFileParts(FileInfo part1)
{
part1.CheckNotNull(nameof(part1));
part1.NotNull(nameof(part1));
yield return part1;
foreach (var factory in Factory.Factories.OfType<IFactory>())

View File

@@ -21,7 +21,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
/// <param name="readerOptions"></param>
public static GZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
@@ -32,7 +32,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
/// <param name="readerOptions"></param>
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
fileInfo.NotNull(nameof(fileInfo));
return new GZipArchive(
new SourceStream(
fileInfo,
@@ -52,7 +52,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
ReaderOptions? readerOptions = null
)
{
fileInfos.CheckNotNull(nameof(fileInfos));
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new GZipArchive(
new SourceStream(
@@ -70,7 +70,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
/// <param name="readerOptions"></param>
public static GZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.CheckNotNull(nameof(streams));
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new GZipArchive(
new SourceStream(
@@ -88,7 +88,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
/// <param name="readerOptions"></param>
public static GZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{

View File

@@ -25,7 +25,7 @@ public static class IArchiveEntryExtensions
using (entryStream)
{
using Stream s = new ListeningStream(streamListener, entryStream);
s.TransferTo(streamToWriteTo);
s.CopyTo(streamToWriteTo);
}
streamListener.FireEntryExtractionEnd(archiveEntry);
}

View File

@@ -45,12 +45,10 @@ public static class IArchiveExtensions
var seenDirectories = new HashSet<string>();
// Extract
var entries = archive.ExtractAllEntries();
while (entries.MoveToNextEntry())
foreach (var entry in archive.Entries)
{
cancellationToken.ThrowIfCancellationRequested();
var entry = entries.Entry;
if (entry.IsDirectory)
{
var dirPath = Path.Combine(destination, entry.Key.NotNull("Entry Key is null"));
@@ -77,7 +75,7 @@ public static class IArchiveExtensions
// Write file
using var fs = File.OpenWrite(path);
entries.WriteEntryTo(fs);
entry.WriteTo(fs);
// Update progress
bytesRead += entry.Size;

View File

@@ -95,7 +95,7 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
/// <param name="options"></param>
public static RarArchive Open(string filePath, ReaderOptions? options = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
filePath.NotNullOrEmpty(nameof(filePath));
var fileInfo = new FileInfo(filePath);
return new RarArchive(
new SourceStream(
@@ -113,7 +113,7 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
/// <param name="options"></param>
public static RarArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
fileInfo.NotNull(nameof(fileInfo));
return new RarArchive(
new SourceStream(
fileInfo,
@@ -130,7 +130,7 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
/// <param name="options"></param>
public static RarArchive Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
@@ -150,7 +150,7 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
ReaderOptions? readerOptions = null
)
{
fileInfos.CheckNotNull(nameof(fileInfos));
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new RarArchive(
new SourceStream(
@@ -168,7 +168,7 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
/// <param name="readerOptions"></param>
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.CheckNotNull(nameof(streams));
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new RarArchive(
new SourceStream(

View File

@@ -21,7 +21,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.CheckNotNullOrEmpty("filePath");
filePath.NotNullOrEmpty("filePath");
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
@@ -32,7 +32,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull("fileInfo");
fileInfo.NotNull("fileInfo");
return new SevenZipArchive(
new SourceStream(
fileInfo,
@@ -52,7 +52,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
ReaderOptions? readerOptions = null
)
{
fileInfos.CheckNotNull(nameof(fileInfos));
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new SevenZipArchive(
new SourceStream(
@@ -73,7 +73,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
ReaderOptions? readerOptions = null
)
{
streams.CheckNotNull(nameof(streams));
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new SevenZipArchive(
new SourceStream(
@@ -91,7 +91,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull("stream");
stream.NotNull("stream");
if (stream is not { CanSeek: true })
{

View File

@@ -22,7 +22,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
/// <param name="readerOptions"></param>
public static TarArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
@@ -33,7 +33,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
/// <param name="readerOptions"></param>
public static TarArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
fileInfo.NotNull(nameof(fileInfo));
return new TarArchive(
new SourceStream(
fileInfo,
@@ -53,7 +53,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
ReaderOptions? readerOptions = null
)
{
fileInfos.CheckNotNull(nameof(fileInfos));
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new TarArchive(
new SourceStream(
@@ -71,7 +71,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
/// <param name="readerOptions"></param>
public static TarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.CheckNotNull(nameof(streams));
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new TarArchive(
new SourceStream(
@@ -89,7 +89,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
/// <param name="readerOptions"></param>
public static TarArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
@@ -178,7 +178,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
using (var entryStream = entry.OpenEntryStream())
{
using var memoryStream = new MemoryStream();
entryStream.TransferTo(memoryStream);
entryStream.CopyTo(memoryStream);
memoryStream.Position = 0;
var bytes = memoryStream.ToArray();

View File

@@ -43,7 +43,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
/// <param name="readerOptions"></param>
public static ZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
@@ -54,7 +54,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
/// <param name="readerOptions"></param>
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
fileInfo.NotNull(nameof(fileInfo));
return new ZipArchive(
new SourceStream(
fileInfo,
@@ -74,7 +74,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
ReaderOptions? readerOptions = null
)
{
fileInfos.CheckNotNull(nameof(fileInfos));
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new ZipArchive(
new SourceStream(
@@ -92,7 +92,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
/// <param name="readerOptions"></param>
public static ZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.CheckNotNull(nameof(streams));
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new ZipArchive(
new SourceStream(
@@ -110,7 +110,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
/// <param name="readerOptions"></param>
public static ZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{

View File

@@ -91,8 +91,15 @@ internal abstract class ZipFileEntry : ZipHeader
protected void LoadExtra(byte[] extra)
{
for (var i = 0; i < extra.Length - 4; )
for (var i = 0; i < extra.Length; )
{
// Ensure we have at least a header (2-byte ID + 2-byte length)
if (i + 4 > extra.Length)
{
// Incomplete header — stop parsing extras
break;
}
var type = (ExtraDataType)BinaryPrimitives.ReadUInt16LittleEndian(extra.AsSpan(i));
if (!Enum.IsDefined(typeof(ExtraDataType), type))
{
@@ -106,7 +113,17 @@ internal abstract class ZipFileEntry : ZipHeader
if (length > extra.Length)
{
// bad extras block
return;
break; // allow processing optional other blocks
}
// Some ZIP files contain vendor-specific or malformed extra fields where the declared
// data length extends beyond the remaining buffer. This adjustment ensures that
// we only read data within bounds (i + 4 + length <= extra.Length)
// The example here is: 41 43 18 00 41 52 43 30 46 EB FF FF 51 29 03 C6 03 00 00 00 00 00 00 00 00
// No existing zip utility uses 0x4341 ('AC')
if (i + 4 + length > extra.Length)
{
// incomplete or corrupt field
break; // allow processing optional other blocks
}
var data = new byte[length];
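
Assembled from the two bounds checks above, the guarded walk over the extra block reads roughly as follows. This is a reconstruction for readability only: the Enum.IsDefined branch and the handling of each parsed field are elided in the hunk, and the advance step at the end is assumed.

// Each extra field is laid out as: 2-byte ID, 2-byte data length, then `length` bytes.
for (var i = 0; i < extra.Length; )
{
    if (i + 4 > extra.Length)
    {
        break; // incomplete header (less than ID + length), stop parsing
    }
    var type = (ExtraDataType)BinaryPrimitives.ReadUInt16LittleEndian(extra.AsSpan(i));
    var length = BinaryPrimitives.ReadUInt16LittleEndian(extra.AsSpan(i + 2));
    if (i + 4 + length > extra.Length)
    {
        break; // declared length overruns the buffer (vendor-specific or corrupt field)
    }
    var data = new byte[length];
    extra.AsSpan(i + 4, length).CopyTo(data);
    // ... interpret (type, data) as before ...
    i += 4 + length; // assumed advance; not visible in the hunk
}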

View File

@@ -1,11 +1,12 @@
#nullable disable
using System;
using System.Buffers;
using System.IO;
namespace SharpCompress.Compressors.LZMA.LZ;
internal class OutWindow
internal class OutWindow : IDisposable
{
private byte[] _buffer;
private int _windowSize;
@@ -15,19 +16,22 @@ internal class OutWindow
private int _pendingDist;
private Stream _stream;
public long _total;
public long _limit;
private long _total;
private long _limit;
public long Total => _total;
public void Create(int windowSize)
{
if (_windowSize != windowSize)
{
_buffer = new byte[windowSize];
}
else
{
_buffer[windowSize - 1] = 0;
if (_buffer is not null)
{
ArrayPool<byte>.Shared.Return(_buffer);
}
_buffer = ArrayPool<byte>.Shared.Rent(windowSize);
}
_buffer[windowSize - 1] = 0;
_windowSize = windowSize;
_pos = 0;
_streamPos = 0;
@@ -36,7 +40,22 @@ internal class OutWindow
_limit = 0;
}
public void Reset() => Create(_windowSize);
public void Dispose()
{
ReleaseStream();
if (_buffer is null)
{
return;
}
ArrayPool<byte>.Shared.Return(_buffer);
_buffer = null;
}
public void Reset()
{
ReleaseStream();
Create(_windowSize);
}
public void Init(Stream stream)
{
@@ -66,7 +85,7 @@ internal class OutWindow
_stream = null;
}
public void Flush()
private void Flush()
{
if (_stream is null)
{
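
Two properties of ArrayPool drive the shape of the Create/Dispose code above: Rent may hand back a larger, non-zeroed buffer than requested (which is why the class keeps tracking _windowSize rather than _buffer.Length), and every rented buffer must be returned exactly once (which is why Create returns the previous buffer before renting and Dispose nulls the field so a second call is a no-op). A compact sketch of that discipline, not code from the change itself:

int windowSize = 1 << 16;
var pool = ArrayPool<byte>.Shared;
byte[] buffer = pool.Rent(windowSize); // buffer.Length >= windowSize, contents not cleared
try
{
    // use buffer[0..windowSize) only; treat anything beyond as garbage
}
finally
{
    pool.Return(buffer); // return exactly once; never touch the array afterwards
}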

View File

@@ -294,7 +294,7 @@ public class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
}
else
{
_outWindow.SetLimit(long.MaxValue - _outWindow._total);
_outWindow.SetLimit(long.MaxValue - _outWindow.Total);
}
var rangeDecoder = new RangeCoder.Decoder();
@@ -305,6 +305,7 @@ public class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
_outWindow.ReleaseStream();
rangeDecoder.ReleaseStream();
_outWindow.Dispose();
_outWindow = null;
}
@@ -316,7 +317,7 @@ public class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
while (outWindow.HasSpace)
{
var posState = (uint)outWindow._total & _posStateMask;
var posState = (uint)outWindow.Total & _posStateMask;
if (
_isMatchDecoders[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState]
.Decode(rangeDecoder) == 0
@@ -328,18 +329,14 @@ public class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
{
b = _literalDecoder.DecodeWithMatchByte(
rangeDecoder,
(uint)outWindow._total,
(uint)outWindow.Total,
prevByte,
outWindow.GetByte((int)_rep0)
);
}
else
{
b = _literalDecoder.DecodeNormal(
rangeDecoder,
(uint)outWindow._total,
prevByte
);
b = _literalDecoder.DecodeNormal(rangeDecoder, (uint)outWindow.Total, prevByte);
}
outWindow.PutByte(b);
_state.UpdateChar();
@@ -424,7 +421,7 @@ public class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
_rep0 = posSlot;
}
}
if (_rep0 >= outWindow._total || _rep0 >= dictionarySizeCheck)
if (_rep0 >= outWindow.Total || _rep0 >= dictionarySizeCheck)
{
if (_rep0 == 0xFFFFFFFF)
{

View File

@@ -178,6 +178,7 @@ public class LzmaStream : Stream, IStreamStack
_position = _encoder.Code(null, true);
}
_inputStream?.Dispose();
_outWindow.Dispose();
}
base.Dispose(disposing);
}

View File

@@ -13,7 +13,7 @@ using SharpCompress.Compressors.Rar.VM;
namespace SharpCompress.Compressors.Rar.UnpackV1;
internal sealed partial class Unpack : BitInput, IRarUnpack, IDisposable
internal sealed partial class Unpack : BitInput, IRarUnpack
{
private readonly BitInput Inp;
private bool disposed;
@@ -22,15 +22,17 @@ internal sealed partial class Unpack : BitInput, IRarUnpack, IDisposable
// to ease in porting Unpack50.cs
Inp = this;
public void Dispose()
public override void Dispose()
{
if (!disposed)
{
base.Dispose();
if (!externalWindow)
{
ArrayPool<byte>.Shared.Return(window);
window = null;
}
rarVM.Dispose();
disposed = true;
}
}
@@ -574,104 +576,111 @@ internal sealed partial class Unpack : BitInput, IRarUnpack, IDisposable
var FilteredDataOffset = Prg.FilteredDataOffset;
var FilteredDataSize = Prg.FilteredDataSize;
var FilteredData = new byte[FilteredDataSize];
for (var i = 0; i < FilteredDataSize; i++)
var FilteredData = ArrayPool<byte>.Shared.Rent(FilteredDataSize);
try
{
FilteredData[i] = rarVM.Mem[FilteredDataOffset + i];
Array.Copy(
rarVM.Mem,
FilteredDataOffset,
FilteredData,
0,
FilteredDataSize
);
// Prg.GlobalData.get(FilteredDataOffset
// +
// i);
}
prgStack[I] = null;
while (I + 1 < prgStack.Count)
{
var NextFilter = prgStack[I + 1];
if (
NextFilter is null
|| NextFilter.BlockStart != BlockStart
|| NextFilter.BlockLength != FilteredDataSize
|| NextFilter.NextWindow
)
{
break;
}
// apply several filters to same data block
rarVM.setMemory(0, FilteredData, 0, FilteredDataSize);
// .SetMemory(0,FilteredData,FilteredDataSize);
var pPrg = filters[NextFilter.ParentFilter].Program;
var NextPrg = NextFilter.Program;
if (pPrg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
// copy global data from previous script execution
// if any
// NextPrg->GlobalData.Alloc(ParentPrg->GlobalData.Size());
NextPrg.GlobalData.SetSize(pPrg.GlobalData.Count);
// memcpy(&NextPrg->GlobalData[VM_FIXEDGLOBALSIZE],&ParentPrg->GlobalData[VM_FIXEDGLOBALSIZE],ParentPrg->GlobalData.Size()-VM_FIXEDGLOBALSIZE);
for (
var i = 0;
i < pPrg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE;
i++
)
{
NextPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] = pPrg.GlobalData[
RarVM.VM_FIXEDGLOBALSIZE + i
];
}
}
ExecuteCode(NextPrg);
if (NextPrg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
// save global data for next script execution
if (pPrg.GlobalData.Count < NextPrg.GlobalData.Count)
{
pPrg.GlobalData.SetSize(NextPrg.GlobalData.Count);
}
// memcpy(&ParentPrg->GlobalData[VM_FIXEDGLOBALSIZE],&NextPrg->GlobalData[VM_FIXEDGLOBALSIZE],NextPrg->GlobalData.Size()-VM_FIXEDGLOBALSIZE);
for (
var i = 0;
i < NextPrg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE;
i++
)
{
pPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] = NextPrg.GlobalData[
RarVM.VM_FIXEDGLOBALSIZE + i
];
}
}
else
{
pPrg.GlobalData.Clear();
}
FilteredDataOffset = NextPrg.FilteredDataOffset;
FilteredDataSize = NextPrg.FilteredDataSize;
FilteredData = new byte[FilteredDataSize];
for (var i = 0; i < FilteredDataSize; i++)
{
FilteredData[i] = NextPrg.GlobalData[FilteredDataOffset + i];
}
I++;
prgStack[I] = null;
while (I + 1 < prgStack.Count)
{
var NextFilter = prgStack[I + 1];
if (
NextFilter is null
|| NextFilter.BlockStart != BlockStart
|| NextFilter.BlockLength != FilteredDataSize
|| NextFilter.NextWindow
)
{
break;
}
// apply several filters to same data block
rarVM.setMemory(0, FilteredData, 0, FilteredDataSize);
// .SetMemory(0,FilteredData,FilteredDataSize);
var pPrg = filters[NextFilter.ParentFilter].Program;
var NextPrg = NextFilter.Program;
if (pPrg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
// copy global data from previous script execution
// if any
// NextPrg->GlobalData.Alloc(ParentPrg->GlobalData.Size());
NextPrg.GlobalData.SetSize(pPrg.GlobalData.Count);
// memcpy(&NextPrg->GlobalData[VM_FIXEDGLOBALSIZE],&ParentPrg->GlobalData[VM_FIXEDGLOBALSIZE],ParentPrg->GlobalData.Size()-VM_FIXEDGLOBALSIZE);
for (
var i = 0;
i < pPrg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE;
i++
)
{
NextPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] =
pPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i];
}
}
ExecuteCode(NextPrg);
if (NextPrg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
// save global data for next script execution
if (pPrg.GlobalData.Count < NextPrg.GlobalData.Count)
{
pPrg.GlobalData.SetSize(NextPrg.GlobalData.Count);
}
// memcpy(&ParentPrg->GlobalData[VM_FIXEDGLOBALSIZE],&NextPrg->GlobalData[VM_FIXEDGLOBALSIZE],NextPrg->GlobalData.Size()-VM_FIXEDGLOBALSIZE);
for (
var i = 0;
i < NextPrg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE;
i++
)
{
pPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] =
NextPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i];
}
}
else
{
pPrg.GlobalData.Clear();
}
FilteredDataOffset = NextPrg.FilteredDataOffset;
FilteredDataSize = NextPrg.FilteredDataSize;
if (FilteredData.Length < FilteredDataSize)
{
ArrayPool<byte>.Shared.Return(FilteredData);
FilteredData = ArrayPool<byte>.Shared.Rent(FilteredDataSize);
}
for (var i = 0; i < FilteredDataSize; i++)
{
FilteredData[i] = NextPrg.GlobalData[FilteredDataOffset + i];
}
I++;
prgStack[I] = null;
}
writeStream.Write(FilteredData, 0, FilteredDataSize);
writtenFileSize += FilteredDataSize;
destUnpSize -= FilteredDataSize;
WrittenBorder = BlockEnd;
WriteSize = (unpPtr - WrittenBorder) & PackDef.MAXWINMASK;
}
finally
{
ArrayPool<byte>.Shared.Return(FilteredData);
}
writeStream.Write(FilteredData, 0, FilteredDataSize);
unpSomeRead = true;
writtenFileSize += FilteredDataSize;
destUnpSize -= FilteredDataSize;
WrittenBorder = BlockEnd;
WriteSize = (unpPtr - WrittenBorder) & PackDef.MAXWINMASK;
}
else
{
@@ -695,15 +704,10 @@ internal sealed partial class Unpack : BitInput, IRarUnpack, IDisposable
private void UnpWriteArea(int startPtr, int endPtr)
{
if (endPtr != startPtr)
{
unpSomeRead = true;
}
if (endPtr < startPtr)
{
UnpWriteData(window, startPtr, -startPtr & PackDef.MAXWINMASK);
UnpWriteData(window, 0, endPtr);
unpAllBuf = true;
}
else
{
@@ -757,19 +761,27 @@ internal sealed partial class Unpack : BitInput, IRarUnpack, IDisposable
// System.out.println("copyString(" + length + ", " + distance + ")");
var destPtr = unpPtr - distance;
var safeZone = PackDef.MAXWINSIZE - 260;
// System.out.println(unpPtr+":"+distance);
if (destPtr >= 0 && destPtr < PackDef.MAXWINSIZE - 260 && unpPtr < PackDef.MAXWINSIZE - 260)
// Fast path: use Array.Copy for bulk operations when in safe zone
if (destPtr >= 0 && destPtr < safeZone && unpPtr < safeZone && distance >= length)
{
window[unpPtr++] = window[destPtr++];
while (--length > 0)
// Non-overlapping copy: can use Array.Copy directly
Array.Copy(window, destPtr, window, unpPtr, length);
unpPtr += length;
}
else if (destPtr >= 0 && destPtr < safeZone && unpPtr < safeZone)
{
// Overlapping copy in safe zone: use byte-by-byte to handle self-referential copies
for (int i = 0; i < length; i++)
{
window[unpPtr++] = window[destPtr++];
window[unpPtr + i] = window[destPtr + i];
}
unpPtr += length;
}
else
{
// Slow path with wraparound mask
while (length-- != 0)
{
window[unpPtr] = window[destPtr++ & PackDef.MAXWINMASK];
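
The new distance >= length test is what makes the Array.Copy fast path safe: when the copy distance is shorter than the length, the source window overlaps the bytes being written, and an LZ copy has to consume its own output. A standalone worked example (not from the change) showing why the two paths differ:

byte[] window = { 1, 2, 3, 0, 0, 0, 0, 0 };
int unpPtr = 3, distance = 1, length = 5;
int destPtr = unpPtr - distance; // 2: source range overlaps the destination
for (int i = 0; i < length; i++)
{
    window[unpPtr + i] = window[destPtr + i]; // reads bytes produced earlier in this loop
}
// window is now { 1, 2, 3, 3, 3, 3, 3, 3 }: the previous byte replicated five times.
// Array.Copy(window, 2, window, 3, 5) has memmove semantics (the original source values
// are preserved), so it would produce { 1, 2, 3, 3, 0, 0, 0, 0 } instead, which is wrong
// for LZ expansion; hence the overlapping case stays byte-by-byte.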
@@ -1028,7 +1040,7 @@ internal sealed partial class Unpack : BitInput, IRarUnpack, IDisposable
vmCode.Add((byte)(GetBits() >> 8));
AddBits(8);
}
return (AddVMCode(FirstByte, vmCode, Length));
return AddVMCode(FirstByte, vmCode);
}
private bool ReadVMCodePPM()
@@ -1073,12 +1085,12 @@ internal sealed partial class Unpack : BitInput, IRarUnpack, IDisposable
}
vmCode.Add((byte)Ch); // VMCode[I]=Ch;
}
return (AddVMCode(FirstByte, vmCode, Length));
return AddVMCode(FirstByte, vmCode);
}
private bool AddVMCode(int firstByte, List<byte> vmCode, int length)
private bool AddVMCode(int firstByte, List<byte> vmCode)
{
var Inp = new BitInput();
using var Inp = new BitInput();
Inp.InitBitInput();
// memcpy(Inp.InBuf,Code,Min(BitInput::MAX_SIZE,CodeSize));
@@ -1086,7 +1098,6 @@ internal sealed partial class Unpack : BitInput, IRarUnpack, IDisposable
{
Inp.InBuf[i] = vmCode[i];
}
rarVM.init();
int FiltPos;
if ((firstByte & 0x80) != 0)
@@ -1199,19 +1210,28 @@ internal sealed partial class Unpack : BitInput, IRarUnpack, IDisposable
{
return (false);
}
Span<byte> VMCode = stackalloc byte[VMCodeSize];
for (var I = 0; I < VMCodeSize; I++)
{
if (Inp.Overflow(3))
{
return (false);
}
VMCode[I] = (byte)(Inp.GetBits() >> 8);
Inp.AddBits(8);
}
// VM.Prepare(&VMCode[0],VMCodeSize,&Filter->Prg);
rarVM.prepare(VMCode, VMCodeSize, Filter.Program);
var VMCode = ArrayPool<byte>.Shared.Rent(VMCodeSize);
try
{
for (var I = 0; I < VMCodeSize; I++)
{
if (Inp.Overflow(3))
{
return (false);
}
VMCode[I] = (byte)(Inp.GetBits() >> 8);
Inp.AddBits(8);
}
// VM.Prepare(&VMCode[0],VMCodeSize,&Filter->Prg);
rarVM.prepare(VMCode.AsSpan(0, VMCodeSize), Filter.Program);
}
finally
{
ArrayPool<byte>.Shared.Return(VMCode);
}
}
StackFilter.Program.AltCommands = Filter.Program.Commands; // StackFilter->Prg.AltCmd=&Filter->Prg.Cmd[0];
StackFilter.Program.CommandCount = Filter.Program.CommandCount;

View File

@@ -19,14 +19,9 @@ internal partial class Unpack
private bool suspended;
internal bool unpAllBuf;
//private ComprDataIO unpIO;
private Stream readStream;
private Stream writeStream;
internal bool unpSomeRead;
private int readTop;
private long destUnpSize;
@@ -808,15 +803,10 @@ internal partial class Unpack
private void oldUnpWriteBuf()
{
if (unpPtr != wrPtr)
{
unpSomeRead = true;
}
if (unpPtr < wrPtr)
{
writeStream.Write(window, wrPtr, -wrPtr & PackDef.MAXWINMASK);
writeStream.Write(window, 0, unpPtr);
unpAllBuf = true;
}
else
{

View File

@@ -1,4 +1,5 @@
using System;
using System.Runtime.CompilerServices;
using SharpCompress.Compressors.Rar.VM;
namespace SharpCompress.Compressors.Rar.UnpackV1;
@@ -9,167 +10,15 @@ internal static class UnpackUtility
internal static uint DecodeNumber(this BitInput input, Decode.Decode dec) =>
(uint)input.decodeNumber(dec);
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static int decodeNumber(this BitInput input, Decode.Decode dec)
{
int bits;
long bitField = input.GetBits() & 0xfffe;
// if (bitField < dec.getDecodeLen()[8]) {
// if (bitField < dec.getDecodeLen()[4]) {
// if (bitField < dec.getDecodeLen()[2]) {
// if (bitField < dec.getDecodeLen()[1]) {
// bits = 1;
// } else {
// bits = 2;
// }
// } else {
// if (bitField < dec.getDecodeLen()[3]) {
// bits = 3;
// } else {
// bits = 4;
// }
// }
// } else {
// if (bitField < dec.getDecodeLen()[6]) {
// if (bitField < dec.getDecodeLen()[5])
// bits = 5;
// else
// bits = 6;
// } else {
// if (bitField < dec.getDecodeLen()[7]) {
// bits = 7;
// } else {
// bits = 8;
// }
// }
// }
// } else {
// if (bitField < dec.getDecodeLen()[12]) {
// if (bitField < dec.getDecodeLen()[10])
// if (bitField < dec.getDecodeLen()[9])
// bits = 9;
// else
// bits = 10;
// else if (bitField < dec.getDecodeLen()[11])
// bits = 11;
// else
// bits = 12;
// } else {
// if (bitField < dec.getDecodeLen()[14]) {
// if (bitField < dec.getDecodeLen()[13]) {
// bits = 13;
// } else {
// bits = 14;
// }
// } else {
// bits = 15;
// }
// }
// }
// addbits(bits);
// int N = dec.getDecodePos()[bits]
// + (((int) bitField - dec.getDecodeLen()[bits - 1]) >>> (16 - bits));
// if (N >= dec.getMaxNum()) {
// N = 0;
// }
// return (dec.getDecodeNum()[N]);
var decodeLen = dec.DecodeLen;
if (bitField < decodeLen[8])
{
if (bitField < decodeLen[4])
{
if (bitField < decodeLen[2])
{
if (bitField < decodeLen[1])
{
bits = 1;
}
else
{
bits = 2;
}
}
else
{
if (bitField < decodeLen[3])
{
bits = 3;
}
else
{
bits = 4;
}
}
}
else
{
if (bitField < decodeLen[6])
{
if (bitField < decodeLen[5])
{
bits = 5;
}
else
{
bits = 6;
}
}
else
{
if (bitField < decodeLen[7])
{
bits = 7;
}
else
{
bits = 8;
}
}
}
}
else
{
if (bitField < decodeLen[12])
{
if (bitField < decodeLen[10])
{
if (bitField < decodeLen[9])
{
bits = 9;
}
else
{
bits = 10;
}
}
else if (bitField < decodeLen[11])
{
bits = 11;
}
else
{
bits = 12;
}
}
else
{
if (bitField < decodeLen[14])
{
if (bitField < decodeLen[13])
{
bits = 13;
}
else
{
bits = 14;
}
}
else
{
bits = 15;
}
}
}
// Binary search to find the bit length - faster than nested ifs
int bits = FindDecodeBits(bitField, decodeLen);
input.AddBits(bits);
var N =
dec.DecodePos[bits]
@@ -181,6 +30,52 @@ internal static class UnpackUtility
return (dec.DecodeNum[N]);
}
/// <summary>
/// Fast binary search to find which bit length matches the bitField.
/// Optimized with cached array access to minimize memory lookups.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static int FindDecodeBits(long bitField, int[] decodeLen)
{
// Cache critical values to reduce array access overhead
long len4 = decodeLen[4];
long len8 = decodeLen[8];
long len12 = decodeLen[12];
if (bitField < len8)
{
if (bitField < len4)
{
long len2 = decodeLen[2];
if (bitField < len2)
{
return bitField < decodeLen[1] ? 1 : 2;
}
return bitField < decodeLen[3] ? 3 : 4;
}
long len6 = decodeLen[6];
if (bitField < len6)
{
return bitField < decodeLen[5] ? 5 : 6;
}
return bitField < decodeLen[7] ? 7 : 8;
}
if (bitField < len12)
{
long len10 = decodeLen[10];
if (bitField < len10)
{
return bitField < decodeLen[9] ? 9 : 10;
}
return bitField < decodeLen[11] ? 11 : 12;
}
long len14 = decodeLen[14];
return bitField < len14 ? (bitField < decodeLen[13] ? 13 : 14) : 15;
}
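
FindDecodeBits returns the smallest b in 1..14 with bitField < decodeLen[b], falling back to 15; because the cumulative DecodeLen table is non-decreasing, the comparison tree is equivalent to the straight scan below (a reference version for illustration, not part of the change):

private static int FindDecodeBitsLinear(long bitField, int[] decodeLen)
{
    for (var bits = 1; bits < 15; bits++)
    {
        if (bitField < decodeLen[bits])
        {
            return bits;
        }
    }
    return 15;
}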
internal static void makeDecodeTables(
Span<byte> lenTab,
int offset,
@@ -194,8 +89,7 @@ internal static class UnpackUtility
long M,
N;
new Span<int>(dec.DecodeNum).Clear(); // memset(Dec->DecodeNum,0,Size*sizeof(*Dec->DecodeNum));
new Span<int>(dec.DecodeNum).Clear();
for (i = 0; i < size; i++)
{
lenCount[lenTab[offset + i] & 0xF]++;

View File

@@ -413,7 +413,7 @@ internal partial class Unpack
else
//x memcpy(Mem,Window+BlockStart,BlockLength);
{
Utility.Copy(Window, BlockStart, Mem, 0, BlockLength);
Buffer.BlockCopy(Window, (int)BlockStart, Mem, 0, (int)BlockLength);
}
}
else
@@ -427,9 +427,21 @@ internal partial class Unpack
else
{
//x memcpy(Mem,Window+BlockStart,FirstPartLength);
Utility.Copy(Window, BlockStart, Mem, 0, FirstPartLength);
Buffer.BlockCopy(
Window,
(int)BlockStart,
Mem,
0,
(int)FirstPartLength
);
//x memcpy(Mem+FirstPartLength,Window,BlockEnd);
Utility.Copy(Window, 0, Mem, FirstPartLength, BlockEnd);
Buffer.BlockCopy(
Window,
0,
Mem,
(int)FirstPartLength,
(int)BlockEnd
);
}
}

View File

@@ -1,6 +1,9 @@
using System;
using System.Buffers;
namespace SharpCompress.Compressors.Rar.VM;
internal class BitInput
internal class BitInput : IDisposable
{
/// <summary> the max size of the input</summary>
internal const int MAX_SIZE = 0x8000;
@@ -20,9 +23,11 @@ internal class BitInput
set => inBit = value;
}
public bool ExternalBuffer;
private byte[] _privateBuffer = ArrayPool<byte>.Shared.Rent(MAX_SIZE);
private bool _disposed;
/// <summary> </summary>
internal BitInput() => InBuf = new byte[MAX_SIZE];
internal BitInput() => InBuf = _privateBuffer;
internal byte[] InBuf { get; }
@@ -87,4 +92,14 @@ internal class BitInput
/// <returns> true if an Oververflow would occur
/// </returns>
internal bool Overflow(int IncPtr) => (inAddr + IncPtr >= MAX_SIZE);
public virtual void Dispose()
{
if (_disposed)
{
return;
}
ArrayPool<byte>.Shared.Return(_privateBuffer);
_disposed = true;
}
}

View File

@@ -1,6 +1,5 @@
#nullable disable
using System;
using System.Buffers;
using System.Buffers.Binary;
using System.Collections.Generic;
@@ -16,7 +15,9 @@ internal sealed class RarVM : BitInput
// Mem.set_Renamed(offset + 3, Byte.valueOf((sbyte) ((Utility.URShift(value_Renamed, 24)) & 0xff)));
//}
internal byte[] Mem { get; private set; }
internal byte[] Mem => _memory.NotNull();
private byte[]? _memory = ArrayPool<byte>.Shared.Rent(VM_MEMSIZE + 4);
public const int VM_MEMSIZE = 0x40000;
@@ -40,11 +41,18 @@ internal sealed class RarVM : BitInput
private int IP;
internal RarVM() =>
//InitBlock();
Mem = null;
internal RarVM() { }
internal void init() => Mem ??= new byte[VM_MEMSIZE + 4];
public override void Dispose()
{
base.Dispose();
if (_memory is null)
{
return;
}
ArrayPool<byte>.Shared.Return(_memory);
_memory = null;
}
private bool IsVMMem(byte[] mem) => Mem == mem;
@@ -776,9 +784,10 @@ internal sealed class RarVM : BitInput
}
}
public void prepare(ReadOnlySpan<byte> code, int codeSize, VMPreparedProgram prg)
public void prepare(ReadOnlySpan<byte> code, VMPreparedProgram prg)
{
InitBitInput();
var codeSize = code.Length;
var cpLength = Math.Min(MAX_SIZE, codeSize);
// memcpy(inBuf,Code,Min(CodeSize,BitInput::MAX_SIZE));
@@ -795,7 +804,7 @@ internal sealed class RarVM : BitInput
prg.CommandCount = 0;
if (xorSum == code[0])
{
var filterType = IsStandardFilter(code, codeSize);
var filterType = IsStandardFilter(code);
if (filterType != VMStandardFilters.VMSF_NONE)
{
var curCmd = new VMPreparedCommand();
@@ -1105,7 +1114,7 @@ internal sealed class RarVM : BitInput
}
}
private VMStandardFilters IsStandardFilter(ReadOnlySpan<byte> code, int codeSize)
private VMStandardFilters IsStandardFilter(ReadOnlySpan<byte> code)
{
VMStandardFilterSignature[] stdList =
{
@@ -1130,6 +1139,7 @@ internal sealed class RarVM : BitInput
private void ExecuteStandardFilter(VMStandardFilters filterType)
{
var mem = Mem;
switch (filterType)
{
case VMStandardFilters.VMSF_E8:
@@ -1148,7 +1158,7 @@ internal sealed class RarVM : BitInput
);
for (var curPos = 0; curPos < dataSize - 4; )
{
var curByte = Mem[curPos++];
var curByte = mem[curPos++];
if (curByte == 0xe8 || curByte == cmpByte2)
{
// #ifdef PRESENT_INT32
@@ -1164,19 +1174,19 @@ internal sealed class RarVM : BitInput
// SET_VALUE(false,Data,Addr-Offset);
// #else
var offset = curPos + fileOffset;
long Addr = GetValue(false, Mem, curPos);
long Addr = GetValue(false, mem, curPos);
if ((Addr & unchecked((int)0x80000000)) != 0)
{
if (((Addr + offset) & unchecked((int)0x80000000)) == 0)
{
SetValue(false, Mem, curPos, (int)Addr + fileSize);
SetValue(false, mem, curPos, (int)Addr + fileSize);
}
}
else
{
if (((Addr - fileSize) & unchecked((int)0x80000000)) != 0)
{
SetValue(false, Mem, curPos, (int)(Addr - offset));
SetValue(false, mem, curPos, (int)(Addr - offset));
}
}
@@ -1204,7 +1214,7 @@ internal sealed class RarVM : BitInput
while (curPos < dataSize - 21)
{
var Byte = (Mem[curPos] & 0x1f) - 0x10;
var Byte = (mem[curPos] & 0x1f) - 0x10;
if (Byte >= 0)
{
var cmdMask = Masks[Byte];
@@ -1250,7 +1260,7 @@ internal sealed class RarVM : BitInput
var channels = R[0] & unchecked((int)0xFFffFFff);
var srcPos = 0;
var border = (dataSize * 2) & unchecked((int)0xFFffFFff);
SetValue(false, Mem, VM_GLOBALMEMADDR + 0x20, dataSize);
SetValue(false, mem, VM_GLOBALMEMADDR + 0x20, dataSize);
if (dataSize >= VM_GLOBALMEMADDR / 2)
{
break;
@@ -1268,7 +1278,7 @@ internal sealed class RarVM : BitInput
destPos += channels
)
{
Mem[destPos] = (PrevByte = (byte)(PrevByte - Mem[srcPos++]));
mem[destPos] = (PrevByte = (byte)(PrevByte - mem[srcPos++]));
}
}
}
@@ -1283,7 +1293,7 @@ internal sealed class RarVM : BitInput
var channels = 3;
var srcPos = 0;
var destDataPos = dataSize;
SetValue(false, Mem, VM_GLOBALMEMADDR + 0x20, dataSize);
SetValue(false, mem, VM_GLOBALMEMADDR + 0x20, dataSize);
if (dataSize >= VM_GLOBALMEMADDR / 2 || posR < 0)
{
break;
@@ -1299,8 +1309,8 @@ internal sealed class RarVM : BitInput
if (upperPos >= 3)
{
var upperDataPos = destDataPos + upperPos;
var upperByte = Mem[upperDataPos] & 0xff;
var upperLeftByte = Mem[upperDataPos - 3] & 0xff;
var upperByte = mem[upperDataPos] & 0xff;
var upperLeftByte = mem[upperDataPos - 3] & 0xff;
predicted = prevByte + upperByte - upperLeftByte;
var pa = Math.Abs((int)(predicted - prevByte));
var pb = Math.Abs((int)(predicted - upperByte));
@@ -1326,15 +1336,15 @@ internal sealed class RarVM : BitInput
predicted = prevByte;
}
prevByte = ((predicted - Mem[srcPos++]) & 0xff) & 0xff;
Mem[destDataPos + i] = (byte)(prevByte & 0xff);
prevByte = ((predicted - mem[srcPos++]) & 0xff) & 0xff;
mem[destDataPos + i] = (byte)(prevByte & 0xff);
}
}
for (int i = posR, border = dataSize - 2; i < border; i += 3)
{
var G = Mem[destDataPos + i + 1];
Mem[destDataPos + i] = (byte)(Mem[destDataPos + i] + G);
Mem[destDataPos + i + 2] = (byte)(Mem[destDataPos + i + 2] + G);
var G = mem[destDataPos + i + 1];
mem[destDataPos + i] = (byte)(mem[destDataPos + i] + G);
mem[destDataPos + i + 2] = (byte)(mem[destDataPos + i + 2] + G);
}
}
break;
@@ -1347,7 +1357,7 @@ internal sealed class RarVM : BitInput
var destDataPos = dataSize;
//byte *SrcData=Mem,*DestData=SrcData+DataSize;
SetValue(false, Mem, VM_GLOBALMEMADDR + 0x20, dataSize);
SetValue(false, mem, VM_GLOBALMEMADDR + 0x20, dataSize);
if (dataSize >= VM_GLOBALMEMADDR / 2)
{
break;
@@ -1377,10 +1387,10 @@ internal sealed class RarVM : BitInput
var predicted = (8 * prevByte) + (K1 * D1) + (K2 * D2) + (K3 * D3);
predicted = Utility.URShift(predicted, 3) & 0xff;
long curByte = Mem[srcPos++];
long curByte = mem[srcPos++];
predicted -= curByte;
Mem[destDataPos + i] = (byte)predicted;
mem[destDataPos + i] = (byte)predicted;
prevDelta = (byte)(predicted - prevByte);
//fix java byte
@@ -1480,15 +1490,15 @@ internal sealed class RarVM : BitInput
}
while (srcPos < dataSize)
{
var curByte = Mem[srcPos++];
if (curByte == 2 && (curByte = Mem[srcPos++]) != 2)
var curByte = mem[srcPos++];
if (curByte == 2 && (curByte = mem[srcPos++]) != 2)
{
curByte = (byte)(curByte - 32);
}
Mem[destPos++] = curByte;
mem[destPos++] = curByte;
}
SetValue(false, Mem, VM_GLOBALMEMADDR + 0x1c, destPos - dataSize);
SetValue(false, Mem, VM_GLOBALMEMADDR + 0x20, dataSize);
SetValue(false, mem, VM_GLOBALMEMADDR + 0x1c, destPos - dataSize);
SetValue(false, mem, VM_GLOBALMEMADDR + 0x20, dataSize);
}
break;
}
@@ -1528,15 +1538,14 @@ internal sealed class RarVM : BitInput
{
if (pos < VM_MEMSIZE)
{
//&& data!=Mem+Pos)
//memmove(Mem+Pos,Data,Min(DataSize,VM_MEMSIZE-Pos));
for (var i = 0; i < Math.Min(data.Length - offset, dataSize); i++)
// Use Array.Copy for fast bulk memory operations instead of byte-by-byte loop
// Calculate how much data can actually fit in VM memory
int copyLength = Math.Min(dataSize, VM_MEMSIZE - pos);
copyLength = Math.Min(copyLength, data.Length - offset);
if (copyLength > 0)
{
if ((VM_MEMSIZE - pos) < i)
{
break;
}
Mem[pos + i] = data[offset + i];
Array.Copy(data, offset, Mem, pos, copyLength);
}
}
}

View File

@@ -22,9 +22,7 @@ public class XZFooter
public static XZFooter FromStream(Stream stream)
{
var footer = new XZFooter(
new BinaryReader(SharpCompressStream.Create(stream, leaveOpen: true), Encoding.UTF8)
);
var footer = new XZFooter(new BinaryReader(stream, Encoding.UTF8, true));
footer.Process();
return footer;
}

View File

@@ -18,9 +18,7 @@ public class XZHeader
public static XZHeader FromStream(Stream stream)
{
var header = new XZHeader(
new BinaryReader(SharpCompressStream.Create(stream, leaveOpen: true), Encoding.UTF8)
);
var header = new XZHeader(new BinaryReader(stream, Encoding.UTF8, true));
header.Process();
return header;
}

View File

@@ -32,7 +32,7 @@ public class XZIndex
public static XZIndex FromStream(Stream stream, bool indexMarkerAlreadyVerified)
{
var index = new XZIndex(
new BinaryReader(SharpCompressStream.Create(stream, leaveOpen: true), Encoding.UTF8),
new BinaryReader(stream, Encoding.UTF8, true),
indexMarkerAlreadyVerified
);
index.Process();

View File

@@ -34,7 +34,7 @@ public abstract class Factory : IFactory
/// <exception cref="ArgumentNullException"><paramref name="factory"/> must not be null.</exception>
public static void RegisterFactory(Factory factory)
{
factory.CheckNotNull(nameof(factory));
factory.NotNull(nameof(factory));
_factories.Add(factory);
}

View File

@@ -1,4 +1,5 @@
using System;
using System.Buffers;
using System.Diagnostics;
using System.IO;
using System.Text;
@@ -44,7 +45,11 @@ public class SharpCompressStream : Stream, IStreamStack
_bufferingEnabled = _bufferSize > 0;
if (_bufferingEnabled)
{
_buffer = new byte[_bufferSize];
if (_buffer is not null)
{
ArrayPool<byte>.Shared.Return(_buffer);
}
_buffer = ArrayPool<byte>.Shared.Rent(_bufferSize);
_bufferPosition = 0;
_bufferedLength = 0;
if (_bufferingEnabled)
@@ -173,6 +178,11 @@ public class SharpCompressStream : Stream, IStreamStack
if (disposing)
{
Stream.Dispose();
if (_buffer != null)
{
ArrayPool<byte>.Shared.Return(_buffer);
_buffer = null;
}
}
}

View File

@@ -4,7 +4,7 @@ using System;
using System.Collections;
using System.Collections.Generic;
namespace SharpCompress.Helpers;
namespace SharpCompress;
internal sealed class LazyReadOnlyCollection<T> : ICollection<T>
{

View File

@@ -4,20 +4,19 @@ using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Runtime.CompilerServices;
namespace SharpCompress.Helpers;
namespace SharpCompress;
internal static class NotNullExtensions
{
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static IEnumerable<T> Empty<T>(this IEnumerable<T>? source) =>
source ?? Enumerable.Empty<T>();
public static IEnumerable<T> Empty<T>(this IEnumerable<T>? source) => source ?? [];
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static IEnumerable<T> Empty<T>(this T? source)
{
if (source is null)
{
return Enumerable.Empty<T>();
return [];
}
return source.AsEnumerable();
}
@@ -68,4 +67,15 @@ internal static class NotNullExtensions
return obj.Value;
}
#endif
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static string NotNullOrEmpty(this string obj, string name)
{
obj.NotNull(name);
if (obj.Length == 0)
{
throw new ArgumentException("String is empty.", name);
}
return obj;
}
}
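
The new NotNullOrEmpty overload above delegates to NotNull, which the rest of this diff calls everywhere but never shows. A plausible shape for that extension, stated here purely as an assumption about code outside the hunk:

// Assumed helper; SharpCompress's actual signature and exception choice may differ.
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static T NotNull<T>(this T? obj, string? name = null)
    where T : class
{
    if (obj is null)
    {
        throw new ArgumentNullException(name);
    }
    return obj;
}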

View File

@@ -24,7 +24,7 @@ namespace SharpCompress.Readers.Arc
/// <returns></returns>
public static ArcReader Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
stream.NotNull(nameof(stream));
return new ArcReader(stream, options ?? new ReaderOptions());
}

View File

@@ -22,7 +22,7 @@ public class GZipReader : AbstractReader<GZipEntry, GZipVolume>
/// <returns></returns>
public static GZipReader Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
stream.NotNull(nameof(stream));
return new GZipReader(stream, options ?? new ReaderOptions());
}

View File

@@ -40,6 +40,29 @@ public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
public override RarVolume? Volume => volume;
public static RarReader Open(string filePath, ReaderOptions? options = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), options);
}
public static RarReader Open(FileInfo fileInfo, ReaderOptions? options = null)
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
return Open(fileInfo.OpenRead(), options);
}
public static RarReader Open(IEnumerable<string> filePaths, ReaderOptions? options = null)
{
return Open(filePaths.Select(x => new FileInfo(x)), options);
}
public static RarReader Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? options = null)
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
return Open(fileInfos.Select(x => x.OpenRead()), options);
}
/// <summary>
/// Opens a RarReader for Non-seeking usage with a single volume
/// </summary>
@@ -48,7 +71,7 @@ public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
/// <returns></returns>
public static RarReader Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
stream.NotNull(nameof(stream));
return new SingleVolumeRarReader(stream, options ?? new ReaderOptions());
}
@@ -60,7 +83,7 @@ public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
/// <returns></returns>
public static RarReader Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
{
streams.CheckNotNull(nameof(streams));
streams.NotNull(nameof(streams));
return new MultiVolumeRarReader(streams, options ?? new ReaderOptions());
}

View File

@@ -9,6 +9,18 @@ namespace SharpCompress.Readers;
public static class ReaderFactory
{
public static IReader Open(string filePath, ReaderOptions? options = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), options);
}
public static IReader Open(FileInfo fileInfo, ReaderOptions? options = null)
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
return Open(fileInfo.OpenRead(), options);
}
/// <summary>
/// Opens a Reader for Non-seeking usage
/// </summary>
@@ -17,7 +29,7 @@ public static class ReaderFactory
/// <returns></returns>
public static IReader Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
stream.NotNull(nameof(stream));
options ??= new ReaderOptions() { LeaveStreamOpen = false };
var bStream = new SharpCompressStream(stream, bufferSize: options.BufferSize);
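A brief sketch of the new convenience overload on ReaderFactory; the file name is a placeholder:

using System.IO;
using SharpCompress.Readers;

using var reader = ReaderFactory.Open("example.tar.gz");
while (reader.MoveToNextEntry())
{
    if (!reader.Entry.IsDirectory)
    {
        reader.WriteEntryTo(Stream.Null); // or copy entry streams wherever needed
    }
}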

View File

@@ -55,7 +55,7 @@ public class TarReader : AbstractReader<TarEntry, TarVolume>
/// <returns></returns>
public static TarReader Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
stream.NotNull(nameof(stream));
options = options ?? new ReaderOptions();
var rewindableStream = new SharpCompressStream(stream);

View File

@@ -44,7 +44,7 @@ public class ZipReader : AbstractReader<ZipEntry, ZipVolume>
/// <returns></returns>
public static ZipReader Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
stream.NotNull(nameof(stream));
return new ZipReader(stream, options ?? new ReaderOptions());
}
@@ -54,7 +54,7 @@ public class ZipReader : AbstractReader<ZipEntry, ZipVolume>
IEnumerable<ZipEntry> entries
)
{
stream.CheckNotNull(nameof(stream));
stream.NotNull(nameof(stream));
return new ZipReader(stream, options ?? new ReaderOptions(), entries);
}

View File

@@ -35,9 +35,14 @@
<DefineConstants>$(DefineConstants);DEBUG_STREAMS</DefineConstants>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="System.Buffers" />
<PackageReference Include="ZstdSharp.Port" />
<PackageReference Include="Microsoft.SourceLink.GitHub" PrivateAssets="All" />
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'net48' Or '$(TargetFramework)' == 'net481' ">
<PackageReference Include="Microsoft.Bcl.AsyncInterfaces" />
<PackageReference Include="System.Text.Encoding.CodePages" />
<PackageReference Include="System.Buffers" />
<PackageReference Include="System.Memory" />
<PackageReference Include="Microsoft.NETFramework.ReferenceAssemblies" PrivateAssets="All" />
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard2.0' ">

View File

@@ -1,4 +1,3 @@
global using SharpCompress.Helpers;
using System;
using System.Buffers;
using System.Collections.Generic;
@@ -7,10 +6,14 @@ using System.IO;
using System.Text;
using SharpCompress.Readers;
namespace SharpCompress.Helpers;
namespace SharpCompress;
internal static class Utility
{
//80 KB (81,920 bytes) is a common temporary buffer size; it matches the default used by Stream.CopyTo
private const int TEMP_BUFFER_SIZE = 81920;
private static readonly HashSet<char> invalidChars = new(Path.GetInvalidFileNameChars());
public static ReadOnlyCollection<T> ToReadOnly<T>(this IList<T> items) => new(items);
/// <summary>
@@ -19,14 +22,7 @@ internal static class Utility
/// <param name="number">Number to operate on</param>
/// <param name="bits">Amount of bits to shift</param>
/// <returns>The resulting number from the shift operation</returns>
public static int URShift(int number, int bits)
{
if (number >= 0)
{
return number >> bits;
}
return (number >> bits) + (2 << ~bits);
}
public static int URShift(int number, int bits) => (int)((uint)number >> bits);
/// <summary>
/// Performs an unsigned bitwise right shift with the specified number
@@ -34,14 +30,7 @@ internal static class Utility
/// <param name="number">Number to operate on</param>
/// <param name="bits">Amount of bits to shift</param>
/// <returns>The resulting number from the shift operation</returns>
public static long URShift(long number, int bits)
{
if (number >= 0)
{
return number >> bits;
}
return (number >> bits) + (2L << ~bits);
}
public static long URShift(long number, int bits) => (long)((ulong)number >> bits);
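// Both overloads now match the semantics of an unsigned (logical) right shift:
//   URShift(-1, 1)  == int.MaxValue   (0x7FFFFFFF)
//   URShift(-1L, 1) == long.MaxValue
//   URShift(16, 2)  == 4
// Casting to uint/ulong before shifting fills the vacated bits with zeros instead of the sign bit.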
public static void SetSize(this List<byte> list, int count)
{
@@ -68,60 +57,11 @@ internal static class Utility
}
}
public static void Copy(
Array sourceArray,
long sourceIndex,
Array destinationArray,
long destinationIndex,
long length
)
{
if (sourceIndex > int.MaxValue || sourceIndex < int.MinValue)
{
throw new ArgumentOutOfRangeException(nameof(sourceIndex));
}
if (destinationIndex > int.MaxValue || destinationIndex < int.MinValue)
{
throw new ArgumentOutOfRangeException(nameof(destinationIndex));
}
if (length > int.MaxValue || length < int.MinValue)
{
throw new ArgumentOutOfRangeException(nameof(length));
}
Array.Copy(
sourceArray,
(int)sourceIndex,
destinationArray,
(int)destinationIndex,
(int)length
);
}
public static IEnumerable<T> AsEnumerable<T>(this T item)
{
yield return item;
}
public static void CheckNotNull(this object obj, string name)
{
if (obj is null)
{
throw new ArgumentNullException(name);
}
}
public static void CheckNotNullOrEmpty(this string obj, string name)
{
obj.CheckNotNull(name);
if (obj.Length == 0)
{
throw new ArgumentException("String is empty.", name);
}
}
public static void Skip(this Stream source, long advanceAmount)
{
if (source.CanSeek)
@@ -130,79 +70,23 @@ internal static class Utility
return;
}
var buffer = GetTransferByteArray();
try
using var buffer = MemoryPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
while (advanceAmount > 0)
{
var read = 0;
var readCount = 0;
do
var toRead = (int)Math.Min(buffer.Memory.Length, advanceAmount);
var read = source.Read(buffer.Memory.Slice(0, toRead).Span);
if (read <= 0)
{
readCount = buffer.Length;
if (readCount > advanceAmount)
{
readCount = (int)advanceAmount;
}
read = source.Read(buffer, 0, readCount);
if (read <= 0)
{
break;
}
advanceAmount -= read;
if (advanceAmount == 0)
{
break;
}
} while (true);
}
finally
{
ArrayPool<byte>.Shared.Return(buffer);
break;
}
advanceAmount -= read;
}
}
public static void Skip(this Stream source)
{
var buffer = GetTransferByteArray();
try
{
do { } while (source.Read(buffer, 0, buffer.Length) == buffer.Length);
}
finally
{
ArrayPool<byte>.Shared.Return(buffer);
}
}
public static bool Find(this Stream source, byte[] array)
{
var buffer = GetTransferByteArray();
try
{
var count = 0;
var len = source.Read(buffer, 0, buffer.Length);
do
{
for (var i = 0; i < len; i++)
{
if (array[count] == buffer[i])
{
count++;
if (count == array.Length)
{
source.Position = source.Position - len + i - array.Length + 1;
return true;
}
}
}
} while ((len = source.Read(buffer, 0, buffer.Length)) > 0);
}
finally
{
ArrayPool<byte>.Shared.Return(buffer);
}
return false;
using var buffer = MemoryPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
while (source.Read(buffer.Memory.Span) > 0) { }
}
public static DateTime DosDateToDateTime(ushort iDate, ushort iTime)
@@ -271,31 +155,12 @@ internal static class Utility
return sTime.AddSeconds(unixtime);
}
public static long TransferTo(this Stream source, Stream destination)
{
var array = GetTransferByteArray();
try
{
long total = 0;
while (ReadTransferBlock(source, array, out var count))
{
destination.Write(array, 0, count);
total += count;
}
return total;
}
finally
{
ArrayPool<byte>.Shared.Return(array);
}
}
public static long TransferTo(this Stream source, Stream destination, long maxLength)
{
var array = GetTransferByteArray();
var maxReadSize = array.Length;
var array = ArrayPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
try
{
var maxReadSize = array.Length;
long total = 0;
var remaining = maxLength;
if (remaining < maxReadSize)
@@ -331,12 +196,13 @@ internal static class Utility
IReaderExtractionListener readerExtractionListener
)
{
var array = GetTransferByteArray();
var array = ArrayPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
try
{
var iterations = 0;
long total = 0;
while (ReadTransferBlock(source, array, out var count))
int count;
while ((count = source.Read(array, 0, array.Length)) != 0)
{
total += count;
destination.Write(array, 0, count);
@@ -351,12 +217,10 @@ internal static class Utility
}
}
private static bool ReadTransferBlock(Stream source, byte[] array, out int count) =>
(count = source.Read(array, 0, array.Length)) != 0;
private static bool ReadTransferBlock(Stream source, byte[] array, int size, out int count)
private static bool ReadTransferBlock(Stream source, byte[] array, int maxSize, out int count)
{
if (size > array.Length)
var size = maxSize;
if (maxSize > array.Length)
{
size = array.Length;
}
@@ -364,8 +228,34 @@ internal static class Utility
return count != 0;
}
private static byte[] GetTransferByteArray() => ArrayPool<byte>.Shared.Rent(81920);
#if NET7_0_OR_GREATER
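// Stream.ReadExactly throws EndOfStreamException when the stream ends before the buffer is filled;
// these wrappers translate that into the bool result the callers expect.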
public static bool ReadFully(this Stream stream, byte[] buffer)
{
try
{
stream.ReadExactly(buffer);
return true;
}
catch (EndOfStreamException)
{
return false;
}
}
public static bool ReadFully(this Stream stream, Span<byte> buffer)
{
try
{
stream.ReadExactly(buffer);
return true;
}
catch (EndOfStreamException)
{
return false;
}
}
#else
public static bool ReadFully(this Stream stream, byte[] buffer)
{
var total = 0;
@@ -395,6 +285,7 @@ internal static class Utility
}
return (total >= buffer.Length);
}
#endif
public static string TrimNulls(this string source) => source.Replace('\0', ' ').Trim();
@@ -439,7 +330,6 @@ internal static class Utility
public static string ReplaceInvalidFileNameChars(string fileName)
{
var invalidChars = new HashSet<char>(Path.GetInvalidFileNameChars());
var sb = new StringBuilder(fileName.Length);
foreach (var c in fileName)
{

View File

@@ -47,7 +47,7 @@ public sealed class GZipWriter : AbstractWriter
var stream = (GZipStream)OutputStream;
stream.FileName = filename;
stream.LastModified = modificationTime;
source.TransferTo(stream);
source.CopyTo(stream);
_wroteToStream = true;
}
}

View File

@@ -83,7 +83,7 @@ public class ZipWriter : AbstractWriter
public void Write(string entryPath, Stream source, ZipWriterEntryOptions zipWriterEntryOptions)
{
using var output = WriteToStream(entryPath, zipWriterEntryOptions);
source.TransferTo(output);
source.CopyTo(output);
}
public Stream WriteToStream(string entryPath, ZipWriterEntryOptions options)

View File

@@ -2,6 +2,15 @@
"version": 2,
"dependencies": {
".NETFramework,Version=v4.8": {
"Microsoft.Bcl.AsyncInterfaces": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "3WA9q9yVqJp222P3x1wYIGDAkpjAku0TMUaaQV22g6L67AI0LdOIrVS7Ht2vJfLHGSPVuqN94vIr15qn+HEkHw==",
"dependencies": {
"System.Threading.Tasks.Extensions": "4.5.4"
}
},
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",
"requested": "[1.0.3, )",
@@ -23,9 +32,30 @@
},
"System.Buffers": {
"type": "Direct",
"requested": "[4.6.0, )",
"resolved": "4.6.0",
"contentHash": "lN6tZi7Q46zFzAbRYXTIvfXcyvQQgxnY7Xm6C6xQ9784dEL1amjM6S6Iw4ZpsvesAKnRVsM4scrDQaDqSClkjA=="
"requested": "[4.6.1, )",
"resolved": "4.6.1",
"contentHash": "N8GXpmiLMtljq7gwvyS+1QvKT/W2J8sNAvx+HVg4NGmsG/H+2k/y9QI23auLJRterrzCiDH+IWAw4V/GPwsMlw=="
},
"System.Memory": {
"type": "Direct",
"requested": "[4.6.3, )",
"resolved": "4.6.3",
"contentHash": "qdcDOgnFZY40+Q9876JUHnlHu7bosOHX8XISRoH94fwk6hgaeQGSgfZd8srWRZNt5bV9ZW2TljcegDNxsf+96A==",
"dependencies": {
"System.Buffers": "4.6.1",
"System.Numerics.Vectors": "4.6.1",
"System.Runtime.CompilerServices.Unsafe": "6.1.2"
}
},
"System.Text.Encoding.CodePages": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "OZIsVplFGaVY90G2SbpgU7EnCoOO5pw1t4ic21dBF3/1omrJFpAGoNAVpPyMVOC90/hvgkGG3VFqR13YgZMQfg==",
"dependencies": {
"System.Memory": "4.5.5",
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
}
},
"ZstdSharp.Port": {
"type": "Direct",
@@ -55,13 +85,13 @@
},
"System.Numerics.Vectors": {
"type": "Transitive",
"resolved": "4.5.0",
"contentHash": "QQTlPTl06J/iiDbJCiepZ4H//BVraReU4O4EoRw1U02H5TLUIT7xn3GnDp9AXPSlJUDyFs4uWjWafNX6WrAojQ=="
"resolved": "4.6.1",
"contentHash": "sQxefTnhagrhoq2ReR0D/6K0zJcr9Hrd6kikeXsA1I8kOCboTavcUC4r7TSfpKFeE163uMuxZcyfO1mGO3EN8Q=="
},
"System.Runtime.CompilerServices.Unsafe": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "/iUeP3tq1S0XdNNoMz5C9twLSrM/TH+qElHkXWaPvuNOt+99G75NrV0OS2EqHx5wMN7popYjpc8oTjC1y16DLg=="
"resolved": "6.1.2",
"contentHash": "2hBr6zdbIBTDE3EhK7NSVNdX58uTK6iHW/P/Axmm9sl1xoGSLqDvMtpecn226TNwHByFokYwJmt/aQQNlO5CRw=="
},
"System.Threading.Tasks.Extensions": {
"type": "Transitive",
@@ -70,29 +100,18 @@
"dependencies": {
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
},
}
},
".NETFramework,Version=v4.8.1": {
"Microsoft.Bcl.AsyncInterfaces": {
"type": "CentralTransitive",
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "5.0.0",
"contentHash": "W8DPQjkMScOMTtJbPwmPyj9c3zYSFGawDW3jwlBOOsnY+EzZFLgNQ/UMkK35JmkNOVPdCyPr2Tw7Vv9N+KA3ZQ==",
"resolved": "8.0.0",
"contentHash": "3WA9q9yVqJp222P3x1wYIGDAkpjAku0TMUaaQV22g6L67AI0LdOIrVS7Ht2vJfLHGSPVuqN94vIr15qn+HEkHw==",
"dependencies": {
"System.Threading.Tasks.Extensions": "4.5.4"
}
},
"System.Memory": {
"type": "CentralTransitive",
"requested": "[4.6.0, )",
"resolved": "4.5.5",
"contentHash": "XIWiDvKPXaTveaB7HVganDlOCRoj03l+jrwNvcge/t8vhGYKvqV+dMv6G4SAX2NoNmN0wZfVPTAlFwZcZvVOUw==",
"dependencies": {
"System.Buffers": "4.5.1",
"System.Numerics.Vectors": "4.5.0",
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
}
},
".NETFramework,Version=v4.8.1": {
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",
"requested": "[1.0.3, )",
@@ -114,9 +133,30 @@
},
"System.Buffers": {
"type": "Direct",
"requested": "[4.6.0, )",
"resolved": "4.6.0",
"contentHash": "lN6tZi7Q46zFzAbRYXTIvfXcyvQQgxnY7Xm6C6xQ9784dEL1amjM6S6Iw4ZpsvesAKnRVsM4scrDQaDqSClkjA=="
"requested": "[4.6.1, )",
"resolved": "4.6.1",
"contentHash": "N8GXpmiLMtljq7gwvyS+1QvKT/W2J8sNAvx+HVg4NGmsG/H+2k/y9QI23auLJRterrzCiDH+IWAw4V/GPwsMlw=="
},
"System.Memory": {
"type": "Direct",
"requested": "[4.6.3, )",
"resolved": "4.6.3",
"contentHash": "qdcDOgnFZY40+Q9876JUHnlHu7bosOHX8XISRoH94fwk6hgaeQGSgfZd8srWRZNt5bV9ZW2TljcegDNxsf+96A==",
"dependencies": {
"System.Buffers": "4.6.1",
"System.Numerics.Vectors": "4.6.1",
"System.Runtime.CompilerServices.Unsafe": "6.1.2"
}
},
"System.Text.Encoding.CodePages": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "OZIsVplFGaVY90G2SbpgU7EnCoOO5pw1t4ic21dBF3/1omrJFpAGoNAVpPyMVOC90/hvgkGG3VFqR13YgZMQfg==",
"dependencies": {
"System.Memory": "4.5.5",
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
}
},
"ZstdSharp.Port": {
"type": "Direct",
@@ -146,13 +186,13 @@
},
"System.Numerics.Vectors": {
"type": "Transitive",
"resolved": "4.5.0",
"contentHash": "QQTlPTl06J/iiDbJCiepZ4H//BVraReU4O4EoRw1U02H5TLUIT7xn3GnDp9AXPSlJUDyFs4uWjWafNX6WrAojQ=="
"resolved": "4.6.1",
"contentHash": "sQxefTnhagrhoq2ReR0D/6K0zJcr9Hrd6kikeXsA1I8kOCboTavcUC4r7TSfpKFeE163uMuxZcyfO1mGO3EN8Q=="
},
"System.Runtime.CompilerServices.Unsafe": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "/iUeP3tq1S0XdNNoMz5C9twLSrM/TH+qElHkXWaPvuNOt+99G75NrV0OS2EqHx5wMN7popYjpc8oTjC1y16DLg=="
"resolved": "6.1.2",
"contentHash": "2hBr6zdbIBTDE3EhK7NSVNdX58uTK6iHW/P/Axmm9sl1xoGSLqDvMtpecn226TNwHByFokYwJmt/aQQNlO5CRw=="
},
"System.Threading.Tasks.Extensions": {
"type": "Transitive",
@@ -161,26 +201,6 @@
"dependencies": {
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
},
"Microsoft.Bcl.AsyncInterfaces": {
"type": "CentralTransitive",
"requested": "[8.0.0, )",
"resolved": "5.0.0",
"contentHash": "W8DPQjkMScOMTtJbPwmPyj9c3zYSFGawDW3jwlBOOsnY+EzZFLgNQ/UMkK35JmkNOVPdCyPr2Tw7Vv9N+KA3ZQ==",
"dependencies": {
"System.Threading.Tasks.Extensions": "4.5.4"
}
},
"System.Memory": {
"type": "CentralTransitive",
"requested": "[4.6.0, )",
"resolved": "4.5.5",
"contentHash": "XIWiDvKPXaTveaB7HVganDlOCRoj03l+jrwNvcge/t8vhGYKvqV+dMv6G4SAX2NoNmN0wZfVPTAlFwZcZvVOUw==",
"dependencies": {
"System.Buffers": "4.5.1",
"System.Numerics.Vectors": "4.5.0",
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
}
},
".NETStandard,Version=v2.0": {
@@ -193,15 +213,6 @@
"System.Threading.Tasks.Extensions": "4.5.4"
}
},
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",
"requested": "[1.0.3, )",
"resolved": "1.0.3",
"contentHash": "vUc9Npcs14QsyOD01tnv/m8sQUnGTGOw1BCmKcv77LBJY7OxhJ+zJF7UD/sCL3lYNFuqmQEVlkfS4Quif6FyYg==",
"dependencies": {
"Microsoft.NETFramework.ReferenceAssemblies.net461": "1.0.3"
}
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
@@ -221,21 +232,15 @@
"Microsoft.NETCore.Platforms": "1.1.0"
}
},
"System.Buffers": {
"type": "Direct",
"requested": "[4.6.0, )",
"resolved": "4.6.0",
"contentHash": "lN6tZi7Q46zFzAbRYXTIvfXcyvQQgxnY7Xm6C6xQ9784dEL1amjM6S6Iw4ZpsvesAKnRVsM4scrDQaDqSClkjA=="
},
"System.Memory": {
"type": "Direct",
"requested": "[4.6.0, )",
"resolved": "4.6.0",
"contentHash": "OEkbBQoklHngJ8UD8ez2AERSk2g+/qpAaSWWCBFbpH727HxDq5ydVkuncBaKcKfwRqXGWx64dS6G1SUScMsitg==",
"requested": "[4.6.3, )",
"resolved": "4.6.3",
"contentHash": "qdcDOgnFZY40+Q9876JUHnlHu7bosOHX8XISRoH94fwk6hgaeQGSgfZd8srWRZNt5bV9ZW2TljcegDNxsf+96A==",
"dependencies": {
"System.Buffers": "4.6.0",
"System.Numerics.Vectors": "4.6.0",
"System.Runtime.CompilerServices.Unsafe": "6.1.0"
"System.Buffers": "4.6.1",
"System.Numerics.Vectors": "4.6.1",
"System.Runtime.CompilerServices.Unsafe": "6.1.2"
}
},
"System.Text.Encoding.CodePages": {
@@ -269,11 +274,6 @@
"resolved": "1.1.0",
"contentHash": "kz0PEW2lhqygehI/d6XsPCQzD7ff7gUJaVGPVETX611eadGsA3A877GdSlU0LRVMCTH/+P3o2iDTak+S08V2+A=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net461": {
"type": "Transitive",
"resolved": "1.0.3",
"contentHash": "AmOJZwCqnOCNp6PPcf9joyogScWLtwy0M1WkqfEQ0M9nYwyDD7EX9ZjscKS5iYnyvteX7kzSKFCKt9I9dXA6mA=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
@@ -281,13 +281,13 @@
},
"System.Numerics.Vectors": {
"type": "Transitive",
"resolved": "4.6.0",
"contentHash": "t+SoieZsRuEyiw/J+qXUbolyO219tKQQI0+2/YI+Qv7YdGValA6WiuokrNKqjrTNsy5ABWU11bdKOzUdheteXg=="
"resolved": "4.6.1",
"contentHash": "sQxefTnhagrhoq2ReR0D/6K0zJcr9Hrd6kikeXsA1I8kOCboTavcUC4r7TSfpKFeE163uMuxZcyfO1mGO3EN8Q=="
},
"System.Runtime.CompilerServices.Unsafe": {
"type": "Transitive",
"resolved": "6.1.0",
"contentHash": "5o/HZxx6RVqYlhKSq8/zronDkALJZUT2Vz0hx43f0gwe8mwlM0y2nYlqdBwLMzr262Bwvpikeb/yEwkAa5PADg=="
"resolved": "6.1.2",
"contentHash": "2hBr6zdbIBTDE3EhK7NSVNdX58uTK6iHW/P/Axmm9sl1xoGSLqDvMtpecn226TNwHByFokYwJmt/aQQNlO5CRw=="
},
"System.Threading.Tasks.Extensions": {
"type": "Transitive",
@@ -296,18 +296,15 @@
"dependencies": {
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
},
"System.Buffers": {
"type": "CentralTransitive",
"requested": "[4.6.1, )",
"resolved": "4.6.1",
"contentHash": "N8GXpmiLMtljq7gwvyS+1QvKT/W2J8sNAvx+HVg4NGmsG/H+2k/y9QI23auLJRterrzCiDH+IWAw4V/GPwsMlw=="
}
},
"net6.0": {
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",
"requested": "[1.0.3, )",
"resolved": "1.0.3",
"contentHash": "vUc9Npcs14QsyOD01tnv/m8sQUnGTGOw1BCmKcv77LBJY7OxhJ+zJF7UD/sCL3lYNFuqmQEVlkfS4Quif6FyYg==",
"dependencies": {
"Microsoft.NETFramework.ReferenceAssemblies.net461": "1.0.3"
}
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
@@ -318,12 +315,6 @@
"Microsoft.SourceLink.Common": "8.0.0"
}
},
"System.Buffers": {
"type": "Direct",
"requested": "[4.6.0, )",
"resolved": "4.6.0",
"contentHash": "lN6tZi7Q46zFzAbRYXTIvfXcyvQQgxnY7Xm6C6xQ9784dEL1amjM6S6Iw4ZpsvesAKnRVsM4scrDQaDqSClkjA=="
},
"ZstdSharp.Port": {
"type": "Direct",
"requested": "[0.8.6, )",
@@ -335,11 +326,6 @@
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net461": {
"type": "Transitive",
"resolved": "1.0.3",
"contentHash": "AmOJZwCqnOCNp6PPcf9joyogScWLtwy0M1WkqfEQ0M9nYwyDD7EX9ZjscKS5iYnyvteX7kzSKFCKt9I9dXA6mA=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
@@ -349,18 +335,9 @@
"net8.0": {
"Microsoft.NET.ILLink.Tasks": {
"type": "Direct",
"requested": "[8.0.17, )",
"resolved": "8.0.17",
"contentHash": "x5/y4l8AtshpBOrCZdlE4txw8K3e3s9meBFeZeR3l8hbbku2V7kK6ojhXvrbjg1rk3G+JqL1BI26gtgc1ZrdUw=="
},
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",
"requested": "[1.0.3, )",
"resolved": "1.0.3",
"contentHash": "vUc9Npcs14QsyOD01tnv/m8sQUnGTGOw1BCmKcv77LBJY7OxhJ+zJF7UD/sCL3lYNFuqmQEVlkfS4Quif6FyYg==",
"dependencies": {
"Microsoft.NETFramework.ReferenceAssemblies.net461": "1.0.3"
}
"requested": "[8.0.21, )",
"resolved": "8.0.21",
"contentHash": "s8H5PZQs50OcNkaB6Si54+v3GWM7vzs6vxFRMlD3aXsbM+aPCtod62gmK0BYWou9diGzmo56j8cIf/PziijDqQ=="
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
@@ -372,12 +349,6 @@
"Microsoft.SourceLink.Common": "8.0.0"
}
},
"System.Buffers": {
"type": "Direct",
"requested": "[4.6.0, )",
"resolved": "4.6.0",
"contentHash": "lN6tZi7Q46zFzAbRYXTIvfXcyvQQgxnY7Xm6C6xQ9784dEL1amjM6S6Iw4ZpsvesAKnRVsM4scrDQaDqSClkjA=="
},
"ZstdSharp.Port": {
"type": "Direct",
"requested": "[0.8.6, )",
@@ -389,11 +360,6 @@
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net461": {
"type": "Transitive",
"resolved": "1.0.3",
"contentHash": "AmOJZwCqnOCNp6PPcf9joyogScWLtwy0M1WkqfEQ0M9nYwyDD7EX9ZjscKS5iYnyvteX7kzSKFCKt9I9dXA6mA=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",

View File

@@ -0,0 +1,49 @@
using System;
using JetBrains.Profiler.SelfApi;
namespace SharpCompress.Test;
public static class JetbrainsProfiler
{
private sealed class CpuClass : IDisposable
{
public CpuClass(string snapshotPath)
{
DotTrace.Init();
var config2 = new DotTrace.Config();
config2.SaveToDir(snapshotPath);
DotTrace.Attach(config2);
DotTrace.StartCollectingData();
}
public void Dispose()
{
DotTrace.StopCollectingData();
DotTrace.SaveData();
DotTrace.Detach();
}
}
private sealed class MemoryClass : IDisposable
{
public MemoryClass(string snapshotPath)
{
DotMemory.Init();
var config = new DotMemory.Config();
config.UseLogLevelVerbose();
config.SaveToDir(snapshotPath);
DotMemory.Attach(config);
DotMemory.GetSnapshot("Before");
}
public void Dispose()
{
DotMemory.GetSnapshot("After");
DotMemory.Detach();
}
}
public static IDisposable Cpu(string snapshotPath) => new CpuClass(snapshotPath);
public static IDisposable Memory(string snapshotPath) => new MemoryClass(snapshotPath);
}

View File

@@ -0,0 +1,280 @@
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Performance;
/// <summary>
/// A Stream implementation backed by a List of byte arrays that supports large lengths and positions.
/// Chunking the data into multiple byte-array segments allows streams larger than a single
/// managed byte array (roughly 2 GB) without allocating one contiguous buffer.
/// </summary>
public class LargeMemoryStream : Stream
{
private readonly List<byte[]> _chunks;
private readonly int _chunkSize;
private long _position;
private bool _isDisposed;
/// <summary>
/// Initializes a new instance of the LargeMemoryStream class.
/// </summary>
/// <param name="chunkSize">The size of each chunk in the backing byte array list. Defaults to 1MB.</param>
public LargeMemoryStream(int chunkSize = 1024 * 1024)
{
if (chunkSize <= 0)
throw new ArgumentException("Chunk size must be greater than zero.", nameof(chunkSize));
_chunks = new List<byte[]>();
_chunkSize = chunkSize;
_position = 0;
}
public override bool CanRead => true;
public override bool CanSeek => true;
public override bool CanWrite => true;
public override long Length
{
get
{
ThrowIfDisposed();
if (_chunks.Count == 0)
return 0;
long length = (long)(_chunks.Count - 1) * _chunkSize;
length += _chunks[_chunks.Count - 1].Length;
return length;
}
}
public override long Position
{
get
{
ThrowIfDisposed();
return _position;
}
set
{
ThrowIfDisposed();
if (value < 0)
throw new ArgumentOutOfRangeException(
nameof(value),
"Position cannot be negative."
);
_position = value;
}
}
public override void Flush()
{
ThrowIfDisposed();
// No-op for in-memory stream
}
public override int Read(byte[] buffer, int offset, int count)
{
ThrowIfDisposed();
if (buffer == null)
throw new ArgumentNullException(nameof(buffer));
if (offset < 0 || count < 0 || offset + count > buffer.Length)
throw new ArgumentOutOfRangeException();
long length = Length;
if (_position >= length)
return 0;
int bytesToRead = (int)Math.Min(count, length - _position);
int bytesRead = 0;
while (bytesRead < bytesToRead)
{
long chunkIndex = _position / _chunkSize;
int chunkOffset = (int)(_position % _chunkSize);
if (chunkIndex >= _chunks.Count)
break;
byte[] chunk = _chunks[(int)chunkIndex];
int availableInChunk = chunk.Length - chunkOffset;
int bytesToCopyFromChunk = Math.Min(availableInChunk, bytesToRead - bytesRead);
Array.Copy(chunk, chunkOffset, buffer, offset + bytesRead, bytesToCopyFromChunk);
_position += bytesToCopyFromChunk;
bytesRead += bytesToCopyFromChunk;
}
return bytesRead;
}
public override void Write(byte[] buffer, int offset, int count)
{
ThrowIfDisposed();
if (buffer == null)
throw new ArgumentNullException(nameof(buffer));
if (offset < 0 || count < 0 || offset + count > buffer.Length)
throw new ArgumentOutOfRangeException();
int bytesWritten = 0;
while (bytesWritten < count)
{
long chunkIndex = _position / _chunkSize;
int chunkOffset = (int)(_position % _chunkSize);
// Ensure we have enough chunks
while (_chunks.Count <= chunkIndex)
{
_chunks.Add(new byte[_chunkSize]);
}
byte[] chunk = _chunks[(int)chunkIndex];
int availableInChunk = chunk.Length - chunkOffset;
int bytesToCopyToChunk = Math.Min(availableInChunk, count - bytesWritten);
Array.Copy(buffer, offset + bytesWritten, chunk, chunkOffset, bytesToCopyToChunk);
_position += bytesToCopyToChunk;
bytesWritten += bytesToCopyToChunk;
}
}
public override long Seek(long offset, SeekOrigin origin)
{
ThrowIfDisposed();
long newPosition = origin switch
{
SeekOrigin.Begin => offset,
SeekOrigin.Current => _position + offset,
SeekOrigin.End => Length + offset,
_ => throw new ArgumentOutOfRangeException(nameof(origin)),
};
if (newPosition < 0)
throw new ArgumentOutOfRangeException(
nameof(offset),
"Cannot seek before the beginning of the stream."
);
_position = newPosition;
return _position;
}
public override void SetLength(long value)
{
ThrowIfDisposed();
if (value < 0)
throw new ArgumentOutOfRangeException(nameof(value), "Length cannot be negative.");
long currentLength = Length;
if (value < currentLength)
{
// Truncate
long chunkIndex = (value + _chunkSize - 1) / _chunkSize;
if (chunkIndex > 0)
chunkIndex--;
_chunks.RemoveRange((int)(chunkIndex + 1), _chunks.Count - (int)(chunkIndex + 1));
if (chunkIndex < _chunks.Count)
{
int lastChunkSize = (int)(value - chunkIndex * _chunkSize);
var x = _chunks[(int)chunkIndex];
Array.Resize(ref x, lastChunkSize);
_chunks[(int)chunkIndex] = x; // Array.Resize allocates a new array; store it back into the list
}
if (_position > value)
_position = value;
}
else if (value > currentLength)
{
// Extend with zeros
long chunkIndex = currentLength / _chunkSize;
int chunkOffset = (int)(currentLength % _chunkSize);
while ((long)_chunks.Count * _chunkSize < value)
{
_chunks.Add(new byte[_chunkSize]);
}
// Resize the last chunk if needed
if (_chunks.Count > 0)
{
long lastChunkNeededSize = value - (long)(_chunks.Count - 1) * _chunkSize;
if (lastChunkNeededSize < _chunkSize)
{
var x = _chunks[^1];
Array.Resize(ref x, (int)lastChunkNeededSize);
_chunks[^1] = x; // write the resized array back; Array.Resize does not mutate the list entry
}
}
}
}
/// <summary>
/// Gets the number of chunks in the backing list.
/// </summary>
public int ChunkCount => _chunks.Count;
/// <summary>
/// Gets the size of each chunk in bytes.
/// </summary>
public int ChunkSize => _chunkSize;
/// <summary>
/// Converts the stream contents to a single byte array.
/// This may consume significant memory for large streams.
/// </summary>
public byte[] ToArray()
{
ThrowIfDisposed();
long length = Length;
byte[] result = new byte[length];
long currentPosition = _position;
try
{
_position = 0;
int totalRead = 0;
while (totalRead < length)
{
int bytesToRead = (int)Math.Min(length - totalRead, int.MaxValue);
int bytesRead = Read(result, totalRead, bytesToRead);
if (bytesRead == 0)
break;
totalRead += bytesRead;
}
}
finally
{
_position = currentPosition;
}
return result;
}
private void ThrowIfDisposed()
{
if (_isDisposed)
throw new ObjectDisposedException(GetType().Name);
}
protected override void Dispose(bool disposing)
{
if (!_isDisposed)
{
if (disposing)
{
_chunks.Clear();
}
_isDisposed = true;
}
base.Dispose(disposing);
}
}
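A short usage sketch for the chunked stream above; the sizes are arbitrary:

using System.IO;
using SharpCompress.Performance;

var stream = new LargeMemoryStream(chunkSize: 64 * 1024);
var payload = new byte[200_000];            // spans multiple 64 KB chunks
stream.Write(payload, 0, payload.Length);
stream.Position = 0;                        // rewind before reading back
var roundTrip = new byte[payload.Length];
var read = stream.Read(roundTrip, 0, roundTrip.Length);
// read == payload.Length and stream.ChunkCount == 4 here (200,000 / 65,536 rounded up).
// Note: Length reports whole chunks (262,144 here), not the exact number of bytes written.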

View File

@@ -0,0 +1,54 @@
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Performance;
using SharpCompress.Readers;
using SharpCompress.Test;
var index = AppDomain.CurrentDomain.BaseDirectory.IndexOf(
"SharpCompress.Performance",
StringComparison.OrdinalIgnoreCase
);
var path = AppDomain.CurrentDomain.BaseDirectory.Substring(0, index);
var SOLUTION_BASE_PATH = Path.GetDirectoryName(path) ?? throw new ArgumentNullException();
var TEST_ARCHIVES_PATH = Path.Combine(SOLUTION_BASE_PATH, "TestArchives", "Archives");
//using var _ = JetbrainsProfiler.Memory($"/Users/adam/temp/");
using (var __ = JetbrainsProfiler.Cpu($"/Users/adam/temp/"))
{
var testArchives = new[]
{
"Rar.Audio_program.rar",
//"64bitstream.zip.7z",
//"TarWithSymlink.tar.gz"
};
var arcs = testArchives.Select(a => Path.Combine(TEST_ARCHIVES_PATH, a)).ToArray();
for (int i = 0; i < 50; i++)
{
using var found = ArchiveFactory.Open(arcs[0]);
foreach (var entry in found.Entries.Where(entry => !entry.IsDirectory))
{
Console.WriteLine($"Extracting {entry.Key}");
using var entryStream = entry.OpenEntryStream();
entryStream.CopyTo(Stream.Null);
}
/*using var found = ReaderFactory.Open(arcs[0]);
while (found.MoveToNextEntry())
{
var entry = found.Entry;
if (entry.IsDirectory)
continue;
Console.WriteLine($"Extracting {entry.Key}");
found.WriteEntryTo(Stream.Null);
}*/
}
Console.WriteLine("Still running...");
}
await Task.Delay(500);

View File

@@ -0,0 +1,10 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net8.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="JetBrains.Profiler.SelfApi" />
<ProjectReference Include="..\..\src\SharpCompress\SharpCompress.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,50 @@
{
"version": 2,
"dependencies": {
"net8.0": {
"JetBrains.Profiler.SelfApi": {
"type": "Direct",
"requested": "[2.5.14, )",
"resolved": "2.5.14",
"contentHash": "9+NcTe49B2M8/MOledSxKZkQKqavFf5xXZw4JL4bVu/KYiw6OOaD6cDQmNGSO18yUP/WoBXsXGKmZ9VOpmyadw==",
"dependencies": {
"JetBrains.HabitatDetector": "1.4.5",
"JetBrains.Profiler.Api": "1.4.10"
}
},
"JetBrains.FormatRipper": {
"type": "Transitive",
"resolved": "2.4.0",
"contentHash": "k5eGab1DArJH0k94ZO9oxDxg8go1KvR1oPGPzyVvfplEHetgrc2hGZ6Cken8fVsdS/Xp3hMnHd9L5MXb7JJM4A=="
},
"JetBrains.HabitatDetector": {
"type": "Transitive",
"resolved": "1.4.5",
"contentHash": "5kb1G32O8fmlS2QnJLycEnHbq9ukuDUHQll4mqOAPLEE1JEJcz12W6cTt1CMpQY3n/6R0jZAhmBvaJm2zixvow==",
"dependencies": {
"JetBrains.FormatRipper": "2.4.0"
}
},
"JetBrains.Profiler.Api": {
"type": "Transitive",
"resolved": "1.4.10",
"contentHash": "XBynPGDiWB6uWoiVwkki3uUsXqc66lRC1YX8LWYWc579ioJSB5OzZ8KsRK2q+eawj3OxrkeCsgXlb6mwBkCebQ==",
"dependencies": {
"JetBrains.HabitatDetector": "1.4.5"
}
},
"sharpcompress": {
"type": "Project",
"dependencies": {
"ZstdSharp.Port": "[0.8.6, )"
}
},
"ZstdSharp.Port": {
"type": "CentralTransitive",
"requested": "[0.8.6, )",
"resolved": "0.8.6",
"contentHash": "iP4jVLQoQmUjMU88g1WObiNr6YKZGvh4aOXn3yOJsHqZsflwRsxZPcIBvNXgjXO3vQKSLctXGLTpcBPLnWPS8A=="
}
}
}
}

View File

@@ -255,16 +255,10 @@ public class ArchiveTests : ReaderTests
protected void ArchiveExtractToDirectory(
string testArchive,
ReaderOptions? readerOptions = null
) => ArchiveExtractToDirectory(ArchiveFactory.AutoFactory, testArchive, readerOptions);
protected void ArchiveExtractToDirectory(
IArchiveFactory archiveFactory,
string testArchive,
ReaderOptions? readerOptions = null
)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using (var archive = archiveFactory.Open(new FileInfo(testArchive), readerOptions))
using (var archive = ArchiveFactory.Open(new FileInfo(testArchive), readerOptions))
{
archive.ExtractToDirectory(SCRATCH_FILES_PATH);
}
@@ -342,13 +336,12 @@ public class ArchiveTests : ReaderTests
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using var archive = ArchiveFactory.Open(testArchive);
using var reader = archive.ExtractAllEntries();
while (reader.MoveToNextEntry())
foreach (var entry in archive.Entries)
{
if (!reader.Entry.IsDirectory)
if (!entry.IsDirectory)
{
var memory = new MemoryStream();
reader.WriteEntryTo(memory);
entry.WriteTo(memory);
memory.Position = 0;

View File

@@ -223,7 +223,7 @@ public class RarReaderTests : ReaderTests
var destinationFileName = Path.Combine(destdir, file);
using var fs = File.OpenWrite(destinationFileName);
entryStream.TransferTo(fs);
entryStream.CopyTo(fs);
}
}
}
@@ -407,10 +407,16 @@ public class RarReaderTests : ReaderTests
Path.Combine("exe", "test.exe"),
}
);
using var archive = RarArchive.Open(
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part01.rar")
using var reader = RarReader.Open(
[
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part01.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part02.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part03.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part04.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part05.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part06.rar"),
]
);
using var reader = archive.ExtractAllEntries();
while (reader.MoveToNextEntry())
{
Assert.Equal(expectedOrder.Pop(), reader.Entry.Key);

View File

@@ -9,6 +9,9 @@
<PropertyGroup Condition="'$(Configuration)|$(TargetFramework)|$(Platform)'=='Debug|net8.0|AnyCPU'">
<DefineConstants>$(DefineConstants);DEBUG_STREAMS</DefineConstants>
</PropertyGroup>
<PropertyGroup Condition="$([System.Runtime.InteropServices.RuntimeInformation]::IsOSPlatform($([System.Runtime.InteropServices.OSPlatform]::Windows)))">
<DefineConstants>$(DefineConstants);WINDOWS</DefineConstants>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\..\src\SharpCompress\SharpCompress.csproj" />
</ItemGroup>
@@ -17,7 +20,6 @@
<PackageReference Include="Microsoft.NET.Test.Sdk" />
<PackageReference Include="xunit.runner.visualstudio" />
<PackageReference Include="xunit" />
<PackageReference Include="Xunit.SkippableFact" />
<PackageReference Include="Microsoft.NETFramework.ReferenceAssemblies" PrivateAssets="All" />
</ItemGroup>
<ItemGroup Condition=" '$(VersionlessImplicitFrameworkDefine)' != 'NETFRAMEWORK' ">

View File

@@ -85,7 +85,7 @@ public class TarReaderTests : ReaderTests
var destinationFileName = Path.Combine(destdir, file.NotNull());
using var fs = File.OpenWrite(destinationFileName);
entryStream.TransferTo(fs);
entryStream.CopyTo(fs);
}
}
}

View File

@@ -1,4 +1,3 @@
global using SharpCompress.Helpers;
using System;
using System.Collections.Generic;
using System.IO;

View File

@@ -0,0 +1,754 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using Xunit;
namespace SharpCompress.Test;
public class UtilityTests
{
#region URShift Tests
[Fact]
public void URShift_Int_PositiveNumber_ShiftsCorrectly()
{
var result = Utility.URShift(16, 2);
Assert.Equal(4, result);
}
[Fact]
public void URShift_Int_NegativeNumber_PerformsUnsignedShift()
{
// -1 in binary is all 1s (0xFFFFFFFF), shifted right by 1 should be 0x7FFFFFFF
var result = Utility.URShift(-1, 1);
Assert.Equal(int.MaxValue, result);
}
[Fact]
public void URShift_Int_Zero_ReturnsZero()
{
var result = Utility.URShift(0, 5);
Assert.Equal(0, result);
}
[Fact]
public void URShift_Long_PositiveNumber_ShiftsCorrectly()
{
var result = Utility.URShift(32L, 3);
Assert.Equal(4L, result);
}
[Fact]
public void URShift_Long_NegativeNumber_PerformsUnsignedShift()
{
var result = Utility.URShift(-1L, 1);
Assert.Equal(long.MaxValue, result);
}
[Fact]
public void URShift_Long_Zero_ReturnsZero()
{
var result = Utility.URShift(0L, 10);
Assert.Equal(0L, result);
}
#endregion
#region ReadFully Tests
[Fact]
public void ReadFully_ByteArray_ReadsExactlyRequiredBytes()
{
var data = new byte[] { 1, 2, 3, 4, 5 };
using var stream = new MemoryStream(data);
var buffer = new byte[5];
var result = stream.ReadFully(buffer);
Assert.True(result);
Assert.Equal(data, buffer);
}
[Fact]
public void ReadFully_ByteArray_ReturnsFalseWhenNotEnoughData()
{
var data = new byte[] { 1, 2, 3 };
using var stream = new MemoryStream(data);
var buffer = new byte[5];
var result = stream.ReadFully(buffer);
Assert.False(result);
}
[Fact]
public void ReadFully_ByteArray_EmptyStream_ReturnsFalse()
{
using var stream = new MemoryStream();
var buffer = new byte[5];
var result = stream.ReadFully(buffer);
Assert.False(result);
}
[Fact]
public void ReadFully_ByteArray_EmptyBuffer_ReturnsTrue()
{
var data = new byte[] { 1, 2, 3 };
using var stream = new MemoryStream(data);
var buffer = new byte[0];
var result = stream.ReadFully(buffer);
Assert.True(result);
}
[Fact]
public void ReadFully_Span_ReadsExactlyRequiredBytes()
{
var data = new byte[] { 1, 2, 3, 4, 5 };
using var stream = new MemoryStream(data);
Span<byte> buffer = new byte[5];
var result = stream.ReadFully(buffer);
Assert.True(result);
Assert.Equal(data, buffer.ToArray());
}
[Fact]
public void ReadFully_Span_ReturnsFalseWhenNotEnoughData()
{
var data = new byte[] { 1, 2, 3 };
using var stream = new MemoryStream(data);
Span<byte> buffer = new byte[5];
var result = stream.ReadFully(buffer);
Assert.False(result);
}
[Fact]
public void ReadFully_Span_EmptyStream_ReturnsFalse()
{
using var stream = new MemoryStream();
Span<byte> buffer = new byte[5];
var result = stream.ReadFully(buffer);
Assert.False(result);
}
[Fact]
public void ReadFully_Span_EmptyBuffer_ReturnsTrue()
{
var data = new byte[] { 1, 2, 3 };
using var stream = new MemoryStream(data);
Span<byte> buffer = new byte[0];
var result = stream.ReadFully(buffer);
Assert.True(result);
}
#endregion
#region Skip Tests
[Fact]
public void Skip_SeekableStream_UsesSeek()
{
var data = new byte[] { 1, 2, 3, 4, 5 };
using var stream = new MemoryStream(data);
stream.Skip(3);
Assert.Equal(3, stream.Position);
}
[Fact]
public void Skip_SeekableStream_SkipsCorrectAmount()
{
var data = new byte[] { 1, 2, 3, 4, 5 };
using var stream = new MemoryStream(data);
stream.Skip(2);
var buffer = new byte[2];
stream.Read(buffer);
Assert.Equal(new byte[] { 3, 4 }, buffer);
}
[Fact]
public void Skip_NonSeekableStream_SkipsCorrectAmount()
{
var data = new byte[] { 1, 2, 3, 4, 5 };
using var seekableStream = new MemoryStream(data);
using var nonSeekableStream = new NonSeekableStream(seekableStream);
nonSeekableStream.Skip(2);
var buffer = new byte[2];
nonSeekableStream.Read(buffer);
Assert.Equal(new byte[] { 3, 4 }, buffer);
}
[Fact]
public void Skip_NonSeekableStream_SkipsBeyondStreamEnd()
{
var data = new byte[] { 1, 2, 3 };
using var seekableStream = new MemoryStream(data);
using var nonSeekableStream = new NonSeekableStream(seekableStream);
// Should not throw, just skip what's available
nonSeekableStream.Skip(10);
Assert.Equal(-1, nonSeekableStream.ReadByte());
}
[Fact]
public void Skip_Parameterless_SkipsEntireStream()
{
var data = new byte[] { 1, 2, 3, 4, 5 };
using var stream = new MemoryStream(data);
stream.Skip();
Assert.Equal(-1, stream.ReadByte());
}
[Fact]
public void Skip_Zero_DoesNotMove()
{
var data = new byte[] { 1, 2, 3, 4, 5 };
using var stream = new MemoryStream(data);
stream.Position = 2;
stream.Skip(0);
Assert.Equal(2, stream.Position);
}
#endregion
#region SetSize Tests
[Fact]
public void SetSize_GrowsList_AddsZeroBytes()
{
var list = new List<byte> { 1, 2, 3 };
Utility.SetSize(list, 5);
Assert.Equal(5, list.Count);
Assert.Equal(new byte[] { 1, 2, 3, 0, 0 }, list);
}
[Fact]
public void SetSize_ShrinksListByOne()
{
var list = new List<byte> { 1, 2, 3, 4, 5 };
Utility.SetSize(list, 3);
Assert.Equal(3, list.Count);
Assert.Equal(new byte[] { 1, 2, 3 }, list);
}
[Fact]
public void SetSize_ToZero_ClearsAllItems()
{
var list = new List<byte> { 1, 2, 3 };
Utility.SetSize(list, 0);
Assert.Empty(list);
}
[Fact]
public void SetSize_SameSize_NoChange()
{
var list = new List<byte> { 1, 2, 3 };
Utility.SetSize(list, 3);
Assert.Equal(3, list.Count);
Assert.Equal(new byte[] { 1, 2, 3 }, list);
}
#endregion
#region ForEach Tests
[Fact]
public void ForEach_ExecutesActionForEachItem()
{
var items = new[] { 1, 2, 3, 4, 5 };
var results = new List<int>();
items.ForEach(x => results.Add(x));
Assert.Equal(items, results);
}
[Fact]
public void ForEach_EmptyCollection_NoExecutions()
{
var items = Array.Empty<int>();
var count = 0;
items.ForEach(x => count++);
Assert.Equal(0, count);
}
#endregion
#region AsEnumerable Tests
[Fact]
public void AsEnumerable_SingleItem_YieldsItem()
{
var item = 42;
var result = item.AsEnumerable().ToList();
Assert.Single(result);
Assert.Equal(42, result[0]);
}
[Fact]
public void AsEnumerable_String_YieldsString()
{
var item = "test";
var result = item.AsEnumerable().ToList();
Assert.Single(result);
Assert.Equal("test", result[0]);
}
#endregion
#region DosDateToDateTime Tests
[Fact]
public void DosDateToDateTime_ValidDate_ConvertsCorrectly()
{
// DOS date format: year (7 bits) | month (4 bits) | day (5 bits)
// DOS time format: hour (5 bits) | minute (6 bits) | second (5 bits, in 2-second increments)
// This represents exactly 2020-01-15 10:30:20 (the DOS seconds field stores 2-second increments)
ushort dosDate = (ushort)(((2020 - 1980) << 9) | (1 << 5) | 15); // 2020-01-15
ushort dosTime = (ushort)((10 << 11) | (30 << 5) | 10); // 10:30:20
var result = Utility.DosDateToDateTime(dosDate, dosTime);
Assert.Equal(2020, result.Year);
Assert.Equal(1, result.Month);
Assert.Equal(15, result.Day);
Assert.Equal(10, result.Hour);
Assert.Equal(30, result.Minute);
Assert.Equal(20, result.Second);
}
[Fact]
public void DosDateToDateTime_InvalidDate_DefaultsTo1980_01_01()
{
ushort dosDate = ushort.MaxValue;
ushort dosTime = (ushort)((10 << 11) | (30 << 5) | 10);
var result = Utility.DosDateToDateTime(dosDate, dosTime);
Assert.Equal(1980, result.Year);
Assert.Equal(1, result.Month);
Assert.Equal(1, result.Day);
}
[Fact]
public void DosDateToDateTime_InvalidTime_DefaultsToMidnight()
{
ushort dosDate = (ushort)(((2020 - 1980) << 9) | (1 << 5) | 15);
ushort dosTime = ushort.MaxValue;
var result = Utility.DosDateToDateTime(dosDate, dosTime);
Assert.Equal(0, result.Hour);
Assert.Equal(0, result.Minute);
Assert.Equal(0, result.Second);
}
[Fact]
public void DosDateToDateTime_FromUint_ConvertsCorrectly()
{
ushort dosDate = (ushort)(((2020 - 1980) << 9) | (6 << 5) | 20); // 2020-06-20
ushort dosTime = (ushort)((14 << 11) | (45 << 5) | 15); // 14:45:30
uint combined = (uint)(dosDate << 16) | dosTime;
var result = Utility.DosDateToDateTime(combined);
Assert.Equal(2020, result.Year);
Assert.Equal(6, result.Month);
Assert.Equal(20, result.Day);
Assert.Equal(14, result.Hour);
Assert.Equal(45, result.Minute);
}
#endregion
#region DateTimeToDosTime Tests
[Fact]
public void DateTimeToDosTime_ValidDateTime_ConvertsCorrectly()
{
//always do local time
var dt = new DateTime(2020, 6, 15, 14, 30, 20, DateTimeKind.Local);
var result = Utility.DateTimeToDosTime(dt);
// Verify we can convert back
var reversed = Utility.DosDateToDateTime(result);
Assert.Equal(2020, reversed.Year);
Assert.Equal(6, reversed.Month);
Assert.Equal(15, reversed.Day);
Assert.Equal(14, reversed.Hour);
Assert.Equal(30, reversed.Minute);
// Seconds are rounded down to nearest even number in DOS format
Assert.True(reversed.Second == 20 || reversed.Second == 18);
}
[Fact]
public void DateTimeToDosTime_NullDateTime_ReturnsZero()
{
DateTime? dt = null;
var result = Utility.DateTimeToDosTime(dt);
Assert.Equal(0u, result);
}
#endregion
#region UnixTimeToDateTime Tests
[Fact]
public void UnixTimeToDateTime_Zero_Returns1970_01_01()
{
var result = Utility.UnixTimeToDateTime(0);
Assert.Equal(1970, result.Year);
Assert.Equal(1, result.Month);
Assert.Equal(1, result.Day);
Assert.Equal(0, result.Hour);
Assert.Equal(0, result.Minute);
Assert.Equal(0, result.Second);
}
[Fact]
public void UnixTimeToDateTime_ValidTimestamp_ConvertsCorrectly()
{
// January 1, 2000 00:00:00 UTC is 946684800 seconds after epoch
var result = Utility.UnixTimeToDateTime(946684800);
Assert.Equal(2000, result.Year);
Assert.Equal(1, result.Month);
Assert.Equal(1, result.Day);
}
[Fact]
public void UnixTimeToDateTime_NegativeTimestamp_ReturnsBeforeEpoch()
{
// -86400 is one day before epoch
var result = Utility.UnixTimeToDateTime(-86400);
Assert.Equal(1969, result.Year);
Assert.Equal(12, result.Month);
Assert.Equal(31, result.Day);
}
#endregion
#region TransferTo Tests
[Fact]
public void TransferTo_WithMaxLength_TransfersCorrectAmount()
{
var sourceData = new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
using var source = new MemoryStream(sourceData);
using var destination = new MemoryStream();
var transferred = source.TransferTo(destination, 5);
Assert.Equal(5, transferred);
Assert.Equal(new byte[] { 1, 2, 3, 4, 5 }, destination.ToArray());
}
[Fact]
public void TransferTo_SourceSmallerThanMax_TransfersAll()
{
var sourceData = new byte[] { 1, 2, 3 };
using var source = new MemoryStream(sourceData);
using var destination = new MemoryStream();
var transferred = source.TransferTo(destination, 100);
Assert.Equal(3, transferred);
Assert.Equal(sourceData, destination.ToArray());
}
[Fact]
public void TransferTo_EmptySource_TransfersNothing()
{
using var source = new MemoryStream();
using var destination = new MemoryStream();
var transferred = source.TransferTo(destination, 100);
Assert.Equal(0, transferred);
Assert.Empty(destination.ToArray());
}
#endregion
#region SwapUINT32 Tests
[Fact]
public void SwapUINT32_SimpleValue_SwapsEndianness()
{
uint value = 0x12345678;
var result = Utility.SwapUINT32(value);
Assert.Equal(0x78563412u, result);
}
[Fact]
public void SwapUINT32_Zero_ReturnsZero()
{
var result = Utility.SwapUINT32(0);
Assert.Equal(0u, result);
}
[Fact]
public void SwapUINT32_MaxValue_SwapsCorrectly()
{
var result = Utility.SwapUINT32(uint.MaxValue);
Assert.Equal(uint.MaxValue, result);
}
[Fact]
public void SwapUINT32_Involution_SwappingTwiceReturnsOriginal()
{
uint value = 0x12345678;
var result = Utility.SwapUINT32(Utility.SwapUINT32(value));
Assert.Equal(value, result);
}
#endregion
#region SetLittleUInt32 Tests
[Fact]
public void SetLittleUInt32_InsertsValueCorrectly()
{
byte[] buffer = new byte[10];
uint value = 0x12345678;
Utility.SetLittleUInt32(ref buffer, value, 2);
Assert.Equal(0x78, buffer[2]);
Assert.Equal(0x56, buffer[3]);
Assert.Equal(0x34, buffer[4]);
Assert.Equal(0x12, buffer[5]);
}
[Fact]
public void SetLittleUInt32_AtOffset_InsertsBehindOffset()
{
byte[] buffer = new byte[10];
uint value = 0xDEADBEEF;
Utility.SetLittleUInt32(ref buffer, value, 5);
Assert.Equal(0xEF, buffer[5]);
Assert.Equal(0xBE, buffer[6]);
Assert.Equal(0xAD, buffer[7]);
Assert.Equal(0xDE, buffer[8]);
}
#endregion
#region SetBigUInt32 Tests
[Fact]
public void SetBigUInt32_InsertsValueCorrectly()
{
byte[] buffer = new byte[10];
uint value = 0x12345678;
Utility.SetBigUInt32(ref buffer, value, 2);
Assert.Equal(0x12, buffer[2]);
Assert.Equal(0x34, buffer[3]);
Assert.Equal(0x56, buffer[4]);
Assert.Equal(0x78, buffer[5]);
}
[Fact]
public void SetBigUInt32_AtOffset_InsertsBehindOffset()
{
byte[] buffer = new byte[10];
uint value = 0xDEADBEEF;
Utility.SetBigUInt32(ref buffer, value, 5);
Assert.Equal(0xDE, buffer[5]);
Assert.Equal(0xAD, buffer[6]);
Assert.Equal(0xBE, buffer[7]);
Assert.Equal(0xEF, buffer[8]);
}
#endregion
#region ReplaceInvalidFileNameChars Tests
#if WINDOWS
[Theory]
[InlineData("valid_filename.txt", "valid_filename.txt")]
[InlineData("file<name>test.txt", "file_name_test.txt")]
[InlineData("<>:\"|?*", "_______")]
public void ReplaceInvalidFileNameChars_Windows(string fileName, string expected)
{
var result = Utility.ReplaceInvalidFileNameChars(fileName);
Assert.Equal(expected, result);
}
#else
[Theory]
[InlineData("valid_filename.txt", "valid_filename.txt")]
[InlineData("file<name>test.txt", "file<name>test.txt")]
[InlineData("<>:\"|?*", "<>:\"|?*")]
public void ReplaceInvalidFileNameChars_NonWindows(string fileName, string expected)
{
var result = Utility.ReplaceInvalidFileNameChars(fileName);
Assert.Equal(expected, result);
}
#endif
#endregion
#region ToReadOnly Tests
[Fact]
public void ToReadOnly_IList_ReturnsReadOnlyCollection()
{
var list = new List<int> { 1, 2, 3, 4, 5 };
var result = list.ToReadOnly();
Assert.Equal(5, result.Count);
Assert.Equal(1, result[0]);
Assert.Equal(5, result[4]);
}
[Fact]
public void ToReadOnly_EmptyList_ReturnsEmptyReadOnlyCollection()
{
var list = new List<int>();
var result = list.ToReadOnly();
Assert.Empty(result);
}
#endregion
#region TrimNulls Tests
[Fact]
public void TrimNulls_StringWithNulls_ReplacesAndTrims()
{
var input = " hello\0world\0 ";
var result = Utility.TrimNulls(input);
Assert.Equal("hello world", result);
}
[Fact]
public void TrimNulls_StringWithoutNulls_TrimsWhitespace()
{
var input = " hello world ";
var result = Utility.TrimNulls(input);
Assert.Equal("hello world", result);
}
[Fact]
public void TrimNulls_OnlyNulls_ReturnsEmpty()
{
var input = "\0\0\0";
var result = Utility.TrimNulls(input);
Assert.Empty(result);
}
#endregion
}
/// <summary>
/// Helper class for testing non-seekable streams
/// </summary>
internal class NonSeekableStream : Stream
{
private readonly Stream _inner;
public NonSeekableStream(Stream inner)
{
_inner = inner;
}
public override bool CanRead => _inner.CanRead;
public override bool CanSeek => false; // Force non-seekable
public override bool CanWrite => _inner.CanWrite;
public override long Length => _inner.Length;
public override long Position
{
get => _inner.Position;
set => throw new NotSupportedException("Stream is not seekable");
}
public override void Flush() => _inner.Flush();
public override int Read(byte[] buffer, int offset, int count) =>
_inner.Read(buffer, offset, count);
public override long Seek(long offset, SeekOrigin origin) =>
throw new NotSupportedException("Stream is not seekable");
public override void SetLength(long value) =>
throw new NotSupportedException("Stream is not seekable");
public override void Write(byte[] buffer, int offset, int count) =>
_inner.Write(buffer, offset, count);
protected override void Dispose(bool disposing)
{
if (disposing)
{
_inner.Dispose();
}
base.Dispose(disposing);
}
}

View File

@@ -589,12 +589,10 @@ public class ZipArchiveTests : ArchiveTests
}
}
[SkippableFact]
public void Zip_Evil_Throws_Exception()
#if WINDOWS
[Fact]
public void Zip_Evil_Throws_Exception_Windows()
{
//windows only because of the paths
Skip.IfNot(Environment.OSVersion.Platform == PlatformID.Win32NT);
var zipFile = Path.Combine(TEST_ARCHIVES_PATH, "Zip.Evil.zip");
Assert.ThrowsAny<Exception>(() =>
@@ -609,6 +607,7 @@ public class ZipArchiveTests : ArchiveTests
}
});
}
#endif
private class NonSeekableMemoryStream : MemoryStream
{
@@ -734,8 +733,7 @@ public class ZipArchiveTests : ArchiveTests
{
var zipPath = Path.Combine(TEST_ARCHIVES_PATH, "Zip.uncompressed.zip");
using var stream = File.OpenRead(zipPath);
var archive = ArchiveFactory.Open(stream);
var reader = archive.ExtractAllEntries();
var reader = ReaderFactory.Open(stream);
var entries = 0;
while (reader.MoveToNextEntry())
{
@@ -763,8 +761,7 @@ public class ZipArchiveTests : ArchiveTests
};
var zipPath = Path.Combine(TEST_ARCHIVES_PATH, "Zip.uncompressed.zip");
using var stream = File.OpenRead(zipPath);
var archive = ArchiveFactory.Open(stream);
var reader = archive.ExtractAllEntries();
var reader = ReaderFactory.Open(stream);
var x = 0;
while (reader.MoveToNextEntry())
{
@@ -781,7 +778,7 @@ public class ZipArchiveTests : ArchiveTests
var zipPath = Path.Combine(TEST_ARCHIVES_PATH, "Zip.UnicodePathExtra.zip");
using (var stream = File.OpenRead(zipPath))
{
var archive = ArchiveFactory.Open(
var reader = ReaderFactory.Open(
stream,
new ReaderOptions
{
@@ -791,13 +788,12 @@ public class ZipArchiveTests : ArchiveTests
},
}
);
var reader = archive.ExtractAllEntries();
reader.MoveToNextEntry();
Assert.Equal("궖귛궖귙귪궖귗귪궖귙_wav.frq", reader.Entry.Key);
}
using (var stream = File.OpenRead(zipPath))
{
var archive = ArchiveFactory.Open(
var reader = ReaderFactory.Open(
stream,
new ReaderOptions
{
@@ -807,7 +803,6 @@ public class ZipArchiveTests : ArchiveTests
},
}
);
var reader = archive.ExtractAllEntries();
reader.MoveToNextEntry();
Assert.Equal("きょきゅんきゃんきゅ_wav.frq", reader.Entry.Key);
}

View File

@@ -362,6 +362,8 @@ public class ZipReaderTests : ReaderTests
while (reader.MoveToNextEntry()) { }
}
//this test uses a large 7zip file containing a zip file inside it to test zip64 support
// we probably shouldn't be allowing ExtractAllEntries here but it works for now.
[Fact]
public void Zip_Uncompressed_64bit()
{
@@ -383,11 +385,10 @@ public class ZipReaderTests : ReaderTests
[Fact]
public void Zip_Uncompressed_Encrypted_Read()
{
using var archive = ArchiveFactory.Open(
using var reader = ReaderFactory.Open(
Path.Combine(TEST_ARCHIVES_PATH, "Zip.none.encrypted.zip"),
new ReaderOptions { Password = "test" }
);
using var reader = archive.ExtractAllEntries();
reader.MoveToNextEntry();
Assert.Equal("first.txt", reader.Entry.Key);
Assert.Equal(199, reader.Entry.Size);

View File

@@ -4,20 +4,20 @@
".NETFramework,Version=v4.8": {
"AwesomeAssertions": {
"type": "Direct",
"requested": "[9.2.0, )",
"resolved": "9.2.0",
"contentHash": "RCkuFyKmesmZR74XLOzYvTpG/IbHfBeFapFTMvFskPzEK4z3YrVmHB2FIFJ0DhwjuIDdPL/hc8zS40IwMAN0BA==",
"requested": "[9.2.1, )",
"resolved": "9.2.1",
"contentHash": "lbwhyQNXxxEGx4oCbFqNfFy2DsywsvNhN6qoOjY4wwhMgI2L9+YrxjyF/M0io99yrvWV1Cjj12LP2QGcC43Uhw==",
"dependencies": {
"System.Threading.Tasks.Extensions": "4.5.4"
}
},
"Microsoft.NET.Test.Sdk": {
"type": "Direct",
"requested": "[17.13.0, )",
"resolved": "17.13.0",
"contentHash": "W19wCPizaIC9Zh47w8wWI/yxuqR7/dtABwOrc8r2jX/8mUNxM2vw4fXDh+DJTeogxV+KzKwg5jNNGQVwf3LXyA==",
"requested": "[18.0.0, )",
"resolved": "18.0.0",
"contentHash": "bvxj2Asb7nT+tqOFFerrhQeEjUYLwx0Poi0Rznu63WbqN+A4uDn1t5NWXfAOOQsF6lpmK6N2v+Vvgso7KWZS7g==",
"dependencies": {
"Microsoft.CodeCoverage": "17.13.0"
"Microsoft.CodeCoverage": "18.0.0"
}
},
"Microsoft.NETFramework.ReferenceAssemblies": {
@@ -49,20 +49,10 @@
"Microsoft.TestPlatform.ObjectModel": "17.13.0"
}
},
"Xunit.SkippableFact": {
"type": "Direct",
"requested": "[1.5.23, )",
"resolved": "1.5.23",
"contentHash": "JlKobLTlsGcuJ8OtoodxL63bUagHSVBnF+oQ2GgnkwNqK+XYjeYyhQasULi5Ebx1MNDGNbOMplQYr89mR+nItQ==",
"dependencies": {
"Validation": "2.5.51",
"xunit.extensibility.execution": "2.4.0"
}
},
"Microsoft.CodeCoverage": {
"type": "Transitive",
"resolved": "17.13.0",
"contentHash": "9LIUy0y+DvUmEPtbRDw6Bay3rzwqFV8P4efTrK4CZhQle3M/QwLPjISghfcolmEGAPWxuJi6m98ZEfk4VR4Lfg=="
"resolved": "18.0.0",
"contentHash": "DFPhMrsIofgJ1DDU3ModqqRArDm15/bNl4ecmcuBspZkZ4ONYnCC0R8U27WzK7cYv6r8l6Q/fRmvg7cb+I/dJA=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net48": {
"type": "Transitive",
@@ -84,8 +74,8 @@
},
"System.Numerics.Vectors": {
"type": "Transitive",
"resolved": "4.5.0",
"contentHash": "QQTlPTl06J/iiDbJCiepZ4H//BVraReU4O4EoRw1U02H5TLUIT7xn3GnDp9AXPSlJUDyFs4uWjWafNX6WrAojQ=="
"resolved": "4.6.1",
"contentHash": "sQxefTnhagrhoq2ReR0D/6K0zJcr9Hrd6kikeXsA1I8kOCboTavcUC4r7TSfpKFeE163uMuxZcyfO1mGO3EN8Q=="
},
"System.Reflection.Metadata": {
"type": "Transitive",
@@ -97,8 +87,8 @@
},
"System.Runtime.CompilerServices.Unsafe": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "/iUeP3tq1S0XdNNoMz5C9twLSrM/TH+qElHkXWaPvuNOt+99G75NrV0OS2EqHx5wMN7popYjpc8oTjC1y16DLg=="
"resolved": "6.1.2",
"contentHash": "2hBr6zdbIBTDE3EhK7NSVNdX58uTK6iHW/P/Axmm9sl1xoGSLqDvMtpecn226TNwHByFokYwJmt/aQQNlO5CRw=="
},
"System.Threading.Tasks.Extensions": {
"type": "Transitive",
@@ -108,11 +98,6 @@
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
},
"Validation": {
"type": "Transitive",
"resolved": "2.5.51",
"contentHash": "g/Aug7PVWaenlJ0QUyt/mEetngkQNsMCuNeRVXbcJED1nZS7JcK+GTU4kz3jcQ7bFuKfi8PF4ExXH7XSFNuSLQ=="
},
"xunit.abstractions": {
"type": "Transitive",
"resolved": "2.0.3",
@@ -156,34 +141,47 @@
"sharpcompress": {
"type": "Project",
"dependencies": {
"System.Buffers": "[4.6.0, )",
"Microsoft.Bcl.AsyncInterfaces": "[8.0.0, )",
"System.Buffers": "[4.6.1, )",
"System.Memory": "[4.6.3, )",
"System.Text.Encoding.CodePages": "[8.0.0, )",
"ZstdSharp.Port": "[0.8.6, )"
}
},
"Microsoft.Bcl.AsyncInterfaces": {
"type": "CentralTransitive",
"requested": "[8.0.0, )",
"resolved": "5.0.0",
"contentHash": "W8DPQjkMScOMTtJbPwmPyj9c3zYSFGawDW3jwlBOOsnY+EzZFLgNQ/UMkK35JmkNOVPdCyPr2Tw7Vv9N+KA3ZQ==",
"resolved": "8.0.0",
"contentHash": "3WA9q9yVqJp222P3x1wYIGDAkpjAku0TMUaaQV22g6L67AI0LdOIrVS7Ht2vJfLHGSPVuqN94vIr15qn+HEkHw==",
"dependencies": {
"System.Threading.Tasks.Extensions": "4.5.4"
}
},
"System.Buffers": {
"type": "CentralTransitive",
"requested": "[4.6.0, )",
"resolved": "4.6.0",
"contentHash": "lN6tZi7Q46zFzAbRYXTIvfXcyvQQgxnY7Xm6C6xQ9784dEL1amjM6S6Iw4ZpsvesAKnRVsM4scrDQaDqSClkjA=="
"requested": "[4.6.1, )",
"resolved": "4.6.1",
"contentHash": "N8GXpmiLMtljq7gwvyS+1QvKT/W2J8sNAvx+HVg4NGmsG/H+2k/y9QI23auLJRterrzCiDH+IWAw4V/GPwsMlw=="
},
"System.Memory": {
"type": "CentralTransitive",
"requested": "[4.6.0, )",
"resolved": "4.5.5",
"contentHash": "XIWiDvKPXaTveaB7HVganDlOCRoj03l+jrwNvcge/t8vhGYKvqV+dMv6G4SAX2NoNmN0wZfVPTAlFwZcZvVOUw==",
"requested": "[4.6.3, )",
"resolved": "4.6.3",
"contentHash": "qdcDOgnFZY40+Q9876JUHnlHu7bosOHX8XISRoH94fwk6hgaeQGSgfZd8srWRZNt5bV9ZW2TljcegDNxsf+96A==",
"dependencies": {
"System.Buffers": "4.5.1",
"System.Numerics.Vectors": "4.5.0",
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
"System.Buffers": "4.6.1",
"System.Numerics.Vectors": "4.6.1",
"System.Runtime.CompilerServices.Unsafe": "6.1.2"
}
},
"System.Text.Encoding.CodePages": {
"type": "CentralTransitive",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "OZIsVplFGaVY90G2SbpgU7EnCoOO5pw1t4ic21dBF3/1omrJFpAGoNAVpPyMVOC90/hvgkGG3VFqR13YgZMQfg==",
"dependencies": {
"System.Memory": "4.5.5",
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
}
},
"ZstdSharp.Port": {
@@ -201,18 +199,18 @@
"net8.0": {
"AwesomeAssertions": {
"type": "Direct",
"requested": "[9.2.0, )",
"resolved": "9.2.0",
"contentHash": "RCkuFyKmesmZR74XLOzYvTpG/IbHfBeFapFTMvFskPzEK4z3YrVmHB2FIFJ0DhwjuIDdPL/hc8zS40IwMAN0BA=="
"requested": "[9.2.1, )",
"resolved": "9.2.1",
"contentHash": "lbwhyQNXxxEGx4oCbFqNfFy2DsywsvNhN6qoOjY4wwhMgI2L9+YrxjyF/M0io99yrvWV1Cjj12LP2QGcC43Uhw=="
},
"Microsoft.NET.Test.Sdk": {
"type": "Direct",
"requested": "[17.13.0, )",
"resolved": "17.13.0",
"contentHash": "W19wCPizaIC9Zh47w8wWI/yxuqR7/dtABwOrc8r2jX/8mUNxM2vw4fXDh+DJTeogxV+KzKwg5jNNGQVwf3LXyA==",
"requested": "[18.0.0, )",
"resolved": "18.0.0",
"contentHash": "bvxj2Asb7nT+tqOFFerrhQeEjUYLwx0Poi0Rznu63WbqN+A4uDn1t5NWXfAOOQsF6lpmK6N2v+Vvgso7KWZS7g==",
"dependencies": {
"Microsoft.CodeCoverage": "17.13.0",
"Microsoft.TestPlatform.TestHost": "17.13.0"
"Microsoft.CodeCoverage": "18.0.0",
"Microsoft.TestPlatform.TestHost": "18.0.0"
}
},
"Microsoft.NETFramework.ReferenceAssemblies": {
@@ -247,20 +245,10 @@
"resolved": "3.1.5",
"contentHash": "tKi7dSTwP4m5m9eXPM2Ime4Kn7xNf4x4zT9sdLO/G4hZVnQCRiMTWoSZqI/pYTVeI27oPPqHBKYI/DjJ9GsYgA=="
},
"Xunit.SkippableFact": {
"type": "Direct",
"requested": "[1.5.23, )",
"resolved": "1.5.23",
"contentHash": "JlKobLTlsGcuJ8OtoodxL63bUagHSVBnF+oQ2GgnkwNqK+XYjeYyhQasULi5Ebx1MNDGNbOMplQYr89mR+nItQ==",
"dependencies": {
"Validation": "2.5.51",
"xunit.extensibility.execution": "2.4.0"
}
},
"Microsoft.CodeCoverage": {
"type": "Transitive",
"resolved": "17.13.0",
"contentHash": "9LIUy0y+DvUmEPtbRDw6Bay3rzwqFV8P4efTrK4CZhQle3M/QwLPjISghfcolmEGAPWxuJi6m98ZEfk4VR4Lfg=="
"resolved": "18.0.0",
"contentHash": "DFPhMrsIofgJ1DDU3ModqqRArDm15/bNl4ecmcuBspZkZ4ONYnCC0R8U27WzK7cYv6r8l6Q/fRmvg7cb+I/dJA=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net461": {
"type": "Transitive",
@@ -269,35 +257,38 @@
},
"Microsoft.TestPlatform.ObjectModel": {
"type": "Transitive",
"resolved": "17.13.0",
"contentHash": "bt0E0Dx+iqW97o4A59RCmUmz/5NarJ7LRL+jXbSHod72ibL5XdNm1Ke+UO5tFhBG4VwHLcSjqq9BUSblGNWamw==",
"resolved": "18.0.0",
"contentHash": "Al/a99ymb8UdEEh6DKNiaoFn5i8fvX5PdM9LfU9Z/Q8NJrlyHHzF+LRHLbR+t89gRsJ2fFMpwYxgEn3eH1BQwA==",
"dependencies": {
"System.Reflection.Metadata": "1.6.0"
"System.Reflection.Metadata": "8.0.0"
}
},
"Microsoft.TestPlatform.TestHost": {
"type": "Transitive",
"resolved": "17.13.0",
"contentHash": "9GGw08Dc3AXspjekdyTdZ/wYWFlxbgcF0s7BKxzVX+hzAwpifDOdxM+ceVaaJSQOwqt3jtuNlHn3XTpKUS9x9Q==",
"resolved": "18.0.0",
"contentHash": "aAxE8Thr9ZHGrljOYaDeLJqitQi75iE4xeEFn6CEGFirlHSn1KwpKPniuEn6zCLZ90Z3XqNlrC3ZJTuvBov45w==",
"dependencies": {
"Microsoft.TestPlatform.ObjectModel": "17.13.0",
"Newtonsoft.Json": "13.0.1"
"Microsoft.TestPlatform.ObjectModel": "18.0.0",
"Newtonsoft.Json": "13.0.3"
}
},
"Newtonsoft.Json": {
"type": "Transitive",
"resolved": "13.0.1",
"contentHash": "ppPFpBcvxdsfUonNcvITKqLl3bqxWbDCZIzDWHzjpdAHRFfZe0Dw9HmA0+za13IdyrgJwpkDTDA9fHaxOrt20A=="
"resolved": "13.0.3",
"contentHash": "HrC5BXdl00IP9zeV+0Z848QWPAoCr9P3bDEZguI+gkLcBKAOxix/tLEAAHC+UvDNPv4a2d18lOReHMOagPa+zQ=="
},
"System.Collections.Immutable": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "AurL6Y5BA1WotzlEvVaIDpqzpIPvYnnldxru8oXJU2yFxFUy3+pNXjXd1ymO+RA0rq0+590Q8gaz2l3Sr7fmqg=="
},
"System.Reflection.Metadata": {
"type": "Transitive",
"resolved": "1.6.0",
"contentHash": "COC1aiAJjCoA5GBF+QKL2uLqEBew4JsCkQmoHKbN3TlOZKa2fKLz5CpiRQKDz0RsAOEGsVKqOD5bomsXq/4STQ=="
},
"Validation": {
"type": "Transitive",
"resolved": "2.5.51",
"contentHash": "g/Aug7PVWaenlJ0QUyt/mEetngkQNsMCuNeRVXbcJED1nZS7JcK+GTU4kz3jcQ7bFuKfi8PF4ExXH7XSFNuSLQ=="
"resolved": "8.0.0",
"contentHash": "ptvgrFh7PvWI8bcVqG5rsA/weWM09EnthFHR5SCnS6IN+P4mj6rE1lBDC4U8HL9/57htKAqy4KQ3bBj84cfYyQ==",
"dependencies": {
"System.Collections.Immutable": "8.0.0"
}
},
"xunit.abstractions": {
"type": "Transitive",
@@ -342,16 +333,9 @@
"sharpcompress": {
"type": "Project",
"dependencies": {
"System.Buffers": "[4.6.0, )",
"ZstdSharp.Port": "[0.8.6, )"
}
},
"System.Buffers": {
"type": "CentralTransitive",
"requested": "[4.6.0, )",
"resolved": "4.6.0",
"contentHash": "lN6tZi7Q46zFzAbRYXTIvfXcyvQQgxnY7Xm6C6xQ9784dEL1amjM6S6Iw4ZpsvesAKnRVsM4scrDQaDqSClkjA=="
},
"ZstdSharp.Port": {
"type": "CentralTransitive",
"requested": "[0.8.6, )",