Compare commits

..

3 Commits

Author SHA1 Message Date
copilot-swe-agent[bot]
20f2c9e0d3 Add comprehensive RAR5 async extraction tests
- Created Rar5AsyncExtractionTests with 12 tests that exercise Unpack5Async code path
- Tests use AsyncOnlyStream to ensure async methods are called (not sync fallbacks); a sketch of such a wrapper follows this list
- Cover basic archives, solid archives, encrypted archives, and various extraction methods
- Tests verify:
  * ExtractAllEntries for solid archives
  * Individual file extraction with WriteEntryToDirectoryAsync
  * OpenEntryStream for manual extraction
  * WriteToDirectory for full archive extraction
  * Encrypted files (both header+files and files only)
  * Special formats (Blake2 CRC, comments, uncompressed)
  * Skipping entries in solid archives
- All tests passing on both net48 and net10.0
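
A minimal sketch of what an `AsyncOnlyStream` wrapper like the one named above might look like (the name comes from the commit message; this implementation is an assumption, not the actual test helper). Synchronous reads throw so that any accidental sync fallback in the extraction code fails the test immediately:

```csharp
// Hypothetical sketch; the real test helper may differ.
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

internal sealed class AsyncOnlyStream : Stream
{
    private readonly Stream _inner;

    public AsyncOnlyStream(Stream inner) => _inner = inner;

    // Force the code under test onto the async path: sync reads are an error.
    public override int Read(byte[] buffer, int offset, int count) =>
        throw new InvalidOperationException("Synchronous Read is not allowed; use ReadAsync.");

    public override Task<int> ReadAsync(
        byte[] buffer,
        int offset,
        int count,
        CancellationToken cancellationToken
    ) => _inner.ReadAsync(buffer, offset, count, cancellationToken);

    public override bool CanRead => _inner.CanRead;
    public override bool CanSeek => _inner.CanSeek;
    public override bool CanWrite => false;
    public override long Length => _inner.Length;

    public override long Position
    {
        get => _inner.Position;
        set => _inner.Position = value;
    }

    public override void Flush() => _inner.Flush();

    public override long Seek(long offset, SeekOrigin origin) => _inner.Seek(offset, origin);

    public override void SetLength(long value) => throw new NotSupportedException();

    public override void Write(byte[] buffer, int offset, int count) =>
        throw new NotSupportedException();
}
```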

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-11 17:02:24 +00:00
copilot-swe-agent[bot]
d9320c0930 Initial plan for RAR5 async test improvements
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-11 16:56:53 +00:00
copilot-swe-agent[bot]
6ddeb048f6 Initial plan 2026-02-11 16:55:12 +00:00
374 changed files with 4926 additions and 7935 deletions

View File

@@ -257,73 +257,60 @@ csharp_style_namespace_declarations = file_scoped
##########################################
[*.{cs,csx,cake,vb,vbx}]
dotnet_diagnostic.CA1000.severity = error # do not declare static members on generic types
dotnet_diagnostic.CA1001.severity = error # disposable field owners should be disposable
dotnet_diagnostic.CA1018.severity = error # mark custom attributes with AttributeUsage
dotnet_diagnostic.CA1000.severity = suggestion
dotnet_diagnostic.CA1001.severity = error
dotnet_diagnostic.CA1018.severity = error
dotnet_diagnostic.CA1036.severity = silent
dotnet_diagnostic.CA1051.severity = suggestion # do not declare visible instance fields
dotnet_diagnostic.CA1068.severity = error # cancellation token parameters must come last
dotnet_diagnostic.CA1069.severity = error # enums should not have duplicate values
dotnet_diagnostic.CA1304.severity = error # specify CultureInfo for culture-sensitive operations
dotnet_diagnostic.CA1305.severity = error # specify IFormatProvider
dotnet_diagnostic.CA1307.severity = error # specify StringComparison for clarity
dotnet_diagnostic.CA1309.severity = error # use ordinal StringComparison
dotnet_diagnostic.CA1310.severity = error # specify StringComparison for correctness
dotnet_diagnostic.CA1507.severity = error # use nameof in place of string literals
dotnet_diagnostic.CA1513.severity = suggestion # use ObjectDisposedException throw helper
dotnet_diagnostic.CA1707.severity = suggestion # identifiers should not contain underscores
dotnet_diagnostic.CA1708.severity = suggestion # identifiers should differ by more than case
dotnet_diagnostic.CA1711.severity = suggestion # identifiers should not have incorrect suffixes
dotnet_diagnostic.CA1716.severity = suggestion # identifiers should not match language keywords
dotnet_diagnostic.CA1720.severity = suggestion # identifiers should not contain type names
dotnet_diagnostic.CA1725.severity = error # parameter names should match base declaration
dotnet_diagnostic.CA1805.severity = suggestion # avoid unnecessary default value initialization
dotnet_diagnostic.CA1816.severity = suggestion # call GC.SuppressFinalize correctly
dotnet_diagnostic.CA1822.severity = suggestion # mark members static when possible
dotnet_diagnostic.CA1825.severity = error # avoid zero-length array allocations
dotnet_diagnostic.CA1051.severity = suggestion
dotnet_diagnostic.CA1068.severity = error
dotnet_diagnostic.CA1069.severity = error
dotnet_diagnostic.CA1304.severity = error
dotnet_diagnostic.CA1305.severity = suggestion
dotnet_diagnostic.CA1307.severity = suggestion
dotnet_diagnostic.CA1309.severity = suggestion
dotnet_diagnostic.CA1310.severity = error
dotnet_diagnostic.CA1507.severity = suggestion
dotnet_diagnostic.CA1513.severity = suggestion
dotnet_diagnostic.CA1707.severity = suggestion
dotnet_diagnostic.CA1708.severity = suggestion
dotnet_diagnostic.CA1711.severity = suggestion
dotnet_diagnostic.CA1716.severity = suggestion
dotnet_diagnostic.CA1720.severity = suggestion
dotnet_diagnostic.CA1725.severity = suggestion
dotnet_diagnostic.CA1805.severity = suggestion
dotnet_diagnostic.CA1816.severity = suggestion
dotnet_diagnostic.CA1822.severity = suggestion
dotnet_diagnostic.CA1825.severity = error
dotnet_diagnostic.CA1826.severity = silent
dotnet_diagnostic.CA1827.severity = error # use Any() instead of Count()/LongCount() checks
dotnet_diagnostic.CA1829.severity = error # use Length or Count property instead of LINQ Count()
dotnet_diagnostic.CA1834.severity = error # prefer StringBuilder.Append(char) for single chars
dotnet_diagnostic.CA1845.severity = error # use span-based string.Concat overloads
dotnet_diagnostic.CA1848.severity = error # use LoggerMessage for high-performance logging
dotnet_diagnostic.CA1852.severity = suggestion # seal types that are not intended for inheritance
dotnet_diagnostic.CA1827.severity = error
dotnet_diagnostic.CA1829.severity = suggestion
dotnet_diagnostic.CA1834.severity = error
dotnet_diagnostic.CA1845.severity = suggestion
dotnet_diagnostic.CA1848.severity = suggestion
dotnet_diagnostic.CA1852.severity = suggestion
dotnet_diagnostic.CA1860.severity = silent
dotnet_diagnostic.CA2016.severity = error # forward CancellationToken to invoked methods
dotnet_diagnostic.CA2201.severity = error # do not throw reserved or overly general exceptions
dotnet_diagnostic.CA2206.severity = error # enforce CA2206 usage guidance
dotnet_diagnostic.CA2208.severity = error # instantiate ArgumentException types correctly
dotnet_diagnostic.CA2211.severity = error # non-constant fields should not be visible
dotnet_diagnostic.CA2249.severity = error # prefer string.Contains over string.IndexOf checks
dotnet_diagnostic.CA2251.severity = error # use string.Equals over string.Compare equality checks
dotnet_diagnostic.CA2016.severity = suggestion
dotnet_diagnostic.CA2201.severity = error
dotnet_diagnostic.CA2206.severity = error
dotnet_diagnostic.CA2208.severity = error
dotnet_diagnostic.CA2211.severity = error
dotnet_diagnostic.CA2249.severity = error
dotnet_diagnostic.CA2251.severity = error
dotnet_diagnostic.CA2252.severity = none
dotnet_diagnostic.CA2254.severity = error # logging message templates should be static expressions
dotnet_diagnostic.CA2254.severity = suggestion
; High volume analyzers requiring extensive refactoring - set to suggestion temporarily
dotnet_diagnostic.CA1835.severity = suggestion # prefer Memory<T>-based async overloads
dotnet_diagnostic.CA1510.severity = error # use ArgumentNullException.ThrowIfNull
dotnet_diagnostic.CA1512.severity = error # use ArgumentOutOfRangeException throw helpers
dotnet_diagnostic.CA1844.severity = suggestion # provide memory-based async stream overrides
dotnet_diagnostic.CA1825.severity = error # avoid zero-length array allocations
dotnet_diagnostic.CA1712.severity = suggestion # do not prefix enum values with type name
dotnet_diagnostic.CA2022.severity = suggestion # avoid inexact reads with Stream.Read
dotnet_diagnostic.CA1850.severity = error # prefer static HashData over ComputeHash
dotnet_diagnostic.CA2263.severity = error # prefer generic overload when type is known
dotnet_diagnostic.CA2012.severity = error # use ValueTasks correctly
dotnet_diagnostic.CA1001.severity = error # disposable field owners should be disposable
dotnet_diagnostic.CS0169.severity = error # field is never used
dotnet_diagnostic.CS0219.severity = error # variable assigned but never used
dotnet_diagnostic.CS0649.severity = error # field is never assigned and remains default
dotnet_diagnostic.CS1998.severity = error # async method lacks await operators
dotnet_diagnostic.CS8602.severity = error # possible null reference dereference
dotnet_diagnostic.CS8604.severity = error # possible null reference argument
dotnet_diagnostic.CS8618.severity = error # non-nullable member is uninitialized
dotnet_diagnostic.CS0618.severity = error # obsolete member usage
dotnet_diagnostic.CS4014.severity = error # unawaited task call
dotnet_diagnostic.CS8600.severity = error # possible null to non-nullable conversion
dotnet_diagnostic.CS8603.severity = error # possible null reference return
dotnet_diagnostic.CS8625.severity = error # cannot assign null to non-nullable reference
dotnet_diagnostic.CS0169.severity = error
dotnet_diagnostic.CS0219.severity = error
dotnet_diagnostic.CS0649.severity = suggestion
dotnet_diagnostic.CS1998.severity = error
dotnet_diagnostic.CS8602.severity = error
dotnet_diagnostic.CS8604.severity = error
dotnet_diagnostic.CS8618.severity = error
dotnet_diagnostic.CS0618.severity = suggestion
dotnet_diagnostic.CS4014.severity = error
dotnet_diagnostic.CS8600.severity = error
dotnet_diagnostic.CS8603.severity = error
dotnet_diagnostic.CS8625.severity = error
dotnet_diagnostic.BL0005.severity = suggestion
@@ -331,9 +318,9 @@ dotnet_diagnostic.MVC1000.severity = suggestion
dotnet_diagnostic.RZ10012.severity = error
dotnet_diagnostic.IDE0004.severity = suggestion # redundant cast
dotnet_diagnostic.IDE0004.severity = error # redundant cast
dotnet_diagnostic.IDE0005.severity = suggestion
dotnet_diagnostic.IDE0007.severity = suggestion # Use var
dotnet_diagnostic.IDE0007.severity = error # Use var
dotnet_diagnostic.IDE0011.severity = error # Use braces on if statements
dotnet_diagnostic.IDE0010.severity = silent # populate switch
dotnet_diagnostic.IDE0017.severity = suggestion # initialization can be simplified
@@ -347,7 +334,7 @@ dotnet_diagnostic.IDE0028.severity = silent # expression body for accessors
dotnet_diagnostic.IDE0032.severity = suggestion # Use auto property
dotnet_diagnostic.IDE0033.severity = error # prefer tuple name
dotnet_diagnostic.IDE0037.severity = suggestion # simplify anonymous type
dotnet_diagnostic.IDE0040.severity = suggestion # modifiers required
dotnet_diagnostic.IDE0040.severity = error # modifiers required
dotnet_diagnostic.IDE0041.severity = error # simplify null
dotnet_diagnostic.IDE0042.severity = error # deconstruct variable
dotnet_diagnostic.IDE0044.severity = suggestion # make field only when possible
@@ -361,55 +348,6 @@ dotnet_diagnostic.IDE0060.severity = suggestion # unused parameters
dotnet_diagnostic.IDE0061.severity = suggestion # local expression body
dotnet_diagnostic.IDE0062.severity = suggestion # local to static
dotnet_diagnostic.IDE0063.severity = error # simplify using
[src/**/*.cs]
dotnet_diagnostic.VSTHRD002.severity = error # avoid sync waits on async operations
dotnet_diagnostic.VSTHRD100.severity = error # avoid async void methods
dotnet_diagnostic.VSTHRD101.severity = error # avoid unsupported async delegates
dotnet_diagnostic.VSTHRD102.severity = error # implement internal logic asynchronously
dotnet_diagnostic.VSTHRD103.severity = error # use async methods from async methods
dotnet_diagnostic.VSTHRD104.severity = error # offer async alternatives when possible
dotnet_diagnostic.VSTHRD107.severity = error # await task within using expression
dotnet_diagnostic.VSTHRD110.severity = error # observe result of async calls
dotnet_diagnostic.VSTHRD111.severity = error # use ConfigureAwait(bool)
dotnet_diagnostic.VSTHRD112.severity = error # implement System.IAsyncDisposable
dotnet_diagnostic.VSTHRD113.severity = error # check for System.IAsyncDisposable
dotnet_diagnostic.VSTHRD114.severity = error # avoid returning null from Task methods
dotnet_diagnostic.VSTHRD200.severity = suggestion # use Async suffix naming convention
[build/**/*.cs]
dotnet_diagnostic.VSTHRD001.severity = none # avoid legacy thread switching methods (disabled for build scripts)
dotnet_diagnostic.VSTHRD002.severity = none # avoid sync waits on async operations (disabled for build scripts)
dotnet_diagnostic.VSTHRD003.severity = none # avoid awaiting foreign tasks (disabled for build scripts)
dotnet_diagnostic.VSTHRD004.severity = none # await SwitchToMainThreadAsync (disabled for build scripts)
dotnet_diagnostic.VSTHRD010.severity = none # invoke single-threaded types on main thread (disabled for build scripts)
dotnet_diagnostic.VSTHRD011.severity = none # use AsyncLazy<T> (disabled for build scripts)
dotnet_diagnostic.VSTHRD012.severity = none # provide JoinableTaskFactory where allowed (disabled for build scripts)
dotnet_diagnostic.VSTHRD100.severity = none # avoid async void methods (disabled for build scripts)
dotnet_diagnostic.VSTHRD101.severity = none # avoid unsupported async delegates (disabled for build scripts)
dotnet_diagnostic.VSTHRD102.severity = none # implement internal logic asynchronously (disabled for build scripts)
dotnet_diagnostic.VSTHRD103.severity = none # use async methods from async methods (disabled for build scripts)
dotnet_diagnostic.VSTHRD104.severity = none # offer async alternatives when possible (disabled for build scripts)
dotnet_diagnostic.VSTHRD105.severity = none # avoid TaskScheduler.Current assumptions (disabled for build scripts)
dotnet_diagnostic.VSTHRD106.severity = none # use InvokeAsync for async events (disabled for build scripts)
dotnet_diagnostic.VSTHRD107.severity = none # await task within using expression (disabled for build scripts)
dotnet_diagnostic.VSTHRD108.severity = none # assert thread affinity unconditionally (disabled for build scripts)
dotnet_diagnostic.VSTHRD109.severity = none # switch instead of assert in async methods (disabled for build scripts)
dotnet_diagnostic.VSTHRD110.severity = none # observe result of async calls (disabled for build scripts)
dotnet_diagnostic.VSTHRD111.severity = none # use ConfigureAwait(bool) (disabled for build scripts)
dotnet_diagnostic.VSTHRD112.severity = none # implement System.IAsyncDisposable (disabled for build scripts)
dotnet_diagnostic.VSTHRD113.severity = none # check for System.IAsyncDisposable (disabled for build scripts)
dotnet_diagnostic.VSTHRD114.severity = none # avoid returning null from Task methods (disabled for build scripts)
dotnet_diagnostic.VSTHRD115.severity = none # avoid explicit null SynchronizationContext in JTC (disabled for build scripts)
dotnet_diagnostic.VSTHRD200.severity = none # use Async suffix naming convention (disabled for build scripts)
[tests/**/*.cs]
dotnet_diagnostic.CA1861.severity = suggestion # avoid constant arrays as arguments
dotnet_diagnostic.CA1305.severity = suggestion # specify IFormatProvider
dotnet_diagnostic.CA1307.severity = suggestion # specify StringComparison for clarity
dotnet_diagnostic.IDE0042.severity = suggestion
dotnet_diagnostic.IDE0051.severity = suggestion
dotnet_diagnostic.IDE0063.severity = suggestion
dotnet_diagnostic.IDE0066.severity = suggestion # switch expression
dotnet_diagnostic.IDE0072.severity = suggestion # Populate switch - forces population of all cases even when default specified
dotnet_diagnostic.IDE0078.severity = suggestion # use pattern matching
@@ -421,7 +359,7 @@ dotnet_diagnostic.IDE0200.severity = suggestion # lambda not needed
dotnet_diagnostic.IDE1006.severity = suggestion # Naming rule violation: These words cannot contain lower case characters
dotnet_diagnostic.IDE0260.severity = suggestion # Use pattern matching
dotnet_diagnostic.IDE0270.severity = suggestion # Null check simplification
dotnet_diagnostic.IDE0290.severity = suggestion # Primary Constructor
dotnet_diagnostic.IDE0290.severity = error # Primary Constructor
dotnet_diagnostic.IDE0300.severity = suggestion # Collection
dotnet_diagnostic.IDE0305.severity = suggestion # Collection ToList
@@ -429,8 +367,8 @@ dotnet_diagnostic.NX0001.severity = error
dotnet_diagnostic.NX0002.severity = silent
dotnet_diagnostic.NX0003.severity = silent
dotnet_diagnostic.VSTHRD110.severity = error # observe result of async calls
dotnet_diagnostic.VSTHRD107.severity = error # await task within using expression
dotnet_diagnostic.VSTHRD110.severity = error
dotnet_diagnostic.VSTHRD107.severity = error
##########################################
# Styles

View File

@@ -53,9 +53,9 @@ jobs:
name: ${{ matrix.os }}-nuget-package
path: artifacts/*.nupkg
# Push to NuGet.org only for version tag pushes (Windows only)
# Push to NuGet.org using C# build target (Windows only, not on PRs)
- name: Push to NuGet
if: success() && matrix.os == 'windows-latest' && startsWith(github.ref, 'refs/tags/')
if: success() && matrix.os == 'windows-latest' && github.event_name != 'pull_request'
run: dotnet run --project build/build.csproj -- push-to-nuget
env:
NUGET_API_KEY: ${{ secrets.NUGET_API_KEY }}

View File

@@ -103,11 +103,8 @@ tests/
### Factory Pattern
Factory implementations can implement one or more interfaces (`IArchiveFactory`, `IReaderFactory`, `IWriterFactory`) depending on format capabilities:
- `ArchiveFactory.OpenArchive()` - Opens archive API objects from seekable streams/files
- `ArchiveFactory.OpenAsyncArchive()` - Opens async archive API objects for async archive use cases
- `ReaderFactory.OpenReader()` - Auto-detects and opens forward-only readers
- `ReaderFactory.OpenAsyncReader()` - Auto-detects and opens forward-only async readers
- `WriterFactory.OpenWriter()` - Creates a writer for a specified `ArchiveType`
- `WriterFactory.OpenAsyncWriter()` - Creates an async writer for async write scenarios
- Factories located in: `src/SharpCompress/Factories/`
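
A short usage sketch of the three factory entry points listed above (exact overload shapes are assumptions based on the names; file paths are illustrative):

```csharp
using System.IO;
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Writers;

// Archive API: random access over a seekable stream.
using (var input = File.OpenRead("input.zip"))
using (var archive = ArchiveFactory.OpenArchive(input))
{
    foreach (var entry in archive.Entries)
    {
        // inspect or extract individual entries
    }
}

// Reader API: forward-only, format auto-detected.
using (var input = File.OpenRead("input.zip"))
using (var reader = ReaderFactory.OpenReader(input))
{
    while (reader.MoveToNextEntry())
    {
        // stream each entry in order
    }
}

// Writer API: create an archive of a given ArchiveType.
using (var output = File.Create("output.zip"))
using (
    var writer = WriterFactory.OpenWriter(
        output,
        ArchiveType.Zip,
        new WriterOptions(CompressionType.Deflate)
    )
)
{
    using var source = File.OpenRead("file.txt");
    writer.Write("file.txt", source);
}
```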
## Nullable Reference Types
@@ -135,9 +132,6 @@ SharpCompress supports multiple archive and compression formats:
### Async/Await Patterns
- All I/O operations support async/await with `CancellationToken`
- Async methods follow the naming convention: `MethodNameAsync`
- For async archive scenarios, prefer `ArchiveFactory.OpenAsyncArchive(...)` over sync `OpenArchive(...)`.
- For async forward-only read scenarios, prefer `ReaderFactory.OpenAsyncReader(...)` over sync `OpenReader(...)`.
- For async write scenarios, prefer `WriterFactory.OpenAsyncWriter(...)` over sync `OpenWriter(...)`.
- Key async methods:
- `WriteEntryToAsync` - Extract entry asynchronously
- `WriteAllToDirectoryAsync` - Extract all entries asynchronously
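
A hedged sketch tying these together, mirroring the async open/extract snippets in docs/USAGE.md (the `WriteToDirectoryAsync` argument list is cut off in the doc, so the options shown here are assumptions):

```csharp
using System.Threading;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;

using var cts = new CancellationTokenSource();

// Open via the async path, then extract all entries asynchronously.
using (var archive = await ZipArchive.OpenAsyncArchive("archive.zip"))
{
    await archive.WriteToDirectoryAsync(
        @"C:\output",
        new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
        cts.Token
    );
}
```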
@@ -205,8 +199,7 @@ SharpCompress supports multiple archive and compression formats:
## Common Pitfalls
1. **Don't mix Archive and Reader APIs** - Archive needs seekable stream, Reader doesn't
2. **Don't mix sync and async open paths** - For async workflows use `OpenAsyncArchive`/`OpenAsyncReader`/`OpenAsyncWriter`, not `OpenArchive`/`OpenReader`/`OpenWriter`
3. **Solid archives (Rar, 7Zip)** - Use `ExtractAllEntries()` for best performance, not individual entry extraction
4. **Stream disposal** - Always set `LeaveStreamOpen` explicitly when needed (default is to close)
5. **Tar + non-seekable stream** - Must provide file size or it will throw
6. **Format detection** - Use `ReaderFactory.OpenReader()` / `ReaderFactory.OpenAsyncReader()` for auto-detection, test with actual archive files
2. **Solid archives (Rar, 7Zip)** - Use `ExtractAllEntries()` for best performance, not individual entry extraction
3. **Stream disposal** - Always set `LeaveStreamOpen` explicitly when needed (default is to close)
4. **Tar + non-seekable stream** - Must provide file size or it will throw
5. **Format detection** - Use `ReaderFactory.OpenReader()` for auto-detection, test with actual archive files
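
For the solid-archive pitfall above, a sketch of the recommended pattern (RAR path illustrative; `ExtractAllEntries()` returns a forward-only reader):

```csharp
using SharpCompress.Archives.Rar;
using SharpCompress.Common;

// Solid archives decompress sequentially, so use the single reader
// from ExtractAllEntries() rather than extracting entries one by one.
using (var archive = RarArchive.OpenArchive("solid.rar"))
using (var reader = archive.ExtractAllEntries())
{
    while (reader.MoveToNextEntry())
    {
        if (!reader.Entry.IsDirectory)
        {
            reader.WriteEntryToDirectory(
                @"C:\output",
                new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
            );
        }
    }
}
```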

View File

@@ -8,6 +8,8 @@
<CodeAnalysisTreatWarningsAsErrors>true</CodeAnalysisTreatWarningsAsErrors>
<EnforceCodeStyleInBuild>true</EnforceCodeStyleInBuild>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<RunAnalyzersDuringLiveAnalysis>False</RunAnalyzersDuringLiveAnalysis>
<RunAnalyzersDuringBuild>False</RunAnalyzersDuringBuild>
<ManagePackageVersionsCentrally>true</ManagePackageVersionsCentrally>
<RestorePackagesWithLockFile>true</RestorePackagesWithLockFile>
<CentralPackageTransitivePinningEnabled>true</CentralPackageTransitivePinningEnabled>

View File

@@ -5,7 +5,7 @@
<PackageVersion Include="AwesomeAssertions" Version="9.3.0" />
<PackageVersion Include="Glob" Version="1.1.9" />
<PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.16" />
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="8.0.0" />
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="10.0.0" />
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="18.0.1" />
<PackageVersion Include="Mono.Posix.NETStandard" Version="1.0.0" />
<PackageVersion Include="SimpleExec" Version="13.0.0" />

View File

@@ -16,10 +16,6 @@ Post Issues on Github!
Check the [Supported Formats](docs/FORMATS.md) and [Basic Usage.](docs/USAGE.md)
## Custom Compression Providers
If you need to swap out SharpCompress's built-in codecs, the `Providers` property (and `WithProviders(...)` extensions) on `ReaderOptions` and `WriterOptions` lets you supply a `CompressionProviderRegistry`. The selected registry is used by Reader/Writer APIs, Archive APIs, and async extraction paths, so the same provider choice is applied consistently across open/read/write flows. The default registry is already wired up, so customization is only necessary when you want to plug in alternatives such as `SystemGZipCompressionProvider` or a third-party `CompressionProvider`. See [docs/USAGE.md#custom-compression-providers](docs/USAGE.md#custom-compression-providers) for guided examples.
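
A condensed sketch, mirroring the example in docs/USAGE.md (the registry and provider type names are taken from the text above):

```csharp
using System.IO;
using SharpCompress.Readers;

// Prefer the System.IO.Compression GZip implementation for all GZip work.
var registry = CompressionProviderRegistry.Default.With(new SystemGZipCompressionProvider());
var readerOptions = ReaderOptions.ForOwnedFile().WithProviders(registry);

using var reader = ReaderFactory.OpenReader(File.OpenRead("input.tar.gz"), readerOptions);
```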
## Recommended Formats
In general, I recommend GZip (Deflate), BZip2 (BZip), or LZip (LZMA): the simplicity of these formats lends itself to long-term archival as well as streamability. Tar is often used in conjunction with them to bundle multiple files into a single archive (e.g. `.tar.gz`); a sketch of that combination follows.
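
A minimal sketch of writing a `.tar.gz`, assuming the `WriterFactory.OpenWriter` shape shown in docs/USAGE.md (paths are illustrative):

```csharp
using System.IO;
using SharpCompress.Common;
using SharpCompress.Writers;

// Tar bundles the files; GZip compresses the tar stream.
using var output = File.Create("backup.tar.gz");
using var writer = WriterFactory.OpenWriter(
    output,
    ArchiveType.Tar,
    new WriterOptions(CompressionType.GZip)
);

using var source = File.OpenRead("readme.txt");
writer.Write("docs/readme.txt", source);
```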

View File

@@ -1,6 +1,5 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
@@ -115,19 +114,14 @@ Target(
{
var (version, isPrerelease) = await GetVersion();
Console.WriteLine($"VERSION={version}");
Console.WriteLine(
$"PRERELEASE={isPrerelease.ToString().ToLower(CultureInfo.InvariantCulture)}"
);
Console.WriteLine($"PRERELEASE={isPrerelease.ToString().ToLower()}");
// Write to environment file for GitHub Actions
var githubOutput = Environment.GetEnvironmentVariable("GITHUB_OUTPUT");
if (!string.IsNullOrEmpty(githubOutput))
{
File.AppendAllText(githubOutput, $"version={version}\n");
File.AppendAllText(
githubOutput,
$"prerelease={isPrerelease.ToString().ToLower(CultureInfo.InvariantCulture)}\n"
);
File.AppendAllText(githubOutput, $"prerelease={isPrerelease.ToString().ToLower()}\n");
}
}
);
@@ -369,13 +363,9 @@ Target(
: "⚪";
if (timeChange > 25 || memChange > 25)
{
hasRegressions = true;
}
if (timeChange < -25 || memChange < -25)
{
hasImprovements = true;
}
output.Add(
$"| {method} | {baseline.Mean} | {current.Mean} | {timeIcon} {timeChange:+0.0;-0.0;0}% | {baseline.Memory} | {current.Memory} | {memIcon} {memChange:+0.0;-0.0;0}% |"
@@ -555,10 +545,7 @@ static async Task<string> GetGitOutput(string command, string args)
}
catch (Exception ex)
{
throw new InvalidOperationException(
$"Git command failed: git {command} {args}\n{ex.Message}",
ex
);
throw new Exception($"Git command failed: git {command} {args}\n{ex.Message}", ex);
}
}
@@ -588,12 +575,12 @@ static Dictionary<string, BenchmarkMetric> ParseBenchmarkResults(string markdown
var line = lines[i].Trim();
// Look for table rows with benchmark data
if (line.StartsWith('|') && line.Contains("&#39;", StringComparison.Ordinal) && i > 0)
if (line.StartsWith("|") && line.Contains("&#39;") && i > 0)
{
var parts = line.Split('|', StringSplitOptions.TrimEntries);
if (parts.Length >= 5)
{
var method = parts[1].Replace("&#39;", "'", StringComparison.Ordinal);
var method = parts[1].Replace("&#39;", "'");
var meanStr = parts[2];
// Find Allocated column - it's usually the last column or labeled "Allocated"
@@ -601,10 +588,10 @@ static Dictionary<string, BenchmarkMetric> ParseBenchmarkResults(string markdown
for (int j = parts.Length - 2; j >= 2; j--)
{
if (
parts[j].Contains("KB", StringComparison.Ordinal)
|| parts[j].Contains("MB", StringComparison.Ordinal)
|| parts[j].Contains("GB", StringComparison.Ordinal)
|| parts[j].Contains('B', StringComparison.Ordinal)
parts[j].Contains("KB")
|| parts[j].Contains("MB")
|| parts[j].Contains("GB")
|| parts[j].Contains("B")
)
{
memoryStr = parts[j];
@@ -637,21 +624,17 @@ static Dictionary<string, BenchmarkMetric> ParseBenchmarkResults(string markdown
static double ParseTimeValue(string timeStr)
{
if (string.IsNullOrWhiteSpace(timeStr) || timeStr == "N/A" || timeStr == "NA")
{
return 0;
}
// Remove thousands separators and parse
timeStr = timeStr.Replace(",", "", StringComparison.Ordinal).Trim();
timeStr = timeStr.Replace(",", "").Trim();
var match = Regex.Match(timeStr, @"([\d.]+)\s*(\w+)");
if (!match.Success)
{
return 0;
}
var value = double.Parse(match.Groups[1].Value, CultureInfo.InvariantCulture);
var unit = match.Groups[2].Value.ToLower(CultureInfo.InvariantCulture);
var value = double.Parse(match.Groups[1].Value);
var unit = match.Groups[2].Value.ToLower();
// Convert to microseconds for comparison
return unit switch
@@ -667,20 +650,16 @@ static double ParseTimeValue(string timeStr)
static double ParseMemoryValue(string memStr)
{
if (string.IsNullOrWhiteSpace(memStr) || memStr == "N/A" || memStr == "NA")
{
return 0;
}
memStr = memStr.Replace(",", "", StringComparison.Ordinal).Trim();
memStr = memStr.Replace(",", "").Trim();
var match = Regex.Match(memStr, @"([\d.]+)\s*(\w+)");
if (!match.Success)
{
return 0;
}
var value = double.Parse(match.Groups[1].Value, CultureInfo.InvariantCulture);
var unit = match.Groups[2].Value.ToUpper(CultureInfo.InvariantCulture);
var value = double.Parse(match.Groups[1].Value);
var unit = match.Groups[2].Value.ToUpper();
// Convert to KB for comparison
return unit switch
@@ -696,9 +675,7 @@ static double ParseMemoryValue(string memStr)
static double CalculateChange(double baseline, double current)
{
if (baseline == 0)
{
return 0;
}
return ((current - baseline) / baseline) * 100;
}

View File

@@ -95,7 +95,7 @@ using (var archive = ZipArchive.OpenArchive("file.zip"))
}
// Async extraction (requires IAsyncArchive)
await using (var asyncArchive = await ZipArchive.OpenAsyncArchive("file.zip"))
using (var asyncArchive = await ZipArchive.OpenAsyncArchive("file.zip"))
{
await asyncArchive.WriteToDirectoryAsync(
@"C:\output",
@@ -177,7 +177,7 @@ using (var reader = ReaderFactory.OpenReader(stream))
// Async variants (use OpenAsyncReader to get IAsyncReader)
using (var stream = File.OpenRead("file.zip"))
await using (var reader = await ReaderFactory.OpenAsyncReader(stream))
using (var reader = await ReaderFactory.OpenAsyncReader(stream))
{
while (await reader.MoveToNextEntryAsync())
{
@@ -318,24 +318,6 @@ WriterOptions: write-time behavior (compression type/level, encoding, stream own
ZipWriterEntryOptions: per-entry ZIP overrides (compression, level, timestamps, comments, zip64)
```
### Compression Providers
`ReaderOptions` and `WriterOptions` expose a `Providers` registry that controls which `ICompressionProvider` implementations are used for each `CompressionType`. The registry defaults to `CompressionProviderRegistry.Default`, so you only need to set it if you want to swap in a custom provider (for example the `SystemGZipCompressionProvider`). The selected registry is honored by Reader/Writer APIs, Archive APIs, and async entry-stream extraction paths.
```csharp
var registry = CompressionProviderRegistry.Default.With(new SystemGZipCompressionProvider());
var readerOptions = ReaderOptions.ForOwnedFile().WithProviders(registry);
var writerOptions = new WriterOptions(CompressionType.GZip)
{
CompressionLevel = 6,
}.WithProviders(registry);
using var reader = ReaderFactory.OpenReader(input, readerOptions);
using var writer = WriterFactory.OpenWriter(output, ArchiveType.GZip, writerOptions);
```
When a format needs additional initialization/finalization data (LZMA, PPMd, etc.) the registry exposes `GetCompressingProvider` which returns the `ICompressionProviderHooks` contract; the rest of the API continues to flow through `Providers`, including pre/properties/post compression hook data.
---
## Compression Types
@@ -427,7 +409,7 @@ cts.CancelAfter(TimeSpan.FromMinutes(5));
try
{
await using (var archive = await ZipArchive.OpenAsyncArchive("archive.zip"))
using (var archive = await ZipArchive.OpenAsyncArchive("archive.zip"))
{
await archive.WriteToDirectoryAsync(
@"C:\output",
@@ -485,31 +467,6 @@ using (var archive = ZipArchive.CreateArchive())
}
```
### Buffered Forward-Only Streams
`SharpCompressStream` can wrap streams with buffering for forward-only scenarios:
```csharp
// Wrap a non-seekable stream with buffering
using (var bufferedStream = new SharpCompressStream(rawStream))
{
// Provides ring buffer functionality for reading ahead
// and seeking within buffered data
using (var reader = ReaderFactory.OpenReader(bufferedStream))
{
while (reader.MoveToNextEntry())
{
reader.WriteEntryToDirectory(@"C:\output");
}
}
}
```
Useful for:
- Non-seekable streams (network streams, pipes)
- Forward-only reading with limited look-ahead
- Buffering unbuffered streams for better performance
### Extract Specific Files
```csharp

View File

@@ -206,29 +206,6 @@ foreach(var entry in archive.Entries)
}
```
## Custom Compression Providers
By default `ReaderOptions` and `WriterOptions` already include `CompressionProviderRegistry.Default` via their `Providers` property, so you can read and write without touching the registry yet still get SharpCompress's built-in implementations.
The configured registry is used consistently across Reader APIs, Writer APIs, Archive APIs, and async entry-stream extraction, including compressed TAR wrappers and ZIP async decompression.
To replace a specific algorithm (for example to use `System.IO.Compression` for GZip or Deflate), create a modified registry and pass it through the same options:
```C#
var systemGZip = new SystemGZipCompressionProvider();
var customRegistry = CompressionProviderRegistry.Default.With(systemGZip);
var readerOptions = ReaderOptions.ForOwnedFile()
.WithProviders(customRegistry);
using var reader = ReaderFactory.OpenReader(stream, readerOptions);
var writerOptions = new WriterOptions(CompressionType.GZip)
.WithProviders(customRegistry);
using var writer = WriterFactory.OpenWriter(outputStream, ArchiveType.GZip, writerOptions);
```
The registry also exposes `GetCompressingProvider` (now returning `ICompressionProviderHooks`) when a compression format needs pre- or post-stream data (e.g., LZMA/PPMd). Implementations that need extra headers can supply those bytes through the `ICompressionProviderHooks` members while the rest of the API still works through the `Providers` property.
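
A minimal sketch of reaching the hooks contract; only the `GetCompressingProvider` name and its `ICompressionProviderHooks` return type come from the text above, and the member surface is deliberately left out because it is not documented here:

```csharp
// Assumption: GetCompressingProvider is keyed by the CompressionType being written.
var registry = CompressionProviderRegistry.Default;
ICompressionProviderHooks hooks = registry.GetCompressingProvider(CompressionType.LZMA);
// Supply pre/properties/post compression data through the hooks members
// as required by the format (members not shown in these docs).
```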
## Async Examples
### Async Reader Examples

View File

@@ -22,9 +22,7 @@ public static partial class ArchiveFactory
readerOptions ??= ReaderOptions.ForExternalStream;
var factory = await FindFactoryAsync<IArchiveFactory>(stream, cancellationToken)
.ConfigureAwait(false);
return await factory
.OpenAsyncArchive(stream, readerOptions, cancellationToken)
.ConfigureAwait(false);
return factory.OpenAsyncArchive(stream, readerOptions);
}
public static ValueTask<IAsyncArchive> OpenAsyncArchive(
@@ -47,9 +45,7 @@ public static partial class ArchiveFactory
var factory = await FindFactoryAsync<IArchiveFactory>(fileInfo, cancellationToken)
.ConfigureAwait(false);
return await factory
.OpenAsyncArchive(fileInfo, options, cancellationToken)
.ConfigureAwait(false);
return factory.OpenAsyncArchive(fileInfo, options);
}
public static async ValueTask<IAsyncArchive> OpenAsyncArchive(
@@ -62,7 +58,7 @@ public static partial class ArchiveFactory
var filesArray = fileInfos.ToArray();
if (filesArray.Length == 0)
{
throw new ArchiveOperationException("No files to open");
throw new InvalidOperationException("No files to open");
}
var fileInfo = filesArray[0];
@@ -91,7 +87,7 @@ public static partial class ArchiveFactory
var streamsArray = streams.ToArray();
if (streamsArray.Length == 0)
{
throw new ArchiveOperationException("No streams");
throw new InvalidOperationException("No streams");
}
var firstStream = streamsArray[0];
@@ -164,7 +160,7 @@ public static partial class ArchiveFactory
var extensions = string.Join(", ", factories.Select(item => item.Name));
throw new ArchiveOperationException(
throw new InvalidOperationException(
$"Cannot determine compressed stream type. Supported Archive Formats: {extensions}"
);
}

View File

@@ -2,9 +2,12 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.Factories;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
@@ -54,7 +57,7 @@ public static partial class ArchiveFactory
var filesArray = fileInfos.ToArray();
if (filesArray.Length == 0)
{
throw new ArchiveOperationException("No files to open");
throw new InvalidOperationException("No files to open");
}
var fileInfo = filesArray[0];
@@ -75,7 +78,7 @@ public static partial class ArchiveFactory
var streamsArray = streams.ToArray();
if (streamsArray.Length == 0)
{
throw new ArchiveOperationException("No streams");
throw new InvalidOperationException("No streams");
}
var firstStream = streamsArray[0];
@@ -143,7 +146,7 @@ public static partial class ArchiveFactory
var extensions = string.Join(", ", factories.Select(item => item.Name));
throw new ArchiveOperationException(
throw new InvalidOperationException(
$"Cannot determine compressed stream type. Supported Archive Formats: {extensions}"
);
}

View File

@@ -1,7 +1,6 @@
using System;
using System.IO;
using System.Text.RegularExpressions;
using SharpCompress.Common;
namespace SharpCompress.Archives;
@@ -20,9 +19,7 @@ internal abstract class ArchiveVolumeFactory
part1.DirectoryName!,
String.Concat(
m.Groups[1].Value,
(index + 1)
.ToString(Constants.DefaultCultureInfo)
.PadLeft(m.Groups[2].Value.Length, '0')
(index + 1).ToString().PadLeft(m.Groups[2].Value.Length, '0')
)
)
);

View File

@@ -40,7 +40,10 @@ public partial class GZipArchive
{
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
}
await using var writer = new GZipWriter(stream, options);
await using var writer = new GZipWriter(
stream,
options as GZipWriterOptions ?? new GZipWriterOptions(options)
);
await foreach (
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
)
@@ -74,7 +77,7 @@ public partial class GZipArchive
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return new((IAsyncReader)GZipReader.OpenReader(stream, ReaderOptions));
return new((IAsyncReader)GZipReader.OpenReader(stream));
}
protected override async IAsyncEnumerable<GZipArchiveEntry> LoadEntriesAsync(
@@ -85,7 +88,7 @@ public partial class GZipArchive
yield return new GZipArchiveEntry(
this,
await GZipFilePart
.CreateAsync(stream, ReaderOptions.ArchiveEncoding, ReaderOptions.Providers)
.CreateAsync(stream, ReaderOptions.ArchiveEncoding)
.ConfigureAwait(false),
ReaderOptions
);

View File

@@ -20,15 +20,14 @@ public partial class GZipArchive
>
#endif
{
public static ValueTask<IWritableAsyncArchive<GZipWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<GZipWriterOptions> OpenAsyncArchive(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return OpenAsyncArchive(new FileInfo(path), readerOptions, cancellationToken);
return (IWritableAsyncArchive<GZipWriterOptions>)
OpenArchive(new FileInfo(path), readerOptions ?? new ReaderOptions());
}
public static IWritableArchive<GZipWriterOptions> OpenArchive(
@@ -104,50 +103,30 @@ public partial class GZipArchive
);
}
public static ValueTask<IWritableAsyncArchive<GZipWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<GZipWriterOptions> OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(stream, readerOptions));
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(stream, readerOptions);
public static ValueTask<IWritableAsyncArchive<GZipWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<GZipWriterOptions> OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(fileInfo, readerOptions));
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(fileInfo, readerOptions);
public static ValueTask<IWritableAsyncArchive<GZipWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<GZipWriterOptions> OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(streams, readerOptions));
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(streams, readerOptions);
public static ValueTask<IWritableAsyncArchive<GZipWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<GZipWriterOptions> OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(fileInfos, readerOptions));
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(fileInfos, readerOptions);
public static IWritableArchive<GZipWriterOptions> CreateArchive() => new GZipArchive();
public static ValueTask<IWritableAsyncArchive<GZipWriterOptions>> CreateAsyncArchive() =>
new(new GZipArchive());
public static IWritableAsyncArchive<GZipWriterOptions> CreateAsyncArchive() =>
new GZipArchive();
public static bool IsGZipFile(string filePath) => IsGZipFile(new FileInfo(filePath));

View File

@@ -37,7 +37,7 @@ public partial class GZipArchive
}
protected override GZipArchiveEntry CreateEntryInternal(
string key,
string filePath,
Stream source,
long size,
DateTime? modified,
@@ -48,11 +48,13 @@ public partial class GZipArchive
{
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
}
return new GZipWritableArchiveEntry(this, source, key, size, modified, closeStream);
return new GZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
}
protected override GZipArchiveEntry CreateDirectoryEntry(string key, DateTime? modified) =>
throw new NotSupportedException("GZip archives do not support directory entries.");
protected override GZipArchiveEntry CreateDirectoryEntry(
string directoryPath,
DateTime? modified
) => throw new NotSupportedException("GZip archives do not support directory entries.");
protected override void SaveTo(
Stream stream,
@@ -65,7 +67,10 @@ public partial class GZipArchive
{
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
}
using var writer = new GZipWriter(stream, options);
using var writer = new GZipWriter(
stream,
options as GZipWriterOptions ?? new GZipWriterOptions(options)
);
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
@@ -82,7 +87,7 @@ public partial class GZipArchive
var stream = volumes.Single().Stream;
yield return new GZipArchiveEntry(
this,
GZipFilePart.Create(stream, ReaderOptions.ArchiveEncoding, ReaderOptions.Providers),
GZipFilePart.Create(stream, ReaderOptions.ArchiveEncoding),
ReaderOptions
);
}
@@ -91,6 +96,6 @@ public partial class GZipArchive
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return GZipReader.OpenReader(stream, ReaderOptions);
return GZipReader.OpenReader(stream);
}
}

View File

@@ -24,20 +24,10 @@ public class GZipArchiveEntry : GZipEntry, IArchiveEntry
return Parts.Single().GetCompressedStream().NotNull();
}
public async ValueTask<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
)
public ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
{
// Reset the stream position if seekable
var part = (GZipFilePart)Parts.Single();
var rawStream = part.GetRawStream();
if (rawStream.CanSeek && rawStream.Position != part.EntryStartPosition)
{
rawStream.Position = part.EntryStartPosition;
}
return (
await Parts.Single().GetCompressedStreamAsync(cancellationToken).ConfigureAwait(false)
).NotNull();
// GZip synchronous implementation is fast enough, just wrap it
return new(OpenEntryStream());
}
#region IArchiveEntry Members

View File

@@ -1,6 +1,5 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Factories;
using SharpCompress.Readers;
@@ -33,13 +32,7 @@ public interface IArchiveFactory : IFactory
/// </summary>
/// <param name="stream">An open, readable and seekable stream.</param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>A <see cref="ValueTask{TResult}"/> containing the opened async archive.</returns>
ValueTask<IAsyncArchive> OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
IAsyncArchive OpenAsyncArchive(Stream stream, ReaderOptions? readerOptions = null);
/// <summary>
/// Constructor with a FileInfo object to an existing file.
@@ -54,10 +47,5 @@ public interface IArchiveFactory : IFactory
/// <param name="fileInfo">the file to open.</param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>A <see cref="ValueTask{TResult}"/> containing the opened async archive.</returns>
ValueTask<IAsyncArchive> OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null);
}

View File

@@ -1,7 +1,6 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
@@ -19,22 +18,19 @@ public interface IArchiveOpenable<TSync, TASync>
public static abstract TSync OpenArchive(Stream stream, ReaderOptions? readerOptions = null);
public static abstract ValueTask<TASync> OpenAsyncArchive(
public static abstract TASync OpenAsyncArchive(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
);
public static abstract ValueTask<TASync> OpenAsyncArchive(
public static abstract TASync OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
);
public static abstract ValueTask<TASync> OpenAsyncArchive(
public static abstract TASync OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
);
}

View File

@@ -2,7 +2,6 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
@@ -21,16 +20,14 @@ public interface IMultiArchiveOpenable<TSync, TASync>
ReaderOptions? readerOptions = null
);
public static abstract ValueTask<TASync> OpenAsyncArchive(
public static abstract TASync OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
);
public static abstract ValueTask<TASync> OpenAsyncArchive(
public static abstract TASync OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
);
}
#endif

View File

@@ -1,4 +1,3 @@
using System.Threading.Tasks;
using SharpCompress.Common.Options;
#if NET8_0_OR_GREATER
@@ -9,6 +8,6 @@ public interface IWritableArchiveOpenable<TOptions>
where TOptions : IWriterOptions
{
public static abstract IWritableArchive<TOptions> CreateArchive();
public static abstract ValueTask<IWritableAsyncArchive<TOptions>> CreateAsyncArchive();
public static abstract IWritableAsyncArchive<TOptions> CreateAsyncArchive();
}
#endif

View File

@@ -20,18 +20,16 @@ public partial class RarArchive
IMultiArchiveOpenable<IRarArchive, IRarAsyncArchive>
#endif
{
public static ValueTask<IRarAsyncArchive> OpenAsyncArchive(
public static IRarAsyncArchive OpenAsyncArchive(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return new((IRarAsyncArchive)OpenArchive(new FileInfo(path), readerOptions));
return (IRarAsyncArchive)OpenArchive(new FileInfo(path), readerOptions);
}
public static IRarArchive OpenArchive(string filePath, ReaderOptions? readerOptions = null)
public static IRarArchive OpenArchive(string filePath, ReaderOptions? options = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
var fileInfo = new FileInfo(filePath);
@@ -39,24 +37,24 @@ public partial class RarArchive
new SourceStream(
fileInfo,
i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
options ?? new ReaderOptions()
)
);
}
public static IRarArchive OpenArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null)
public static IRarArchive OpenArchive(FileInfo fileInfo, ReaderOptions? options = null)
{
fileInfo.NotNull(nameof(fileInfo));
return new RarArchive(
new SourceStream(
fileInfo,
i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
options ?? new ReaderOptions()
)
);
}
public static IRarArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null)
public static IRarArchive OpenArchive(Stream stream, ReaderOptions? options = null)
{
stream.NotNull(nameof(stream));
@@ -65,9 +63,7 @@ public partial class RarArchive
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new RarArchive(
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
);
return new RarArchive(new SourceStream(stream, _ => null, options ?? new ReaderOptions()));
}
public static IRarArchive OpenArchive(
@@ -102,44 +98,36 @@ public partial class RarArchive
);
}
public static ValueTask<IRarAsyncArchive> OpenAsyncArchive(
public static IRarAsyncArchive OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IRarAsyncArchive)OpenArchive(stream, readerOptions));
return (IRarAsyncArchive)OpenArchive(stream, readerOptions);
}
public static ValueTask<IRarAsyncArchive> OpenAsyncArchive(
public static IRarAsyncArchive OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IRarAsyncArchive)OpenArchive(fileInfo, readerOptions));
return (IRarAsyncArchive)OpenArchive(fileInfo, readerOptions);
}
public static ValueTask<IRarAsyncArchive> OpenAsyncArchive(
public static IRarAsyncArchive OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IRarAsyncArchive)OpenArchive(streams, readerOptions));
return (IRarAsyncArchive)OpenArchive(streams, readerOptions);
}
public static ValueTask<IRarAsyncArchive> OpenAsyncArchive(
public static IRarAsyncArchive OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IRarAsyncArchive)OpenArchive(fileInfos, readerOptions));
return (IRarAsyncArchive)OpenArchive(fileInfos, readerOptions);
}
public static bool IsRarFile(string filePath) => IsRarFile(new FileInfo(filePath));

View File

@@ -1,7 +1,6 @@
using System;
using System.IO;
using System.Text.RegularExpressions;
using SharpCompress.Common;
namespace SharpCompress.Archives.Rar;
@@ -20,9 +19,7 @@ internal static class RarArchiveVolumeFactory
part1.DirectoryName!,
String.Concat(
m.Groups[1].Value,
(index + 1)
.ToString(Constants.DefaultCultureInfo)
.PadLeft(m.Groups[2].Value.Length, '0'),
(index + 1).ToString().PadLeft(m.Groups[2].Value.Length, '0'),
m.Groups[3].Value
)
)
@@ -42,15 +39,7 @@ internal static class RarArchiveVolumeFactory
index == 0
? m.Groups[2].Value + m.Groups[3].Value
: (char)(m.Groups[2].Value[0] + ((index - 1) / 100))
+ (index - 1)
.ToString(
"D4",
global::SharpCompress
.Common
.Constants
.DefaultCultureInfo
)
.Substring(2)
+ (index - 1).ToString("D4").Substring(2)
)
)
);

View File

@@ -16,17 +16,10 @@ public partial class SevenZipArchive
IMultiArchiveOpenable<IArchive, IAsyncArchive>
#endif
{
public static ValueTask<IAsyncArchive> OpenAsyncArchive(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
public static IAsyncArchive OpenAsyncArchive(string path, ReaderOptions? readerOptions = null)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty("path");
return new(
(IAsyncArchive)OpenArchive(new FileInfo(path), readerOptions ?? new ReaderOptions())
);
return (IAsyncArchive)OpenArchive(new FileInfo(path), readerOptions ?? new ReaderOptions());
}
public static IArchive OpenArchive(string filePath, ReaderOptions? readerOptions = null)
@@ -37,7 +30,7 @@ public partial class SevenZipArchive
public static IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.NotNull(nameof(fileInfo));
fileInfo.NotNull("fileInfo");
return new SevenZipArchive(
new SourceStream(
fileInfo,
@@ -81,7 +74,7 @@ public partial class SevenZipArchive
public static IArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null)
{
stream.NotNull(nameof(stream));
stream.NotNull("stream");
if (stream is not { CanSeek: true })
{
@@ -93,44 +86,33 @@ public partial class SevenZipArchive
);
}
public static ValueTask<IAsyncArchive> OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
public static IAsyncArchive OpenAsyncArchive(Stream stream, ReaderOptions? readerOptions = null)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncArchive)OpenArchive(stream, readerOptions));
return (IAsyncArchive)OpenArchive(stream, readerOptions);
}
public static ValueTask<IAsyncArchive> OpenAsyncArchive(
public static IAsyncArchive OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncArchive)OpenArchive(fileInfo, readerOptions));
return (IAsyncArchive)OpenArchive(fileInfo, readerOptions);
}
public static ValueTask<IAsyncArchive> OpenAsyncArchive(
public static IAsyncArchive OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncArchive)OpenArchive(streams, readerOptions));
return (IAsyncArchive)OpenArchive(streams, readerOptions);
}
public static ValueTask<IAsyncArchive> OpenAsyncArchive(
public static IAsyncArchive OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncArchive)OpenArchive(fileInfos, readerOptions));
return (IAsyncArchive)OpenArchive(fileInfos, readerOptions);
}
public static bool IsSevenZipFile(string filePath) => IsSevenZipFile(new FileInfo(filePath));

View File

@@ -6,7 +6,7 @@ using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.SevenZip;
using SharpCompress.Compressors.LZMA.Utilities;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -164,12 +164,6 @@ public partial class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, Sev
var folder = entry.FilePart.Folder;
// If folder is null (empty stream entry), return empty stream
if (folder is null)
{
return CreateEntryStream(Stream.Null);
}
// Check if we're starting a new folder - dispose old folder stream if needed
if (folder != _currentFolder)
{

View File

@@ -25,7 +25,10 @@ public partial class TarArchive
CancellationToken cancellationToken = default
)
{
using var writer = new TarWriter(stream, options);
using var writer = new TarWriter(
stream,
options as TarWriterOptions ?? new TarWriterOptions(options)
);
await foreach (
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
)
@@ -86,32 +89,26 @@ public partial class TarArchive
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return new((IAsyncReader)new TarReader(stream, ReaderOptions, _compressionType));
return new((IAsyncReader)TarReader.OpenReader(stream));
}
protected override async IAsyncEnumerable<TarArchiveEntry> LoadEntriesAsync(
IAsyncEnumerable<TarVolume> volumes
)
{
var sourceStream = (await volumes.SingleAsync().ConfigureAwait(false)).Stream;
var stream = await GetStreamAsync(sourceStream).ConfigureAwait(false);
var stream = (await volumes.SingleAsync().ConfigureAwait(false)).Stream;
if (stream.CanSeek)
{
stream.Position = 0;
}
var streamingMode =
_compressionType == CompressionType.None
? StreamingMode.Seekable
: StreamingMode.Streaming;
// Always use async header reading in LoadEntriesAsync for consistency
{
// Use async header reading for async-only streams
TarHeader? previousHeader = null;
await foreach (
var header in TarHeaderFactory.ReadHeaderAsync(
streamingMode,
StreamingMode.Seekable,
stream,
ReaderOptions.ArchiveEncoding
)
@@ -129,10 +126,7 @@ public partial class TarArchive
{
var entry = new TarArchiveEntry(
this,
new TarFilePart(
previousHeader,
_compressionType == CompressionType.None ? stream : null
),
new TarFilePart(previousHeader, stream),
CompressionType.None,
ReaderOptions
);
@@ -157,10 +151,7 @@ public partial class TarArchive
}
yield return new TarArchiveEntry(
this,
new TarFilePart(
header,
_compressionType == CompressionType.None ? stream : null
),
new TarFilePart(header, stream),
CompressionType.None,
ReaderOptions
);

View File

@@ -7,7 +7,6 @@ using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Factories;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Writers.Tar;
@@ -38,9 +37,12 @@ public partial class TarArchive
)
{
fileInfo.NotNull(nameof(fileInfo));
return OpenArchive(
[fileInfo],
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
return new TarArchive(
new SourceStream(
fileInfo,
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
)
);
}
@@ -51,17 +53,13 @@ public partial class TarArchive
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
var sourceStream = new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
return new TarArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
)
);
var compressionType = TarFactory.GetCompressionType(
sourceStream,
sourceStream.ReaderOptions.Providers
);
sourceStream.Seek(0, SeekOrigin.Begin);
return new TarArchive(sourceStream, compressionType);
}
public static IWritableArchive<TarWriterOptions> OpenArchive(
@@ -71,17 +69,13 @@ public partial class TarArchive
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
var sourceStream = new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
return new TarArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
var compressionType = TarFactory.GetCompressionType(
sourceStream,
sourceStream.ReaderOptions.Providers
);
sourceStream.Seek(0, SeekOrigin.Begin);
return new TarArchive(sourceStream, compressionType);
}
public static IWritableArchive<TarWriterOptions> OpenArchive(
@@ -96,113 +90,35 @@ public partial class TarArchive
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return OpenArchive([stream], readerOptions);
return new TarArchive(
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
);
}
public static async ValueTask<IWritableAsyncArchive<TarWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<TarWriterOptions> OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
stream.NotNull(nameof(stream));
var sourceStream = new SourceStream(
stream,
i => null,
readerOptions ?? new ReaderOptions()
);
var compressionType = await TarFactory
.GetCompressionTypeAsync(
sourceStream,
sourceStream.ReaderOptions.Providers,
cancellationToken
)
.ConfigureAwait(false);
sourceStream.Seek(0, SeekOrigin.Begin);
return new TarArchive(sourceStream, compressionType);
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<TarWriterOptions>)OpenArchive(stream, readerOptions);
public static ValueTask<IWritableAsyncArchive<TarWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<TarWriterOptions> OpenAsyncArchive(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return OpenAsyncArchive(new FileInfo(path), readerOptions, cancellationToken);
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<TarWriterOptions>)OpenArchive(new FileInfo(path), readerOptions);
public static async ValueTask<IWritableAsyncArchive<TarWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<TarWriterOptions> OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
fileInfo.NotNull(nameof(fileInfo));
readerOptions ??= new ReaderOptions() { LeaveStreamOpen = false };
var sourceStream = new SourceStream(fileInfo, i => null, readerOptions);
var compressionType = await TarFactory
.GetCompressionTypeAsync(
sourceStream,
sourceStream.ReaderOptions.Providers,
cancellationToken
)
.ConfigureAwait(false);
sourceStream.Seek(0, SeekOrigin.Begin);
return new TarArchive(sourceStream, compressionType);
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<TarWriterOptions>)OpenArchive(fileInfo, readerOptions);
public static async ValueTask<IWritableAsyncArchive<TarWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<TarWriterOptions> OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
var sourceStream = new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
);
var compressionType = await TarFactory
.GetCompressionTypeAsync(
sourceStream,
sourceStream.ReaderOptions.Providers,
cancellationToken
)
.ConfigureAwait(false);
sourceStream.Seek(0, SeekOrigin.Begin);
return new TarArchive(sourceStream, compressionType);
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<TarWriterOptions>)OpenArchive(streams, readerOptions);
public static async ValueTask<IWritableAsyncArchive<TarWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<TarWriterOptions> OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
var sourceStream = new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
);
var compressionType = await TarFactory
.GetCompressionTypeAsync(
sourceStream,
sourceStream.ReaderOptions.Providers,
cancellationToken
)
.ConfigureAwait(false);
sourceStream.Seek(0, SeekOrigin.Begin);
return new TarArchive(sourceStream, compressionType);
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<TarWriterOptions>)OpenArchive(fileInfos, readerOptions);
public static bool IsTarFile(string filePath) => IsTarFile(new FileInfo(filePath));
@@ -226,7 +142,7 @@ public partial class TarArchive
var isEmptyArchive =
tarHeader.Name?.Length == 0
&& tarHeader.Size == 0
&& IsDefined(tarHeader.EntryType);
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;
}
catch (Exception)
@@ -254,7 +170,7 @@ public partial class TarArchive
var isEmptyArchive =
tarHeader.Name?.Length == 0
&& tarHeader.Size == 0
&& IsDefined(tarHeader.EntryType);
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;
}
catch (Exception)
@@ -267,15 +183,5 @@ public partial class TarArchive
public static IWritableArchive<TarWriterOptions> CreateArchive() => new TarArchive();
public static ValueTask<IWritableAsyncArchive<TarWriterOptions>> CreateAsyncArchive() =>
new(new TarArchive());
private static bool IsDefined(EntryType value)
{
#if LEGACY_DOTNET
return Enum.IsDefined(typeof(EntryType), value);
#else
return Enum.IsDefined(value);
#endif
}
public static IWritableAsyncArchive<TarWriterOptions> CreateAsyncArchive() => new TarArchive();
}
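The IsDefined shim above exists because Enum.IsDefined(typeof(T), value) is the only overload available on older target frameworks, while .NET 5+ offers a generic overload that avoids boxing. A hedged generic form of the same shim:

    private static bool IsDefined<TEnum>(TEnum value)
        where TEnum : struct, Enum
    {
    #if LEGACY_DOTNET
        // Pre-.NET 5 overload; boxes the value.
        return Enum.IsDefined(typeof(TEnum), value);
    #else
        // Generic overload on modern .NET; no boxing.
        return Enum.IsDefined(value);
    #endif
    }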

View File

@@ -5,113 +5,35 @@ using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
using SharpCompress.Providers;
using SharpCompress.Readers;
using SharpCompress.Readers.Tar;
using SharpCompress.Writers;
using SharpCompress.Writers.Tar;
using Constants = SharpCompress.Common.Constants;
namespace SharpCompress.Archives.Tar;
public partial class TarArchive
: AbstractWritableArchive<TarArchiveEntry, TarVolume, TarWriterOptions>
{
private readonly CompressionType _compressionType;
protected override IEnumerable<TarVolume> LoadVolumes(SourceStream sourceStream)
{
sourceStream.NotNull("SourceStream is null").LoadAllParts();
return new TarVolume(sourceStream, ReaderOptions, 1).AsEnumerable();
}
internal TarArchive(SourceStream sourceStream, CompressionType compressionType)
: base(ArchiveType.Tar, sourceStream)
{
_compressionType = compressionType;
}
private TarArchive(SourceStream sourceStream)
: base(ArchiveType.Tar, sourceStream) { }
private TarArchive()
: base(ArchiveType.Tar) { }
private Stream GetStream(Stream stream) =>
_compressionType switch
{
CompressionType.BZip2 => ReaderOptions.Providers.CreateDecompressStream(
CompressionType.BZip2,
stream
),
CompressionType.GZip => ReaderOptions.Providers.CreateDecompressStream(
CompressionType.GZip,
stream,
CompressionContext.FromStream(stream).WithReaderOptions(ReaderOptions)
),
CompressionType.ZStandard => ReaderOptions.Providers.CreateDecompressStream(
CompressionType.ZStandard,
stream
),
CompressionType.LZip => ReaderOptions.Providers.CreateDecompressStream(
CompressionType.LZip,
stream
),
CompressionType.Xz => ReaderOptions.Providers.CreateDecompressStream(
CompressionType.Xz,
stream
),
CompressionType.Lzw => ReaderOptions.Providers.CreateDecompressStream(
CompressionType.Lzw,
stream
),
CompressionType.None => stream,
_ => throw new NotSupportedException("Invalid compression type: " + _compressionType),
};
private ValueTask<Stream> GetStreamAsync(
Stream stream,
CancellationToken cancellationToken = default
) =>
_compressionType switch
{
CompressionType.BZip2 => ReaderOptions.Providers.CreateDecompressStreamAsync(
CompressionType.BZip2,
stream,
cancellationToken
),
CompressionType.GZip => ReaderOptions.Providers.CreateDecompressStreamAsync(
CompressionType.GZip,
stream,
CompressionContext.FromStream(stream).WithReaderOptions(ReaderOptions),
cancellationToken
),
CompressionType.ZStandard => ReaderOptions.Providers.CreateDecompressStreamAsync(
CompressionType.ZStandard,
stream,
cancellationToken
),
CompressionType.LZip => ReaderOptions.Providers.CreateDecompressStreamAsync(
CompressionType.LZip,
stream,
cancellationToken
),
CompressionType.Xz => ReaderOptions.Providers.CreateDecompressStreamAsync(
CompressionType.Xz,
stream,
cancellationToken
),
CompressionType.Lzw => ReaderOptions.Providers.CreateDecompressStreamAsync(
CompressionType.Lzw,
stream,
cancellationToken
),
CompressionType.None => new ValueTask<Stream>(stream),
_ => throw new NotSupportedException("Invalid compression type: " + _compressionType),
};
protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
{
var stream = GetStream(volumes.Single().Stream);
var stream = volumes.Single().Stream;
if (stream.CanSeek)
{
stream.Position = 0;
@@ -119,9 +41,7 @@ public partial class TarArchive
TarHeader? previousHeader = null;
foreach (
var header in TarHeaderFactory.ReadHeader(
_compressionType == CompressionType.None
? StreamingMode.Seekable
: StreamingMode.Streaming,
StreamingMode.Seekable,
stream,
ReaderOptions.ArchiveEncoding
)
@@ -139,10 +59,7 @@ public partial class TarArchive
{
var entry = new TarArchiveEntry(
this,
new TarFilePart(
previousHeader,
_compressionType == CompressionType.None ? stream : null
),
new TarFilePart(previousHeader, stream),
CompressionType.None,
ReaderOptions
);
@@ -165,10 +82,7 @@ public partial class TarArchive
}
yield return new TarArchiveEntry(
this,
new TarFilePart(
header,
_compressionType == CompressionType.None ? stream : null
),
new TarFilePart(header, stream),
CompressionType.None,
ReaderOptions
);
@@ -182,7 +96,7 @@ public partial class TarArchive
}
protected override TarArchiveEntry CreateEntryInternal(
string key,
string filePath,
Stream source,
long size,
DateTime? modified,
@@ -192,14 +106,16 @@ public partial class TarArchive
this,
source,
CompressionType.Unknown,
key,
filePath,
size,
modified,
closeStream
);
protected override TarArchiveEntry CreateDirectoryEntry(string key, DateTime? modified) =>
new TarWritableArchiveEntry(this, key, modified);
protected override TarArchiveEntry CreateDirectoryEntry(
string directoryPath,
DateTime? modified
) => new TarWritableArchiveEntry(this, directoryPath, modified);
protected override void SaveTo(
Stream stream,
@@ -208,7 +124,10 @@ public partial class TarArchive
IEnumerable<TarArchiveEntry> newEntries
)
{
using var writer = new TarWriter(stream, options);
using var writer = new TarWriter(
stream,
options as TarWriterOptions ?? new TarWriterOptions(options)
);
foreach (var entry in oldEntries.Concat(newEntries))
{
if (entry.IsDirectory)
@@ -235,6 +154,6 @@ public partial class TarArchive
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return new TarReader(stream, ReaderOptions, _compressionType);
return TarReader.OpenReader(stream);
}
}

View File

@@ -55,12 +55,7 @@ public partial class ZipArchive
yield return new ZipArchiveEntry(
this,
new SeekableZipFilePart(
headerFactory.NotNull(),
deh,
s,
ReaderOptions.Providers
),
new SeekableZipFilePart(headerFactory.NotNull(), deh, s),
ReaderOptions
);
}
@@ -84,7 +79,10 @@ public partial class ZipArchive
CancellationToken cancellationToken = default
)
{
using var writer = new ZipWriter(stream, options);
using var writer = new ZipWriter(
stream,
options as ZipWriterOptions ?? new ZipWriterOptions(options)
);
await foreach (
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
)

View File

@@ -95,55 +95,30 @@ public partial class ZipArchive
);
}
public static ValueTask<IWritableAsyncArchive<ZipWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<ZipWriterOptions> OpenAsyncArchive(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(path, readerOptions));
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(path, readerOptions);
public static ValueTask<IWritableAsyncArchive<ZipWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<ZipWriterOptions> OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(stream, readerOptions));
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(stream, readerOptions);
public static ValueTask<IWritableAsyncArchive<ZipWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<ZipWriterOptions> OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(fileInfo, readerOptions));
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(fileInfo, readerOptions);
public static ValueTask<IWritableAsyncArchive<ZipWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<ZipWriterOptions> OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(streams, readerOptions));
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(streams, readerOptions);
public static ValueTask<IWritableAsyncArchive<ZipWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<ZipWriterOptions> OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(fileInfos, readerOptions));
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(fileInfos, readerOptions);
public static bool IsZipFile(string filePath, string? password = null) =>
IsZipFile(new FileInfo(filePath), password);
@@ -170,7 +145,7 @@ public partial class ZipArchive
{
return false;
}
return IsDefined(header.ZipHeaderType);
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
@@ -203,7 +178,7 @@ public partial class ZipArchive
return false;
}
}
return IsDefined(header.ZipHeaderType);
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
@@ -234,7 +209,7 @@ public partial class ZipArchive
{
return false;
}
return IsDefined(header.ZipHeaderType);
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
@@ -248,8 +223,7 @@ public partial class ZipArchive
public static IWritableArchive<ZipWriterOptions> CreateArchive() => new ZipArchive();
public static ValueTask<IWritableAsyncArchive<ZipWriterOptions>> CreateAsyncArchive() =>
new(new ZipArchive());
public static IWritableAsyncArchive<ZipWriterOptions> CreateAsyncArchive() => new ZipArchive();
public static async ValueTask<bool> IsZipMultiAsync(
Stream stream,
@@ -261,11 +235,9 @@ public partial class ZipArchive
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
var header = await headerFactory
.ReadStreamHeaderAsync(stream)
.Where(x => x.ZipHeaderType != ZipHeaderType.Split)
.FirstOrDefaultAsync(cancellationToken)
.ConfigureAwait(false);
var header = headerFactory
.ReadStreamHeader(stream)
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
if (stream.CanSeek)
@@ -288,7 +260,7 @@ public partial class ZipArchive
return false;
}
}
return IsDefined(header.ZipHeaderType);
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
@@ -299,13 +271,4 @@ public partial class ZipArchive
return false;
}
}
private static bool IsDefined(ZipHeaderType value)
{
#if LEGACY_DOTNET
return Enum.IsDefined(typeof(ZipHeaderType), value);
#else
return Enum.IsDefined(value);
#endif
}
}
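The OpenAsyncArchive pair above differs only in whether the archive is returned directly or inside an already-completed ValueTask. Wrapping the synchronous result is allocation-free, unlike Task.FromResult; a hedged sketch of the wrapping variant:

    public static ValueTask<IWritableAsyncArchive<ZipWriterOptions>> OpenAsyncArchive(
        string path,
        ReaderOptions? readerOptions = null,
        CancellationToken cancellationToken = default
    )
    {
        cancellationToken.ThrowIfCancellationRequested();
        // The open itself is synchronous, so hand back a completed ValueTask.
        return new((IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(path, readerOptions));
    }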

View File

@@ -34,14 +34,14 @@ public partial class ZipArchive
internal ZipArchive()
: base(ArchiveType.Zip) { }
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream sourceStream)
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream stream)
{
sourceStream.LoadAllParts();
stream.LoadAllParts();
//stream.Position = 0;
var streams = sourceStream.Streams.ToList();
var streams = stream.Streams.ToList();
var idx = 0;
if (streams.Count > 1)
if (streams.Count() > 1)
{
//check whether the second stream starts with a zip header without changing its position
var headerProbeStream = streams[1];
@@ -51,7 +51,7 @@ public partial class ZipArchive
headerProbeStream.Position = startPosition;
if (isZip)
{
sourceStream.IsVolumes = true;
stream.IsVolumes = true;
var tmp = streams[0];
streams.RemoveAt(0);
@@ -61,7 +61,7 @@ public partial class ZipArchive
}
}
return new ZipVolume(sourceStream, ReaderOptions, idx++).AsEnumerable();
return new ZipVolume(stream, ReaderOptions, idx++).AsEnumerable();
}
protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
@@ -96,12 +96,7 @@ public partial class ZipArchive
yield return new ZipArchiveEntry(
this,
new SeekableZipFilePart(
headerFactory.NotNull(),
deh,
s,
ReaderOptions.Providers
),
new SeekableZipFilePart(headerFactory.NotNull(), deh, s),
ReaderOptions
);
}
@@ -127,7 +122,10 @@ public partial class ZipArchive
IEnumerable<ZipArchiveEntry> newEntries
)
{
using var writer = new ZipWriter(stream, options);
using var writer = new ZipWriter(
stream,
options as ZipWriterOptions ?? new ZipWriterOptions(options)
);
foreach (var entry in oldEntries.Concat(newEntries))
{
if (entry.IsDirectory)
@@ -150,15 +148,17 @@ public partial class ZipArchive
}
protected override ZipArchiveEntry CreateEntryInternal(
string key,
string filePath,
Stream source,
long size,
DateTime? modified,
bool closeStream
) => new ZipWritableArchiveEntry(this, source, key, size, modified, closeStream);
) => new ZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
protected override ZipArchiveEntry CreateDirectoryEntry(string key, DateTime? modified) =>
new ZipWritableArchiveEntry(this, key, modified);
protected override ZipArchiveEntry CreateDirectoryEntry(
string directoryPath,
DateTime? modified
) => new ZipWritableArchiveEntry(this, directoryPath, modified);
protected override IReader CreateReaderForSolidExtraction()
{
@@ -171,6 +171,6 @@ public partial class ZipArchive
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return new((IAsyncReader)ZipReader.OpenReader(stream, ReaderOptions, Entries));
return new((IAsyncReader)ZipReader.OpenReader(stream));
}
}

View File

@@ -1,7 +1,6 @@
using System;
using System.IO;
using System.Text.RegularExpressions;
using SharpCompress.Common;
namespace SharpCompress.Archives.Zip;
@@ -22,7 +21,7 @@ internal static class ZipArchiveVolumeFactory
String.Concat(
m.Groups[1].Value,
Regex.Replace(m.Groups[2].Value, @"[^xz]", ""),
index.ToString(Constants.DefaultCultureInfo).PadLeft(2, '0')
index.ToString().PadLeft(2, '0')
)
)
);
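The Regex.Replace above strips everything except 'x' and 'z' from the captured extension before appending a zero-padded index, which is how split-zip part names such as .z01 are derived. A hedged worked example, assuming the second capture holds "zip":

    using System.Text.RegularExpressions;

    // "zip" -> "z" after stripping non-x/z characters, so part 1 becomes "z01".
    var index = 1;
    var tail = Regex.Replace("zip", @"[^xz]", "") + index.ToString().PadLeft(2, '0'); // "z01"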

View File

@@ -14,11 +14,11 @@ public sealed partial class AceFileHeader
/// Supports both ACE 1.0 and ACE 2.0 formats.
/// </summary>
public override async ValueTask<AceHeader?> ReadAsync(
Stream reader,
Stream stream,
CancellationToken cancellationToken = default
)
{
var headerData = await ReadHeaderAsync(reader, cancellationToken).ConfigureAwait(false);
var headerData = await ReadHeaderAsync(stream, cancellationToken).ConfigureAwait(false);
if (headerData.Length == 0)
{
return null;
@@ -104,7 +104,7 @@ public sealed partial class AceFileHeader
}
// Store the data start position
DataStartPosition = reader.Position;
DataStartPosition = stream.Position;
return this;
}

View File

@@ -56,9 +56,9 @@ public sealed partial class AceFileHeader : AceHeader
/// Returns null if no more entries or end of archive.
/// Supports both ACE 1.0 and ACE 2.0 formats.
/// </summary>
public override AceHeader? Read(Stream reader)
public override AceHeader? Read(Stream stream)
{
var headerData = ReadHeader(reader);
var headerData = ReadHeader(stream);
if (headerData.Length == 0)
{
return null;
@@ -144,7 +144,7 @@ public sealed partial class AceFileHeader : AceHeader
}
// Store the data start position
DataStartPosition = reader.Position;
DataStartPosition = stream.Position;
return this;
}

View File

@@ -48,7 +48,7 @@ public abstract partial class AceHeader
var checksum = AceCrc.AceCrc16(body);
if (checksum != HeaderCrc)
{
throw new InvalidFormatException("Header checksum is invalid");
throw new InvalidDataException("Header checksum is invalid");
}
return body;
}

View File

@@ -89,7 +89,7 @@ public abstract partial class AceHeader
var checksum = AceCrc.AceCrc16(body);
if (checksum != HeaderCrc)
{
throw new InvalidFormatException("Header checksum is invalid");
throw new InvalidDataException("Header checksum is invalid");
}
return body;
}

View File

@@ -15,11 +15,11 @@ public sealed partial class AceMainHeader
/// Supports both ACE 1.0 and ACE 2.0 formats.
/// </summary>
public override async ValueTask<AceHeader?> ReadAsync(
Stream reader,
Stream stream,
CancellationToken cancellationToken = default
)
{
var headerData = await ReadHeaderAsync(reader, cancellationToken).ConfigureAwait(false);
var headerData = await ReadHeaderAsync(stream, cancellationToken).ConfigureAwait(false);
if (headerData.Length == 0)
{
return null;
@@ -39,7 +39,7 @@ public sealed partial class AceMainHeader
// Skip signature "**ACE**" (7 bytes)
if (!CheckMagicBytes(headerData, offset))
{
throw new InvalidFormatException("Invalid ACE archive signature.");
throw new InvalidDataException("Invalid ACE archive signature.");
}
offset += 7;

View File

@@ -32,9 +32,9 @@ public sealed partial class AceMainHeader : AceHeader
/// Returns header if this is a valid ACE archive.
/// Supports both ACE 1.0 and ACE 2.0 formats.
/// </summary>
public override AceHeader? Read(Stream reader)
public override AceHeader? Read(Stream stream)
{
var headerData = ReadHeader(reader);
var headerData = ReadHeader(stream);
if (headerData.Length == 0)
{
return null;
@@ -54,7 +54,7 @@ public sealed partial class AceMainHeader : AceHeader
// Skip signature "**ACE**" (7 bytes)
if (!CheckMagicBytes(headerData, offset))
{
throw new InvalidFormatException("Invalid ACE archive signature.");
throw new InvalidDataException("Invalid ACE archive signature.");
}
offset += 7;

View File

@@ -2,7 +2,6 @@ using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.ArcLzw;
using SharpCompress.Compressors.Lzw;
using SharpCompress.Compressors.RLE90;
using SharpCompress.Compressors.Squeezed;

View File

@@ -8,7 +8,6 @@ using SharpCompress.Common.GZip;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.ArcLzw;
using SharpCompress.Compressors.Lzw;
using SharpCompress.Compressors.RLE90;
using SharpCompress.Compressors.Squeezed;

View File

@@ -26,34 +26,43 @@ public class ArjFilePart : FilePart
internal override Stream GetCompressedStream()
{
Stream compressedStream;
switch (Header.CompressionMethod)
if (_stream != null)
{
case CompressionMethod.Stored:
compressedStream = new ReadOnlySubStream(
_stream,
Header.DataStartPosition,
Header.CompressedSize
);
break;
case CompressionMethod.CompressedMost:
case CompressionMethod.Compressed:
case CompressionMethod.CompressedFaster:
if (Header.OriginalSize > 128 * 1024)
{
throw new NotSupportedException(
"CompressionMethod: " + Header.CompressionMethod + " with size > 128KB"
Stream compressedStream;
switch (Header.CompressionMethod)
{
case CompressionMethod.Stored:
compressedStream = new ReadOnlySubStream(
_stream,
Header.DataStartPosition,
Header.CompressedSize
);
}
compressedStream = new LhaStream<Lh7DecoderCfg>(_stream, (int)Header.OriginalSize);
break;
case CompressionMethod.CompressedFastest:
compressedStream = new LHDecoderStream(_stream, (int)Header.OriginalSize);
break;
default:
throw new NotSupportedException("CompressionMethod: " + Header.CompressionMethod);
break;
case CompressionMethod.CompressedMost:
case CompressionMethod.Compressed:
case CompressionMethod.CompressedFaster:
if (Header.OriginalSize > 128 * 1024)
{
throw new NotSupportedException(
"CompressionMethod: " + Header.CompressionMethod + " with size > 128KB"
);
}
compressedStream = new LhaStream<Lh7DecoderCfg>(
_stream,
(int)Header.OriginalSize
);
break;
case CompressionMethod.CompressedFastest:
compressedStream = new LHDecoderStream(_stream, (int)Header.OriginalSize);
break;
default:
throw new NotSupportedException(
"CompressionMethod: " + Header.CompressionMethod
);
}
return compressedStream;
}
return compressedStream;
return _stream.NotNull();
}
internal override Stream GetRawStream() => _stream;

View File

@@ -28,7 +28,7 @@ public abstract partial class ArjHeader
if (!CheckMagicBytes(magic))
{
throw new InvalidFormatException("Not an ARJ file (wrong magic bytes)");
throw new InvalidDataException("Not an ARJ file (wrong magic bytes)");
}
// read header_size
@@ -55,7 +55,7 @@ public abstract partial class ArjHeader
// Compute the hash value
if (checksum != BitConverter.ToUInt32(crc, 0))
{
throw new InvalidFormatException("Header checksum is invalid");
throw new InvalidDataException("Header checksum is invalid");
}
return body;
}
@@ -75,7 +75,7 @@ public abstract partial class ArjHeader
.ConfigureAwait(false);
if (bytesRead < 2)
{
throw new IncompleteArchiveException(
throw new EndOfStreamException(
"Unexpected end of stream while reading extended header size."
);
}
@@ -92,7 +92,7 @@ public abstract partial class ArjHeader
.ConfigureAwait(false);
if (bytesRead < extHeaderSize)
{
throw new IncompleteArchiveException(
throw new EndOfStreamException(
"Unexpected end of stream while reading extended header data."
);
}
@@ -103,7 +103,7 @@ public abstract partial class ArjHeader
.ConfigureAwait(false);
if (bytesRead < 4)
{
throw new IncompleteArchiveException(
throw new EndOfStreamException(
"Unexpected end of stream while reading extended header CRC."
);
}
@@ -111,7 +111,7 @@ public abstract partial class ArjHeader
var checksum = Crc32Stream.Compute(header);
if (checksum != BitConverter.ToUInt32(crcextended, 0))
{
throw new InvalidFormatException("Extended header checksum is invalid");
throw new InvalidDataException("Extended header checksum is invalid");
}
extendedHeader.Add(header);

View File

@@ -18,6 +18,7 @@ public enum ArjHeaderType
public abstract partial class ArjHeader
{
private const int FIRST_HDR_SIZE = 34;
private const ushort ARJ_MAGIC = 0xEA60;
public ArjHeader(ArjHeaderType type)
@@ -44,7 +45,7 @@ public abstract partial class ArjHeader
if (!CheckMagicBytes(magic))
{
throw new InvalidFormatException("Not an ARJ file (wrong magic bytes)");
throw new InvalidDataException("Not an ARJ file (wrong magic bytes)");
}
// read header_size
@@ -69,7 +70,7 @@ public abstract partial class ArjHeader
// Compute the hash value
if (checksum != BitConverter.ToUInt32(crc, 0))
{
throw new InvalidFormatException("Header checksum is invalid");
throw new InvalidDataException("Header checksum is invalid");
}
return body;
}
@@ -86,7 +87,7 @@ public abstract partial class ArjHeader
int bytesRead = reader.Read(buffer, 0, 2);
if (bytesRead < 2)
{
throw new IncompleteArchiveException(
throw new EndOfStreamException(
"Unexpected end of stream while reading extended header size."
);
}
@@ -101,7 +102,7 @@ public abstract partial class ArjHeader
bytesRead = reader.Read(header, 0, extHeaderSize);
if (bytesRead < extHeaderSize)
{
throw new IncompleteArchiveException(
throw new EndOfStreamException(
"Unexpected end of stream while reading extended header data."
);
}
@@ -110,7 +111,7 @@ public abstract partial class ArjHeader
bytesRead = reader.Read(crc, 0, 4);
if (bytesRead < 4)
{
throw new IncompleteArchiveException(
throw new EndOfStreamException(
"Unexpected end of stream while reading extended header CRC."
);
}
@@ -118,7 +119,7 @@ public abstract partial class ArjHeader
var checksum = Crc32Stream.Compute(header);
if (checksum != BitConverter.ToUInt32(crc, 0))
{
throw new InvalidFormatException("Extended header checksum is invalid");
throw new InvalidDataException("Extended header checksum is invalid");
}
extendedHeader.Add(header);
@@ -137,11 +138,7 @@ public abstract partial class ArjHeader
public static FileType FileTypeFromByte(byte value)
{
#if LEGACY_DOTNET
return Enum.IsDefined(typeof(FileType), value) ? (FileType)value : Headers.FileType.Unknown;
#else
return Enum.IsDefined((FileType)value) ? (FileType)value : Headers.FileType.Unknown;
#endif
}
public static bool IsArchive(Stream stream)
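ARJ_MAGIC (0xEA60) is stored little-endian, so a valid archive begins with the bytes 0x60 0xEA. A minimal sketch of a signature probe built on that constant (LooksLikeArj is a hypothetical helper name):

    static bool LooksLikeArj(ReadOnlySpan<byte> magic) =>
        // Reassemble the little-endian ushort and compare against ARJ_MAGIC.
        magic.Length >= 2 && (ushort)(magic[0] | (magic[1] << 8)) == 0xEA60;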

View File

@@ -7,16 +7,16 @@ namespace SharpCompress.Common.Arj.Headers;
public partial class ArjLocalHeader
{
public override async ValueTask<ArjHeader?> ReadAsync(
Stream reader,
Stream stream,
CancellationToken cancellationToken = default
)
{
var body = await ReadHeaderAsync(reader, cancellationToken).ConfigureAwait(false);
var body = await ReadHeaderAsync(stream, cancellationToken).ConfigureAwait(false);
if (body.Length > 0)
{
await ReadExtendedHeadersAsync(reader, cancellationToken).ConfigureAwait(false);
await ReadExtendedHeadersAsync(stream, cancellationToken).ConfigureAwait(false);
var header = LoadFrom(body);
header.DataStartPosition = reader.Position;
header.DataStartPosition = stream.Position;
return header;
}
return null;

View File

@@ -43,14 +43,14 @@ public partial class ArjLocalHeader : ArjHeader
archiveEncoding ?? throw new ArgumentNullException(nameof(archiveEncoding));
}
public override ArjHeader? Read(Stream reader)
public override ArjHeader? Read(Stream stream)
{
var body = ReadHeader(reader);
var body = ReadHeader(stream);
if (body.Length > 0)
{
ReadExtendedHeaders(reader);
ReadExtendedHeaders(stream);
var header = LoadFrom(body);
header.DataStartPosition = reader.Position;
header.DataStartPosition = stream.Position;
return header;
}
return null;
@@ -66,7 +66,7 @@ public partial class ArjLocalHeader : ArjHeader
{
if (offset + 1 >= headerBytes.Length)
{
throw new IncompleteArchiveException("Unexpected end of stream.");
throw new EndOfStreamException();
}
var v = headerBytes[offset] & 0xFF | (headerBytes[offset + 1] & 0xFF) << 8;
offset += 2;
@@ -76,7 +76,7 @@ public partial class ArjLocalHeader : ArjHeader
{
if (offset + 3 >= headerBytes.Length)
{
throw new IncompleteArchiveException("Unexpected end of stream.");
throw new EndOfStreamException();
}
long v =
headerBytes[offset] & 0xFF

View File

@@ -7,12 +7,12 @@ namespace SharpCompress.Common.Arj.Headers;
public partial class ArjMainHeader
{
public override async ValueTask<ArjHeader?> ReadAsync(
Stream reader,
Stream stream,
CancellationToken cancellationToken = default
)
{
var body = await ReadHeaderAsync(reader, cancellationToken).ConfigureAwait(false);
await ReadExtendedHeadersAsync(reader, cancellationToken).ConfigureAwait(false);
var body = await ReadHeaderAsync(stream, cancellationToken).ConfigureAwait(false);
await ReadExtendedHeadersAsync(stream, cancellationToken).ConfigureAwait(false);
return LoadFrom(body);
}
}

View File

@@ -10,6 +10,9 @@ namespace SharpCompress.Common.Arj.Headers;
public partial class ArjMainHeader : ArjHeader
{
private const int FIRST_HDR_SIZE = 34;
private const ushort ARJ_MAGIC = 0xEA60;
public ArchiveEncoding ArchiveEncoding { get; }
public int ArchiverVersionNumber { get; private set; }
@@ -37,10 +40,10 @@ public partial class ArjMainHeader : ArjHeader
archiveEncoding ?? throw new ArgumentNullException(nameof(archiveEncoding));
}
public override ArjHeader? Read(Stream reader)
public override ArjHeader? Read(Stream stream)
{
var body = ReadHeader(reader);
ReadExtendedHeaders(reader);
var body = ReadHeader(stream);
ReadExtendedHeaders(stream);
return LoadFrom(body);
}
@@ -54,7 +57,7 @@ public partial class ArjMainHeader : ArjHeader
{
if (offset >= headerBytes.Length)
{
throw new IncompleteArchiveException("Unexpected end of stream.");
throw new EndOfStreamException();
}
return (byte)(headerBytes[offset++] & 0xFF);
}
@@ -63,7 +66,7 @@ public partial class ArjMainHeader : ArjHeader
{
if (offset + 1 >= headerBytes.Length)
{
throw new IncompleteArchiveException("Unexpected end of stream.");
throw new EndOfStreamException();
}
var v = headerBytes[offset] & 0xFF | (headerBytes[offset + 1] & 0xFF) << 8;
offset += 2;
@@ -74,7 +77,7 @@ public partial class ArjMainHeader : ArjHeader
{
if (offset + 3 >= headerBytes.Length)
{
throw new IncompleteArchiveException("Unexpected end of stream.");
throw new EndOfStreamException();
}
long v =
headerBytes[offset] & 0xFF

View File

@@ -32,6 +32,5 @@ public class DosDateTime
}
}
public override string ToString() =>
DateTime.ToString("yyyy-MM-dd HH:mm:ss", Constants.DefaultCultureInfo);
public override string ToString() => DateTime.ToString("yyyy-MM-dd HH:mm:ss");
}

View File

@@ -1,5 +1,3 @@
using System.Globalization;
namespace SharpCompress.Common;
public static class Constants
@@ -40,6 +38,4 @@ public static class Constants
/// </para>
/// </remarks>
public static int RewindableBufferSize { get; set; } = 81920;
public static CultureInfo DefaultCultureInfo { get; set; } = CultureInfo.InvariantCulture;
}
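Because the property is a settable static, callers can widen the rewind window before probing large non-seekable inputs; a hedged usage example:

    // Let format detection back up further on a non-seekable stream.
    SharpCompress.Common.Constants.RewindableBufferSize = 256 * 1024;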

View File

@@ -41,11 +41,7 @@ public partial class EntryStream : Stream
{
if (Utility.UseSyncOverAsyncDispose())
{
#pragma warning disable VSTHRD002 // Avoid problematic synchronous waits
#pragma warning disable CA2012
SkipEntryAsync().GetAwaiter().GetResult();
#pragma warning restore CA2012
#pragma warning restore VSTHRD002 // Avoid problematic synchronous waits
}
else
{

View File

@@ -46,11 +46,7 @@ internal static class FlagUtility
/// <param name="flag">Flag to test</param>
/// <returns></returns>
public static bool HasFlag<T>(T bitField, T flag)
where T : struct =>
HasFlag(
Convert.ToInt64(bitField, Constants.DefaultCultureInfo),
Convert.ToInt64(flag, Constants.DefaultCultureInfo)
);
where T : struct => HasFlag(Convert.ToInt64(bitField), Convert.ToInt64(flag));
/// <summary>
/// Returns true if the flag is set on the specified bit field.
@@ -86,10 +82,5 @@ internal static class FlagUtility
/// <param name="on">bool</param>
/// <returns>The flagged variable with the flag changed</returns>
public static long SetFlag<T>(T bitField, T flag, bool on)
where T : struct =>
SetFlag(
Convert.ToInt64(bitField, Constants.DefaultCultureInfo),
Convert.ToInt64(flag, Constants.DefaultCultureInfo),
on
);
where T : struct => SetFlag(Convert.ToInt64(bitField), Convert.ToInt64(flag), on);
}
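Converting both operands to long first lets one implementation serve enums of any underlying type. A hedged worked example against the helper above (TestFlags is hypothetical, and FlagUtility is internal, so this only compiles inside the assembly):

    [Flags]
    enum TestFlags : ushort { None = 0, A = 1, B = 2 }

    // (A | B) & B == B, so the flag is reported as set.
    var isSet = FlagUtility.HasFlag(TestFlags.A | TestFlags.B, TestFlags.B); // true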

View File

@@ -12,9 +12,7 @@ public partial class GZipEntry
)
{
yield return new GZipEntry(
await GZipFilePart
.CreateAsync(stream, options.ArchiveEncoding, options.Providers)
.ConfigureAwait(false),
await GZipFilePart.CreateAsync(stream, options.ArchiveEncoding).ConfigureAwait(false),
options
);
}

View File

@@ -46,10 +46,7 @@ public partial class GZipEntry : Entry
internal static IEnumerable<GZipEntry> GetEntries(Stream stream, ReaderOptions options)
{
yield return new GZipEntry(
GZipFilePart.Create(stream, options.ArchiveEncoding, options.Providers),
options
);
yield return new GZipEntry(GZipFilePart.Create(stream, options.ArchiveEncoding), options);
}
// Async methods moved to GZipEntry.Async.cs

View File

@@ -5,9 +5,7 @@ using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Providers;
namespace SharpCompress.Common.GZip;
@@ -16,11 +14,10 @@ internal sealed partial class GZipFilePart
internal static async ValueTask<GZipFilePart> CreateAsync(
Stream stream,
IArchiveEncoding archiveEncoding,
CompressionProviderRegistry compressionProviders,
CancellationToken cancellationToken = default
)
{
var part = new GZipFilePart(stream, archiveEncoding, compressionProviders);
var part = new GZipFilePart(stream, archiveEncoding);
await part.ReadAndValidateGzipHeaderAsync(cancellationToken).ConfigureAwait(false);
if (stream.CanSeek)
@@ -134,14 +131,4 @@ internal sealed partial class GZipFilePart
var buffer = list.ToArray();
return ArchiveEncoding.Decode(buffer);
}
internal override async ValueTask<Stream?> GetCompressedStreamAsync(
CancellationToken cancellationToken = default
)
{
// GZip uses Deflate compression
return await _compressionProviders
.CreateDecompressStreamAsync(CompressionType.Deflate, _stream, cancellationToken)
.ConfigureAwait(false);
}
}

View File

@@ -5,7 +5,6 @@ using System.IO;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Providers;
namespace SharpCompress.Common.GZip;
@@ -13,15 +12,10 @@ internal sealed partial class GZipFilePart : FilePart
{
private string? _name;
private readonly Stream _stream;
private readonly CompressionProviderRegistry _compressionProviders;
internal static GZipFilePart Create(
Stream stream,
IArchiveEncoding archiveEncoding,
CompressionProviderRegistry compressionProviders
)
internal static GZipFilePart Create(Stream stream, IArchiveEncoding archiveEncoding)
{
var part = new GZipFilePart(stream, archiveEncoding, compressionProviders);
var part = new GZipFilePart(stream, archiveEncoding);
part.ReadAndValidateGzipHeader();
if (stream.CanSeek)
@@ -41,16 +35,8 @@ internal sealed partial class GZipFilePart : FilePart
return part;
}
private GZipFilePart(
Stream stream,
IArchiveEncoding archiveEncoding,
CompressionProviderRegistry compressionProviders
)
: base(archiveEncoding)
{
_stream = stream;
_compressionProviders = compressionProviders;
}
private GZipFilePart(Stream stream, IArchiveEncoding archiveEncoding)
: base(archiveEncoding) => _stream = stream;
internal long EntryStartPosition { get; private set; }
@@ -60,11 +46,13 @@ internal sealed partial class GZipFilePart : FilePart
internal override string? FilePartName => _name;
internal override Stream GetCompressedStream()
{
//GZip uses Deflate compression, at this point we need a deflate stream
return _compressionProviders.CreateDecompressStream(CompressionType.Deflate, _stream);
}
internal override Stream GetCompressedStream() =>
new DeflateStream(
_stream,
CompressionMode.Decompress,
CompressionLevel.Default,
leaveOpen: true
);
internal override Stream GetRawStream() => _stream;
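Both variants above rely on the same fact: after the gzip member header, the payload is raw deflate data. Once the header has been consumed, a hedged equivalent using the BCL would be:

    using var deflate = new System.IO.Compression.DeflateStream(
        rawStream,
        System.IO.Compression.CompressionMode.Decompress,
        leaveOpen: true // keep the archive stream alive for the trailer (CRC32 + ISIZE)
    );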

View File

@@ -15,9 +15,7 @@ public partial class LzwEntry
)
{
yield return new LzwEntry(
await LzwFilePart
.CreateAsync(stream, options.ArchiveEncoding, options.Providers, cancellationToken)
.ConfigureAwait(false),
await LzwFilePart.CreateAsync(stream, options.ArchiveEncoding, cancellationToken),
options
);
}

View File

@@ -46,10 +46,7 @@ public partial class LzwEntry : Entry
internal static IEnumerable<LzwEntry> GetEntries(Stream stream, ReaderOptions options)
{
yield return new LzwEntry(
LzwFilePart.Create(stream, options.ArchiveEncoding, options.Providers),
options
);
yield return new LzwEntry(LzwFilePart.Create(stream, options.ArchiveEncoding), options);
}
// Async methods moved to LzwEntry.Async.cs

View File

@@ -1,8 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Providers;
namespace SharpCompress.Common.Lzw;
@@ -11,25 +9,15 @@ internal sealed partial class LzwFilePart
internal static async ValueTask<LzwFilePart> CreateAsync(
Stream stream,
IArchiveEncoding archiveEncoding,
CompressionProviderRegistry compressionProviders,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
var part = new LzwFilePart(stream, archiveEncoding, compressionProviders);
var part = new LzwFilePart(stream, archiveEncoding);
// For non-seekable streams, we can't track position, so use 0 since the stream will be
// read sequentially from its current position.
part.EntryStartPosition = stream.CanSeek ? stream.Position : 0;
return part;
}
internal override async ValueTask<Stream?> GetCompressedStreamAsync(
CancellationToken cancellationToken = default
)
{
return await _compressionProviders
.CreateDecompressStreamAsync(CompressionType.Lzw, _stream, cancellationToken)
.ConfigureAwait(false);
}
}

View File

@@ -1,6 +1,5 @@
using System.IO;
using SharpCompress.Common;
using SharpCompress.Providers;
using SharpCompress.Compressors.Lzw;
namespace SharpCompress.Common.Lzw;
@@ -8,15 +7,10 @@ internal sealed partial class LzwFilePart : FilePart
{
private readonly Stream _stream;
private readonly string? _name;
private readonly CompressionProviderRegistry _compressionProviders;
internal static LzwFilePart Create(
Stream stream,
IArchiveEncoding archiveEncoding,
CompressionProviderRegistry compressionProviders
)
internal static LzwFilePart Create(Stream stream, IArchiveEncoding archiveEncoding)
{
var part = new LzwFilePart(stream, archiveEncoding, compressionProviders);
var part = new LzwFilePart(stream, archiveEncoding);
// For non-seekable streams, we can't track position, so use 0 since the stream will be
// read sequentially from its current position.
@@ -24,16 +18,11 @@ internal sealed partial class LzwFilePart : FilePart
return part;
}
private LzwFilePart(
Stream stream,
IArchiveEncoding archiveEncoding,
CompressionProviderRegistry compressionProviders
)
private LzwFilePart(Stream stream, IArchiveEncoding archiveEncoding)
: base(archiveEncoding)
{
_stream = stream;
_name = DeriveFileName(stream);
_compressionProviders = compressionProviders;
}
internal long EntryStartPosition { get; private set; }
@@ -41,7 +30,7 @@ internal sealed partial class LzwFilePart : FilePart
internal override string? FilePartName => _name;
internal override Stream GetCompressedStream() =>
_compressionProviders.CreateDecompressStream(CompressionType.Lzw, _stream);
new LzwStream(_stream) { IsStreamOwner = false };
internal override Stream GetRawStream() => _stream;
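A usage note for the direct construction above: IsStreamOwner = false keeps the underlying stream open when the decoder is disposed, which matters because the same stream is also handed out via GetRawStream. A hedged example:

    using var lzw = new LzwStream(rawStream) { IsStreamOwner = false };
    lzw.CopyTo(destination); // decode the LZW payload without closing rawStream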

View File

@@ -1,6 +1,3 @@
using SharpCompress.Compressors;
using SharpCompress.Providers;
namespace SharpCompress.Common.Options;
public interface IReaderOptions
@@ -9,40 +6,10 @@ public interface IReaderOptions
IProgressOptions,
IExtractionOptions
{
/// <summary>
/// Look for RarArchive (Check for self-extracting archives or cases where RarArchive isn't at the start of the file)
/// </summary>
bool LookForHeader { get; init; }
/// <summary>
/// Password for encrypted archives.
/// </summary>
string? Password { get; init; }
/// <summary>
/// Disable checking for incomplete archives.
/// </summary>
bool DisableCheckIncomplete { get; init; }
/// <summary>
/// Buffer size for stream operations.
/// </summary>
int BufferSize { get; init; }
/// <summary>
/// Provide a hint for the extension of the archive being read, can speed up finding the correct decoder.
/// </summary>
string? ExtensionHint { get; init; }
/// <summary>
/// Size of the rewindable buffer for non-seekable streams.
/// </summary>
int? RewindableBufferSize { get; init; }
/// <summary>
/// Registry of compression providers.
/// Defaults to <see cref="CompressionProviderRegistry.Default" /> but can be replaced with custom providers.
/// Use this to provide alternative decompression implementations.
/// </summary>
CompressionProviderRegistry Providers { get; init; }
}
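With every member init-only, configuration stays declarative at the call sites shown earlier; a hedged example using the concrete ReaderOptions type:

    var options = new ReaderOptions
    {
        Password = "secret", // encrypted archives
        LookForHeader = true, // tolerate self-extracting RARs
        BufferSize = 81920, // stream copy buffer
    };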

View File

@@ -1,28 +1,9 @@
using SharpCompress.Common;
using SharpCompress.Compressors;
using SharpCompress.Providers;
namespace SharpCompress.Common.Options;
/// <summary>
/// Options for configuring writer behavior when creating archives.
/// </summary>
public interface IWriterOptions : IStreamOptions, IEncodingOptions, IProgressOptions
{
/// <summary>
/// The compression type to use for the archive.
/// </summary>
CompressionType CompressionType { get; init; }
/// <summary>
/// The compression level to be used when the compression type supports variable levels.
/// </summary>
int CompressionLevel { get; init; }
/// <summary>
/// Registry of compression providers.
/// Defaults to <see cref="CompressionProviderRegistry.Default" /> but can be replaced with custom providers, such as
/// System.IO.Compression for Deflate/GZip on modern .NET.
/// </summary>
CompressionProviderRegistry Providers { get; init; }
}

View File

@@ -8,10 +8,7 @@ using SharpCompress.IO;
namespace SharpCompress.Common.Rar;
internal class AsyncMarkingBinaryReader : IDisposable
#if NET8_0_OR_GREATER
, IAsyncDisposable
#endif
internal class AsyncMarkingBinaryReader
{
private readonly AsyncBinaryReader _reader;
@@ -94,8 +91,8 @@ internal class AsyncMarkingBinaryReader : IDisposable
}
public async ValueTask<ulong> ReadRarVIntAsync(
int maxBytes = 10,
CancellationToken cancellationToken = default
CancellationToken cancellationToken = default,
int maxBytes = 10
) => await DoReadRarVIntAsync((maxBytes - 1) * 7, cancellationToken).ConfigureAwait(false);
private async ValueTask<ulong> DoReadRarVIntAsync(
@@ -124,7 +121,7 @@ internal class AsyncMarkingBinaryReader : IDisposable
shift += 7;
} while (shift <= maxShift);
throw new InvalidFormatException("malformed vint");
throw new FormatException("malformed vint");
}
public async ValueTask<uint> ReadRarVIntUInt32Async(
@@ -188,12 +185,6 @@ internal class AsyncMarkingBinaryReader : IDisposable
shift += 7;
} while (shift <= maxShift);
throw new InvalidFormatException("malformed vint");
throw new FormatException("malformed vint");
}
public virtual void Dispose() => _reader.Dispose();
#if NET8_0_OR_GREATER
public virtual ValueTask DisposeAsync() => _reader.DisposeAsync();
#endif
}
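The vint loops above decode RAR5's variable-length integers: seven payload bits per byte, least-significant group first, with the high bit flagging a continuation. A hedged synchronous sketch of the same decoding:

    static ulong ReadRarVInt(Stream stream, int maxBytes = 10)
    {
        ulong value = 0;
        for (var shift = 0; shift < maxBytes * 7; shift += 7)
        {
            var b = stream.ReadByte();
            if (b < 0)
            {
                throw new EndOfStreamException();
            }
            value |= (ulong)(b & 0x7F) << shift; // low 7 bits carry payload
            if ((b & 0x80) == 0)
            {
                return value; // high bit clear: last byte
            }
        }
        throw new FormatException("malformed vint");
    }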

View File

@@ -1,23 +1,18 @@
using System;
using System.Diagnostics.CodeAnalysis;
#nullable disable
using System.Security.Cryptography;
using System.Text;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Common.Rar;
[SuppressMessage(
"Security",
"CA5350:Do Not Use Weak Cryptographic Algorithms",
Justification = "RAR3 key derivation is SHA-1 based by format definition."
)]
internal class CryptKey3 : ICryptKey
{
const int AES_128 = 128;
private readonly string _password;
private string _password;
public CryptKey3(string? password) => _password = password ?? string.Empty;
public CryptKey3(string password) => _password = password ?? "";
public ICryptoTransform Transformer(byte[] salt)
{
@@ -37,9 +32,7 @@ internal class CryptKey3 : ICryptKey
rawPassword[i + rawLength] = salt[i];
}
#if LEGACY_DOTNET
var msgDigest = SHA1.Create();
#endif
const int noOfRounds = (1 << 18);
const int iblock = 3;
@@ -57,19 +50,11 @@ internal class CryptKey3 : ICryptKey
if (i % (noOfRounds / EncryptionConstV5.SIZE_INITV) == 0)
{
#if LEGACY_DOTNET
digest = msgDigest.ComputeHash(data, 0, (i + 1) * (rawPassword.Length + iblock));
#else
digest = SHA1.HashData(data.AsSpan(0, (i + 1) * (rawPassword.Length + iblock)));
#endif
aesIV[i / (noOfRounds / EncryptionConstV5.SIZE_INITV)] = digest[19];
}
}
#if LEGACY_DOTNET
digest = msgDigest.ComputeHash(data);
#else
digest = SHA1.HashData(data);
#endif
//slow code ends
var aesKey = new byte[EncryptionConstV5.SIZE_INITV];

View File

@@ -14,8 +14,8 @@ internal class CryptKey5 : ICryptKey
private string _password;
private Rar5CryptoInfo _cryptoInfo;
private byte[] _pswCheck = [];
private byte[] _hashKey = [];
private byte[] _pswCheck = { };
private byte[] _hashKey = { };
public CryptKey5(string? password, Rar5CryptoInfo rar5CryptoInfo)
{
@@ -34,13 +34,8 @@ internal class CryptKey5 : ICryptKey
int keyLength
)
{
var passwordBytes = Encoding.UTF8.GetBytes(password);
#if LEGACY_DOTNET
using var hmac = new HMACSHA256(passwordBytes);
using var hmac = new HMACSHA256(Encoding.UTF8.GetBytes(password));
var block = hmac.ComputeHash(salt);
#else
var block = HMACSHA256.HashData(passwordBytes, salt);
#endif
var finalHash = (byte[])block.Clone();
var loop = new int[] { iterations, 17, 17 };
@@ -50,11 +45,7 @@ internal class CryptKey5 : ICryptKey
{
for (var i = 1; i < loop[x]; i++)
{
#if LEGACY_DOTNET
block = hmac.ComputeHash(block);
#else
block = HMACSHA256.HashData(passwordBytes, block);
#endif
for (var j = 0; j < finalHash.Length; j++)
{
finalHash[j] ^= block[j];
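The loop above is a PBKDF2-style construction: seed the accumulator with HMAC-SHA256 of the salt, then repeatedly re-hash the previous block and XOR it in. A hedged standalone sketch of one derivation stage:

    static byte[] DeriveRounds(System.Security.Cryptography.HMACSHA256 hmac, byte[] seed, int rounds)
    {
        var block = hmac.ComputeHash(seed); // round 0
        var final = (byte[])block.Clone();
        for (var i = 1; i < rounds; i++)
        {
            block = hmac.ComputeHash(block); // chain the previous block
            for (var j = 0; j < final.Length; j++)
            {
                final[j] ^= block[j]; // fold every round into the result
            }
        }
        return final;
    }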

View File

@@ -1,3 +1,5 @@
#nullable disable
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar;

View File

@@ -1,3 +1,5 @@
#nullable disable
using SharpCompress.Common.Rar;
using SharpCompress.IO;

View File

@@ -44,7 +44,9 @@ internal sealed partial class ArchiveHeader
PosAv = await reader.ReadInt32Async(cancellationToken).ConfigureAwait(false);
if (HasFlag(ArchiveFlagsV4.ENCRYPT_VER))
{
_ = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
EncryptionVersion = await reader
.ReadByteAsync(cancellationToken)
.ConfigureAwait(false);
}
}
}

View File

@@ -29,7 +29,7 @@ internal sealed partial class ArchiveHeader : RarHeader
PosAv = reader.ReadInt32();
if (HasFlag(ArchiveFlagsV4.ENCRYPT_VER))
{
_ = reader.ReadByte();
EncryptionVersion = reader.ReadByte();
}
}
}
@@ -44,6 +44,8 @@ internal sealed partial class ArchiveHeader : RarHeader
internal int? PosAv { get; private set; }
private byte? EncryptionVersion { get; set; }
public bool? IsEncrypted => IsRar5 ? null : HasFlag(ArchiveFlagsV4.PASSWORD);
public bool OldNumberingFormat => !IsRar5 && !HasFlag(ArchiveFlagsV4.NEW_NUMBERING);

View File

@@ -79,7 +79,7 @@ internal partial class FileHeader
CompressionMethod = (byte)((compressionInfo >> 7) & 0x7);
WindowSize = IsDirectory ? 0 : ((size_t)0x20000) << ((compressionInfo >> 10) & 0xf);
_ = await reader
HostOs = await reader
.ReadRarVIntByteAsync(cancellationToken: cancellationToken)
.ConfigureAwait(false);
@@ -222,7 +222,7 @@ internal partial class FileHeader
.ReadUInt32Async(cancellationToken)
.ConfigureAwait(false);
_ = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
HostOs = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
FileCrc = await reader.ReadBytesAsync(4, cancellationToken).ConfigureAwait(false);

View File

@@ -72,7 +72,7 @@ internal partial class FileHeader : RarHeader
// Bits 11 - 14 (0x3c00) define the minimum dictionary size required to extract the data. Value 0 means 128 KB, 1 - 256 KB, ..., 14 - 2048 MB, 15 - 4096 MB.
WindowSize = IsDirectory ? 0 : ((size_t)0x20000) << ((compressionInfo >> 10) & 0xf);
_ = reader.ReadRarVIntByte();
HostOs = reader.ReadRarVIntByte();
var nameSize = reader.ReadRarVIntUInt16();
@@ -197,7 +197,7 @@ internal partial class FileHeader : RarHeader
var lowUncompressedSize = reader.ReadUInt32();
_ = reader.ReadByte();
HostOs = reader.ReadByte();
FileCrc = reader.ReadBytes(4);
@@ -415,6 +415,7 @@ internal partial class FileHeader : RarHeader
internal byte[]? R4Salt { get; private set; }
internal Rar5CryptoInfo? Rar5CryptoInfo { get; private set; }
private byte HostOs { get; set; }
internal uint FileAttributes { get; private set; }
internal long CompressedSize { get; private set; }
internal long UncompressedSize { get; private set; }
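A hedged worked example of the window-size formula above: 0x20000 is 128 KB and the 4-bit field selects the shift, so field value 3 yields 1 MB.

    var compressionInfo = 3u << 10; // dictionary field = 3
    var windowSize = 0x20000UL << (int)((compressionInfo >> 10) & 0xf);
    // 128 KB << 3 = 1,048,576 bytes (1 MB)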

View File

@@ -20,7 +20,7 @@ internal partial class MarkHeader
{
return buffer[0];
}
throw new IncompleteArchiveException("Unexpected end of stream.");
throw new EndOfStreamException();
}
public static async ValueTask<MarkHeader> ReadAsync(
@@ -122,11 +122,7 @@ internal partial class MarkHeader
{
if (!leaveStreamOpen)
{
#if LEGACY_DOTNET
stream.Dispose();
#else
await stream.DisposeAsync().ConfigureAwait(false);
#endif
}
throw new InvalidFormatException("Error trying to read rar signature.", e);
}

View File

@@ -24,7 +24,7 @@ internal partial class MarkHeader : IRarHeader
{
return (byte)b;
}
throw new IncompleteArchiveException("Unexpected end of stream.");
throw new EndOfStreamException();
}
public static MarkHeader Read(Stream stream, bool leaveStreamOpen, bool lookForHeader)

View File

@@ -42,20 +42,4 @@ internal sealed class NewSubHeaderType : IEquatable<NewSubHeaderType>
}
public bool Equals(NewSubHeaderType? other) => other is not null && Equals(other._bytes);
public override bool Equals(object? obj) => obj is NewSubHeaderType other && Equals(other);
public override int GetHashCode()
{
unchecked
{
var hash = 17;
foreach (byte value in _bytes)
{
hash = (hash * 31) + value;
}
return hash;
}
}
}
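The hash shown above is the classic 17/31 accumulator over the identifying bytes, kept unchecked so overflow wraps instead of throwing. A standalone sketch:

    static int ByteHash(byte[] bytes)
    {
        unchecked
        {
            var hash = 17;
            foreach (var b in bytes)
            {
                hash = (hash * 31) + b; // prime multiplier spreads the bits
            }
            return hash;
        }
    }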

View File

@@ -43,17 +43,10 @@ internal class Rar5CryptoInfo
cryptoInfo.PswCheck = reader.ReadBytes(EncryptionConstV5.SIZE_PSWCHECK);
var _pswCheckCsm = reader.ReadBytes(EncryptionConstV5.SIZE_PSWCHECK_CSUM);
#if LEGACY_DOTNET
var sha = SHA256.Create();
cryptoInfo.UsePswCheck = sha.ComputeHash(cryptoInfo.PswCheck)
.AsSpan()
.StartsWith(_pswCheckCsm.AsSpan());
#else
cryptoInfo.UsePswCheck = SHA256
.HashData(cryptoInfo.PswCheck)
.AsSpan()
.StartsWith(_pswCheckCsm.AsSpan());
#endif
}
return cryptoInfo;
}
@@ -105,17 +98,10 @@ internal class Rar5CryptoInfo
.ReadBytesAsync(EncryptionConstV5.SIZE_PSWCHECK_CSUM, CancellationToken.None)
.ConfigureAwait(false);
#if LEGACY_DOTNET
var sha = SHA256.Create();
cryptoInfo.UsePswCheck = sha.ComputeHash(cryptoInfo.PswCheck)
.AsSpan()
.StartsWith(_pswCheckCsm.AsSpan());
#else
cryptoInfo.UsePswCheck = SHA256
.HashData(cryptoInfo.PswCheck)
.AsSpan()
.StartsWith(_pswCheckCsm.AsSpan());
#endif
}
return cryptoInfo;
}
@@ -132,9 +118,9 @@ internal class Rar5CryptoInfo
public int LG2Count = 0;
public byte[] InitV = [];
public byte[] InitV = { };
public byte[] Salt = [];
public byte[] Salt = { };
public byte[] PswCheck = [];
public byte[] PswCheck = { };
}
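The password check above recomputes SHA-256 over the stored check value and compares the archive's truncated checksum against its prefix. A hedged sketch with hypothetical locals (pswCheck, storedChecksum):

    // .NET 5+ static helper; LEGACY_DOTNET builds use SHA256.Create().ComputeHash instead.
    var usePswCheck = System.Security.Cryptography.SHA256
        .HashData(pswCheck)
        .AsSpan()
        .StartsWith(storedChecksum);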

View File

@@ -76,7 +76,6 @@ public abstract class RarEntry : Entry
public override string ToString() =>
string.Format(
Constants.DefaultCultureInfo,
"Entry Path: {0} Compressed Size: {1} Uncompressed Size: {2} CRC: {3}",
Key,
CompressedSize,

View File

@@ -134,13 +134,13 @@ public abstract class RarVolume : Volume
{
if (Mode == StreamingMode.Streaming)
{
throw new ArchiveOperationException(
throw new InvalidOperationException(
"ArchiveHeader should never been null in a streaming read."
);
}
// we only want to load the archive header to avoid overhead, but we have to do the nasty thing and reset the stream
_ = GetVolumeFileParts().First();
GetVolumeFileParts().First();
Stream.Position = 0;
}
}
@@ -243,15 +243,13 @@ public abstract class RarVolume : Volume
{
if (Mode == StreamingMode.Streaming)
{
throw new ArchiveOperationException(
throw new InvalidOperationException(
"ArchiveHeader should never been null in a streaming read."
);
}
// we only want to load the archive header to avoid overhead, but we have to do the nasty thing and reset the stream
#pragma warning disable CA2016 // Forward token if available; polyfill FirstAsync has no token overload
await GetVolumeFilePartsAsync(cancellationToken).FirstAsync().ConfigureAwait(false);
#pragma warning restore CA2016
Stream.Position = 0;
}
}

View File

@@ -1,8 +1,10 @@
#nullable disable
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.LZMA.Utilities;
using SharpCompress.Compressors.LZMA.Utilites;
namespace SharpCompress.Common.SevenZip;

View File

@@ -1,8 +1,10 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.LZMA.Utilities;
using SharpCompress.Compressors.LZMA.Utilites;
namespace SharpCompress.Common.SevenZip;
@@ -16,7 +18,7 @@ internal partial class ArchiveDatabase
internal List<long> _packSizes = new();
internal List<uint?> _packCrCs = new();
internal List<CFolder> _folders = new();
internal List<int> _numUnpackStreamsVector = null!;
internal List<int> _numUnpackStreamsVector;
internal List<CFileItem> _files = new();
internal List<long> _packStreamStartPositions = new();
@@ -33,7 +35,7 @@ internal partial class ArchiveDatabase
_packSizes.Clear();
_packCrCs.Clear();
_folders.Clear();
_numUnpackStreamsVector = null!;
_numUnpackStreamsVector = null;
_files.Clear();
_packStreamStartPositions.Clear();
@@ -87,7 +89,7 @@ internal partial class ArchiveDatabase
{
if (folderIndex >= _folders.Count)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
_folderStartFileIndex.Add(i); // check it

View File

@@ -1,13 +1,13 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.Deflate64;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.LZMA.Utilities;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
using BlockType = SharpCompress.Compressors.LZMA.Utilities.BlockType;
namespace SharpCompress.Common.SevenZip;
@@ -66,7 +66,7 @@ internal sealed partial class ArchiveReader
if (db._majorVersion != 0)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
var crcFromArchive = DataReader.Get32(_header, 8);
@@ -82,7 +82,7 @@ internal sealed partial class ArchiveReader
if (crc != crcFromArchive)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
db._startPositionAfterHeader = _streamOrigin + 0x20;
@@ -96,12 +96,12 @@ internal sealed partial class ArchiveReader
if (nextHeaderOffset < 0 || nextHeaderSize < 0 || nextHeaderSize > int.MaxValue)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
if (nextHeaderOffset > _streamEnding - db._startPositionAfterHeader)
{
throw new ArchiveOperationException("nextHeaderOffset is invalid");
throw new InvalidOperationException("nextHeaderOffset is invalid");
}
_stream.Seek(nextHeaderOffset, SeekOrigin.Current);
@@ -113,7 +113,7 @@ internal sealed partial class ArchiveReader
if (Crc.Finish(Crc.Update(Crc.INIT_CRC, header, 0, header.Length)) != nextHeaderCrc)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
using (var streamSwitch = new CStreamSwitch())
@@ -125,7 +125,7 @@ internal sealed partial class ArchiveReader
{
if (type != BlockType.EncodedHeader)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
var dataVector = await ReadAndDecodePackedStreamsAsync(
@@ -144,14 +144,14 @@ internal sealed partial class ArchiveReader
if (dataVector.Count != 1)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
streamSwitch.Set(this, dataVector[0]);
if (ReadId() != BlockType.Header)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
}
@@ -167,6 +167,10 @@ internal sealed partial class ArchiveReader
CancellationToken cancellationToken
)
{
#if DEBUG
Log.WriteLine("-- ReadAndDecodePackedStreamsAsync --");
Log.PushIndent();
#endif
try
{
ReadStreamsInfo(
@@ -232,7 +236,12 @@ internal sealed partial class ArchiveReader
}
return dataVector;
}
finally { }
finally
{
#if DEBUG
Log.PopIndent();
#endif
}
}
private async ValueTask ReadHeaderAsync(
@@ -241,6 +250,10 @@ internal sealed partial class ArchiveReader
CancellationToken cancellationToken
)
{
#if DEBUG
Log.WriteLine("-- ReadHeaderAsync --");
Log.PushIndent();
#endif
try
{
var type = ReadId();
@@ -251,7 +264,7 @@ internal sealed partial class ArchiveReader
type = ReadId();
}
List<byte[]>? dataVector = null;
List<byte[]> dataVector = null;
if (type == BlockType.AdditionalStreamsInfo)
{
dataVector = await ReadAndDecodePackedStreamsAsync(
@@ -305,10 +318,13 @@ internal sealed partial class ArchiveReader
if (type != BlockType.FilesInfo)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
var numFiles = ReadNum();
#if DEBUG
Log.WriteLine("NumFiles: " + numFiles);
#endif
db._files = new List<CFileItem>(numFiles);
for (var i = 0; i < numFiles; i++)
{
@@ -316,8 +332,8 @@ internal sealed partial class ArchiveReader
}
var emptyStreamVector = new BitVector(numFiles);
BitVector emptyFileVector = null!;
BitVector antiFileVector = null!;
BitVector emptyFileVector = null;
BitVector antiFileVector = null;
var numEmptyStreams = 0;
for (; ; )
@@ -335,14 +351,26 @@ internal sealed partial class ArchiveReader
case BlockType.Name:
using (var streamSwitch = new CStreamSwitch())
{
streamSwitch.Set(this, dataVector ?? []);
streamSwitch.Set(this, dataVector);
#if DEBUG
Log.Write("FileNames:");
#endif
for (var i = 0; i < db._files.Count; i++)
{
db._files[i].Name = _currentReader.ReadString();
#if DEBUG
Log.Write(" " + db._files[i].Name);
#endif
}
#if DEBUG
Log.WriteLine();
#endif
}
break;
case BlockType.WinAttributes:
#if DEBUG
Log.Write("WinAttributes:");
#endif
ReadAttributeVector(
dataVector,
numFiles,
@@ -356,75 +384,155 @@ internal sealed partial class ArchiveReader
}
db._files[i].Attrib = attr;
#if DEBUG
Log.Write(
" " + (attr.HasValue ? attr.Value.ToString("x8") : "n/a")
);
#endif
}
);
#if DEBUG
Log.WriteLine();
#endif
break;
case BlockType.EmptyStream:
emptyStreamVector = ReadBitVector(numFiles);
#if DEBUG
Log.Write("EmptyStream: ");
#endif
for (var i = 0; i < emptyStreamVector.Length; i++)
{
if (emptyStreamVector[i])
{
#if DEBUG
Log.Write("x");
#endif
numEmptyStreams++;
}
else { }
else
{
#if DEBUG
Log.Write(".");
#endif
}
}
#if DEBUG
Log.WriteLine();
#endif
emptyFileVector = new BitVector(numEmptyStreams);
antiFileVector = new BitVector(numEmptyStreams);
break;
case BlockType.EmptyFile:
emptyFileVector = ReadBitVector(numEmptyStreams);
#if DEBUG
Log.Write("EmptyFile: ");
for (var i = 0; i < numEmptyStreams; i++)
{
Log.Write(emptyFileVector[i] ? "x" : ".");
}
Log.WriteLine();
#endif
break;
case BlockType.Anti:
antiFileVector = ReadBitVector(numEmptyStreams);
#if DEBUG
Log.Write("Anti: ");
for (var i = 0; i < numEmptyStreams; i++)
{
Log.Write(antiFileVector[i] ? "x" : ".");
}
Log.WriteLine();
#endif
break;
case BlockType.StartPos:
#if DEBUG
Log.Write("StartPos:");
#endif
ReadNumberVector(
dataVector,
numFiles,
delegate(int i, long? startPos)
{
db._files[i].StartPos = startPos;
#if DEBUG
Log.Write(
" " + (startPos.HasValue ? startPos.Value.ToString() : "n/a")
);
#endif
}
);
#if DEBUG
Log.WriteLine();
#endif
break;
case BlockType.CTime:
#if DEBUG
Log.Write("CTime:");
#endif
ReadDateTimeVector(
dataVector,
numFiles,
delegate(int i, DateTime? time)
{
db._files[i].CTime = time;
#if DEBUG
Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
#endif
}
);
#if DEBUG
Log.WriteLine();
#endif
break;
case BlockType.ATime:
#if DEBUG
Log.Write("ATime:");
#endif
ReadDateTimeVector(
dataVector,
numFiles,
delegate(int i, DateTime? time)
{
db._files[i].ATime = time;
#if DEBUG
Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
#endif
}
);
#if DEBUG
Log.WriteLine();
#endif
break;
case BlockType.MTime:
#if DEBUG
Log.Write("MTime:");
#endif
ReadDateTimeVector(
dataVector,
numFiles,
delegate(int i, DateTime? time)
{
db._files[i].MTime = time;
#if DEBUG
Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
#endif
}
);
#if DEBUG
Log.WriteLine();
#endif
break;
case BlockType.Dummy:
#if DEBUG
Log.Write("Dummy: " + size);
#endif
for (long j = 0; j < size; j++)
{
if (ReadByte() != 0)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
}
break;
@@ -436,7 +544,7 @@ internal sealed partial class ArchiveReader
var checkRecordsSize = (db._majorVersion > 0 || db._minorVersion > 2);
if (checkRecordsSize && _currentReader.Offset - oldPos != size)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
}
@@ -464,6 +572,11 @@ internal sealed partial class ArchiveReader
}
}
}
finally { }
finally
{
#if DEBUG
Log.PopIndent();
#endif
}
}
}
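The empty `finally { }` stubs in this file turn back into DEBUG-only indent bookkeeping, so nested header reads print as a tree in the trace. The shape of the pattern, using the same Log helper seen in the hunks above:

void ReadSection(string name)
{
#if DEBUG
    Log.WriteLine("-- {0} --", name);
    Log.PushIndent();    // everything logged by the body prints one level deeper
#endif
    try
    {
        // ... parse the section body, possibly recursing into ReadSection ...
    }
    finally
    {
#if DEBUG
        Log.PopIndent(); // unwind even when parsing throws
#endif
    }
}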

View File

@@ -1,3 +1,5 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.Diagnostics;
@@ -5,22 +7,20 @@ using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.Deflate64;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.LZMA.Utilities;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
using BlockType = SharpCompress.Compressors.LZMA.Utilities.BlockType;
namespace SharpCompress.Common.SevenZip;
internal partial class ArchiveReader
{
internal Stream _stream = null!;
internal Stream _stream;
internal Stack<DataReader> _readerStack = new();
internal DataReader _currentReader = null!;
internal DataReader _currentReader;
internal long _streamOrigin;
internal long _streamEnding;
internal byte[] _header = null!;
internal byte[] _header;
private readonly Dictionary<int, Stream> _cachedStreams = new();
@@ -54,6 +54,9 @@ internal partial class ArchiveReader
{
return null;
}
#if DEBUG
Log.WriteLine("ReadId: {0}", (BlockType)id);
#endif
return (BlockType)id;
}
@@ -72,7 +75,7 @@ internal partial class ArchiveReader
}
if (type == BlockType.End)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
SkipData();
}
@@ -127,12 +130,12 @@ internal partial class ArchiveReader
return ReadBitVector(length);
}
private void ReadNumberVector(List<byte[]>? dataVector, int numFiles, Action<int, long?> action)
private void ReadNumberVector(List<byte[]> dataVector, int numFiles, Action<int, long?> action)
{
var defined = ReadOptionalBitVector(numFiles);
using var streamSwitch = new CStreamSwitch();
streamSwitch.Set(this, dataVector ?? []);
streamSwitch.Set(this, dataVector);
for (var i = 0; i < numFiles; i++)
{
@@ -161,7 +164,7 @@ internal partial class ArchiveReader
}
private void ReadDateTimeVector(
List<byte[]>? dataVector,
List<byte[]> dataVector,
int numFiles,
Action<int, DateTime?> action
) =>
@@ -172,14 +175,14 @@ internal partial class ArchiveReader
);
private void ReadAttributeVector(
List<byte[]>? dataVector,
List<byte[]> dataVector,
int numFiles,
Action<int, uint?> action
)
{
var boolVector = ReadOptionalBitVector(numFiles);
using var streamSwitch = new CStreamSwitch();
streamSwitch.Set(this, dataVector ?? []);
streamSwitch.Set(this, dataVector);
for (var i = 0; i < numFiles; i++)
{
if (boolVector[i])
@@ -199,14 +202,25 @@ internal partial class ArchiveReader
private void GetNextFolderItem(CFolder folder)
{
#if DEBUG
Log.WriteLine("-- GetNextFolderItem --");
Log.PushIndent();
#endif
try
{
var numCoders = ReadNum();
#if DEBUG
Log.WriteLine("NumCoders: " + numCoders);
#endif
folder._coders = new List<CCoderInfo>(numCoders);
var numInStreams = 0;
var numOutStreams = 0;
for (var i = 0; i < numCoders; i++)
{
#if DEBUG
Log.WriteLine("-- Coder --");
Log.PushIndent();
#endif
try
{
var coder = new CCoderInfo();
@@ -216,6 +230,18 @@ internal partial class ArchiveReader
var idSize = (mainByte & 0xF);
var longId = new byte[idSize];
ReadBytes(longId, 0, idSize);
#if DEBUG
Log.WriteLine(
"MethodId: "
+ string.Join(
"",
Enumerable
.Range(0, idSize)
.Select(x => longId[x].ToString("x2"))
.ToArray()
)
);
#endif
if (idSize > 8)
{
throw new NotSupportedException();
@@ -231,9 +257,21 @@ internal partial class ArchiveReader
{
coder._numInStreams = ReadNum();
coder._numOutStreams = ReadNum();
#if DEBUG
Log.WriteLine(
"Complex Stream (In: "
+ coder._numInStreams
+ " - Out: "
+ coder._numOutStreams
+ ")"
);
#endif
}
else
{
#if DEBUG
Log.WriteLine("Simple Stream (In: 1 - Out: 1)");
#endif
coder._numInStreams = 1;
coder._numOutStreams = 1;
}
@@ -243,6 +281,15 @@ internal partial class ArchiveReader
var propsSize = ReadNum();
coder._props = new byte[propsSize];
ReadBytes(coder._props, 0, propsSize);
#if DEBUG
Log.WriteLine(
"Settings: "
+ string.Join(
"",
coder._props.Select(bt => bt.ToString("x2")).ToArray()
)
);
#endif
}
if ((mainByte & 0x80) != 0)
@@ -253,18 +300,33 @@ internal partial class ArchiveReader
numInStreams += coder._numInStreams;
numOutStreams += coder._numOutStreams;
}
finally { }
finally
{
#if DEBUG
Log.PopIndent();
#endif
}
}
var numBindPairs = numOutStreams - 1;
folder._bindPairs = new List<CBindPair>(numBindPairs);
#if DEBUG
Log.WriteLine("BindPairs: " + numBindPairs);
Log.PushIndent();
#endif
for (var i = 0; i < numBindPairs; i++)
{
var bp = new CBindPair();
bp._inIndex = ReadNum();
bp._outIndex = ReadNum();
folder._bindPairs.Add(bp);
#if DEBUG
Log.WriteLine("#" + i + " - In: " + bp._inIndex + " - Out: " + bp._outIndex);
#endif
}
#if DEBUG
Log.PopIndent();
#endif
if (numInStreams < numBindPairs)
{
@@ -280,6 +342,9 @@ internal partial class ArchiveReader
{
if (folder.FindBindPairForInStream(i) < 0)
{
#if DEBUG
Log.WriteLine("Single PackStream: #" + i);
#endif
folder._packStreams.Add(i);
break;
}
@@ -292,18 +357,37 @@ internal partial class ArchiveReader
}
else
{
#if DEBUG
Log.WriteLine("Multiple PackStreams ...");
Log.PushIndent();
#endif
for (var i = 0; i < numPackStreams; i++)
{
var num = ReadNum();
#if DEBUG
Log.WriteLine("#" + i + " - " + num);
#endif
folder._packStreams.Add(num);
}
#if DEBUG
Log.PopIndent();
#endif
}
}
finally { }
finally
{
#if DEBUG
Log.PopIndent();
#endif
}
}
private List<uint?> ReadHashDigests(int count)
{
#if DEBUG
Log.Write("ReadHashDigests:");
#endif
var defined = ReadOptionalBitVector(count);
var digests = new List<uint?>(count);
for (var i = 0; i < count; i++)
@@ -311,13 +395,23 @@ internal partial class ArchiveReader
if (defined[i])
{
var crc = ReadUInt32();
#if DEBUG
Log.Write(" " + crc.ToString("x8"));
#endif
digests.Add(crc);
}
else
{
#if DEBUG
Log.Write(" ########");
#endif
digests.Add(null);
}
}
#if DEBUG
Log.WriteLine();
#endif
return digests;
}
@@ -327,21 +421,40 @@ internal partial class ArchiveReader
out List<uint?> packCrCs
)
{
#if DEBUG
Log.WriteLine("-- ReadPackInfo --");
Log.PushIndent();
#endif
try
{
packCrCs = null!;
packCrCs = null;
dataOffset = checked((long)ReadNumber());
#if DEBUG
Log.WriteLine("DataOffset: " + dataOffset);
#endif
var numPackStreams = ReadNum();
#if DEBUG
Log.WriteLine("NumPackStreams: " + numPackStreams);
#endif
WaitAttribute(BlockType.Size);
packSizes = new List<long>(numPackStreams);
#if DEBUG
Log.Write("Sizes:");
#endif
for (var i = 0; i < numPackStreams; i++)
{
var size = checked((long)ReadNumber());
#if DEBUG
Log.Write(" " + size);
#endif
packSizes.Add(size);
}
#if DEBUG
Log.WriteLine();
#endif
BlockType? type;
for (; ; )
@@ -368,19 +481,31 @@ internal partial class ArchiveReader
}
}
}
finally { }
finally
{
#if DEBUG
Log.PopIndent();
#endif
}
}
private void ReadUnpackInfo(List<byte[]>? dataVector, out List<CFolder> folders)
private void ReadUnpackInfo(List<byte[]> dataVector, out List<CFolder> folders)
{
#if DEBUG
Log.WriteLine("-- ReadUnpackInfo --");
Log.PushIndent();
#endif
try
{
WaitAttribute(BlockType.Folder);
var numFolders = ReadNum();
#if DEBUG
Log.WriteLine("NumFolders: {0}", numFolders);
#endif
using (var streamSwitch = new CStreamSwitch())
{
streamSwitch.Set(this, dataVector ?? []);
streamSwitch.Set(this, dataVector);
//folders.Clear();
//folders.Reserve(numFolders);
@@ -396,15 +521,27 @@ internal partial class ArchiveReader
}
WaitAttribute(BlockType.CodersUnpackSize);
#if DEBUG
Log.WriteLine("UnpackSizes:");
#endif
for (var i = 0; i < numFolders; i++)
{
var folder = folders[i];
#if DEBUG
Log.Write(" #" + i + ":");
#endif
var numOutStreams = folder.GetNumOutStreams();
for (var j = 0; j < numOutStreams; j++)
{
var size = checked((long)ReadNumber());
#if DEBUG
Log.Write(" " + size);
#endif
folder._unpackSizes.Add(size);
}
#if DEBUG
Log.WriteLine();
#endif
}
for (; ; )
@@ -428,7 +565,12 @@ internal partial class ArchiveReader
SkipData();
}
}
finally { }
finally
{
#if DEBUG
Log.PopIndent();
#endif
}
}
private void ReadSubStreamsInfo(
@@ -438,9 +580,13 @@ internal partial class ArchiveReader
out List<uint?> digests
)
{
#if DEBUG
Log.WriteLine("-- ReadSubStreamsInfo --");
Log.PushIndent();
#endif
try
{
numUnpackStreamsInFolders = null!;
numUnpackStreamsInFolders = null;
BlockType? type;
for (; ; )
@@ -449,11 +595,20 @@ internal partial class ArchiveReader
if (type == BlockType.NumUnpackStream)
{
numUnpackStreamsInFolders = new List<int>(folders.Count);
#if DEBUG
Log.Write("NumUnpackStreams:");
#endif
for (var i = 0; i < folders.Count; i++)
{
var num = ReadNum();
#if DEBUG
Log.Write(" " + num);
#endif
numUnpackStreamsInFolders.Add(num);
}
#if DEBUG
Log.WriteLine();
#endif
continue;
}
if (type is BlockType.Crc or BlockType.Size)
@@ -486,17 +641,26 @@ internal partial class ArchiveReader
{
continue;
}
#if DEBUG
Log.Write("#{0} StreamSizes:", i);
#endif
long sum = 0;
for (var j = 1; j < numSubstreams; j++)
{
if (type == BlockType.Size)
{
var size = checked((long)ReadNumber());
#if DEBUG
Log.Write(" " + size);
#endif
unpackSizes.Add(size);
sum += size;
}
}
unpackSizes.Add(folders[i].GetUnpackSize() - sum);
#if DEBUG
Log.WriteLine(" - rest: " + unpackSizes.Last());
#endif
}
if (type == BlockType.Size)
{
@@ -515,7 +679,7 @@ internal partial class ArchiveReader
numDigestsTotal += numSubstreams;
}
digests = null!;
digests = null;
for (; ; )
{
@@ -532,7 +696,7 @@ internal partial class ArchiveReader
var folder = folders[i];
if (numSubstreams == 1 && folder.UnpackCrcDefined)
{
digests.Add(folder._unpackCrc!.Value);
digests.Add(folder._unpackCrc.Value);
}
else
{
@@ -568,11 +732,16 @@ internal partial class ArchiveReader
type = ReadId();
}
}
finally { }
finally
{
#if DEBUG
Log.PopIndent();
#endif
}
}
private void ReadStreamsInfo(
List<byte[]>? dataVector,
List<byte[]> dataVector,
out long dataOffset,
out List<long> packSizes,
out List<uint?> packCrCs,
@@ -582,15 +751,19 @@ internal partial class ArchiveReader
out List<uint?> digests
)
{
#if DEBUG
Log.WriteLine("-- ReadStreamsInfo --");
Log.PushIndent();
#endif
try
{
dataOffset = long.MinValue;
packSizes = null!;
packCrCs = null!;
folders = null!;
numUnpackStreamsInFolders = null!;
unpackSizes = null!;
digests = null!;
packSizes = null;
packCrCs = null;
folders = null;
numUnpackStreamsInFolders = null;
unpackSizes = null;
digests = null;
for (; ; )
{
@@ -606,7 +779,7 @@ internal partial class ArchiveReader
break;
case BlockType.SubStreamsInfo:
ReadSubStreamsInfo(
folders!,
folders,
out numUnpackStreamsInFolders,
out unpackSizes,
out digests
@@ -617,11 +790,20 @@ internal partial class ArchiveReader
}
}
}
finally { }
finally
{
#if DEBUG
Log.PopIndent();
#endif
}
}
private List<byte[]> ReadAndDecodePackedStreams(long baseOffset, IPasswordProvider pass)
{
#if DEBUG
Log.WriteLine("-- ReadAndDecodePackedStreams --");
Log.PushIndent();
#endif
try
{
ReadStreamsInfo(
@@ -682,11 +864,20 @@ internal partial class ArchiveReader
}
return dataVector;
}
finally { }
finally
{
#if DEBUG
Log.PopIndent();
#endif
}
}
private void ReadHeader(ArchiveDatabase db, IPasswordProvider getTextPassword)
{
#if DEBUG
Log.WriteLine("-- ReadHeader --");
Log.PushIndent();
#endif
try
{
var type = ReadId();
@@ -697,7 +888,7 @@ internal partial class ArchiveReader
type = ReadId();
}
List<byte[]>? dataVector = null;
List<byte[]> dataVector = null;
if (type == BlockType.AdditionalStreamsInfo)
{
dataVector = ReadAndDecodePackedStreams(
@@ -749,10 +940,13 @@ internal partial class ArchiveReader
if (type != BlockType.FilesInfo)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
var numFiles = ReadNum();
#if DEBUG
Log.WriteLine("NumFiles: " + numFiles);
#endif
db._files = new List<CFileItem>(numFiles);
for (var i = 0; i < numFiles; i++)
{
@@ -760,8 +954,8 @@ internal partial class ArchiveReader
}
var emptyStreamVector = new BitVector(numFiles);
BitVector emptyFileVector = null!;
BitVector antiFileVector = null!;
BitVector emptyFileVector = null;
BitVector antiFileVector = null;
var numEmptyStreams = 0;
for (; ; )
@@ -779,14 +973,26 @@ internal partial class ArchiveReader
case BlockType.Name:
using (var streamSwitch = new CStreamSwitch())
{
streamSwitch.Set(this, dataVector ?? []);
streamSwitch.Set(this, dataVector);
#if DEBUG
Log.Write("FileNames:");
#endif
for (var i = 0; i < db._files.Count; i++)
{
db._files[i].Name = _currentReader.ReadString();
#if DEBUG
Log.Write(" " + db._files[i].Name);
#endif
}
#if DEBUG
Log.WriteLine();
#endif
}
break;
case BlockType.WinAttributes:
#if DEBUG
Log.Write("WinAttributes:");
#endif
ReadAttributeVector(
dataVector,
numFiles,
@@ -820,75 +1026,155 @@ internal partial class ArchiveReader
}
db._files[i].Attrib = attr;
#if DEBUG
Log.Write(
" " + (attr.HasValue ? attr.Value.ToString("x8") : "n/a")
);
#endif
}
);
#if DEBUG
Log.WriteLine();
#endif
break;
case BlockType.EmptyStream:
emptyStreamVector = ReadBitVector(numFiles);
#if DEBUG
Log.Write("EmptyStream: ");
#endif
for (var i = 0; i < emptyStreamVector.Length; i++)
{
if (emptyStreamVector[i])
{
#if DEBUG
Log.Write("x");
#endif
numEmptyStreams++;
}
else { }
else
{
#if DEBUG
Log.Write(".");
#endif
}
}
#if DEBUG
Log.WriteLine();
#endif
emptyFileVector = new BitVector(numEmptyStreams);
antiFileVector = new BitVector(numEmptyStreams);
break;
case BlockType.EmptyFile:
emptyFileVector = ReadBitVector(numEmptyStreams);
#if DEBUG
Log.Write("EmptyFile: ");
for (var i = 0; i < numEmptyStreams; i++)
{
Log.Write(emptyFileVector[i] ? "x" : ".");
}
Log.WriteLine();
#endif
break;
case BlockType.Anti:
antiFileVector = ReadBitVector(numEmptyStreams);
#if DEBUG
Log.Write("Anti: ");
for (var i = 0; i < numEmptyStreams; i++)
{
Log.Write(antiFileVector[i] ? "x" : ".");
}
Log.WriteLine();
#endif
break;
case BlockType.StartPos:
#if DEBUG
Log.Write("StartPos:");
#endif
ReadNumberVector(
dataVector,
numFiles,
delegate(int i, long? startPos)
{
db._files[i].StartPos = startPos;
#if DEBUG
Log.Write(
" " + (startPos.HasValue ? startPos.Value.ToString() : "n/a")
);
#endif
}
);
#if DEBUG
Log.WriteLine();
#endif
break;
case BlockType.CTime:
#if DEBUG
Log.Write("CTime:");
#endif
ReadDateTimeVector(
dataVector,
numFiles,
delegate(int i, DateTime? time)
{
db._files[i].CTime = time;
#if DEBUG
Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
#endif
}
);
#if DEBUG
Log.WriteLine();
#endif
break;
case BlockType.ATime:
#if DEBUG
Log.Write("ATime:");
#endif
ReadDateTimeVector(
dataVector,
numFiles,
delegate(int i, DateTime? time)
{
db._files[i].ATime = time;
#if DEBUG
Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
#endif
}
);
#if DEBUG
Log.WriteLine();
#endif
break;
case BlockType.MTime:
#if DEBUG
Log.Write("MTime:");
#endif
ReadDateTimeVector(
dataVector,
numFiles,
delegate(int i, DateTime? time)
{
db._files[i].MTime = time;
#if DEBUG
Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
#endif
}
);
#if DEBUG
Log.WriteLine();
#endif
break;
case BlockType.Dummy:
#if DEBUG
Log.Write("Dummy: " + size);
#endif
for (long j = 0; j < size; j++)
{
if (ReadByte() != 0)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
}
break;
@@ -901,7 +1187,7 @@ internal partial class ArchiveReader
var checkRecordsSize = (db._majorVersion > 0 || db._minorVersion > 2);
if (checkRecordsSize && _currentReader.Offset - oldPos != size)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
}
@@ -929,7 +1215,12 @@ internal partial class ArchiveReader
}
}
}
finally { }
finally
{
#if DEBUG
Log.PopIndent();
#endif
}
}
#endregion
@@ -953,7 +1244,7 @@ internal partial class ArchiveReader
var delta = stream.Read(_header, offset, 0x20 - offset);
if (delta == 0)
{
throw new IncompleteArchiveException("Unexpected end of stream.");
throw new EndOfStreamException();
}
offset += delta;
@@ -1005,7 +1296,7 @@ internal partial class ArchiveReader
if (db._majorVersion != 0)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
var crcFromArchive = DataReader.Get32(_header, 8);
@@ -1021,7 +1312,7 @@ internal partial class ArchiveReader
if (crc != crcFromArchive)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
db._startPositionAfterHeader = _streamOrigin + 0x20;
@@ -1035,12 +1326,12 @@ internal partial class ArchiveReader
if (nextHeaderOffset < 0 || nextHeaderSize < 0 || nextHeaderSize > int.MaxValue)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
if (nextHeaderOffset > _streamEnding - db._startPositionAfterHeader)
{
throw new ArchiveOperationException("nextHeaderOffset is invalid");
throw new InvalidOperationException("nextHeaderOffset is invalid");
}
_stream.Seek(nextHeaderOffset, SeekOrigin.Current);
@@ -1050,7 +1341,7 @@ internal partial class ArchiveReader
if (Crc.Finish(Crc.Update(Crc.INIT_CRC, header, 0, header.Length)) != nextHeaderCrc)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
using (var streamSwitch = new CStreamSwitch())
@@ -1062,7 +1353,7 @@ internal partial class ArchiveReader
{
if (type != BlockType.EncodedHeader)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
var dataVector = ReadAndDecodePackedStreams(
@@ -1079,14 +1370,14 @@ internal partial class ArchiveReader
if (dataVector.Count != 1)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
streamSwitch.Set(this, dataVector[0]);
if (ReadId() != BlockType.Header)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
}
@@ -1154,7 +1445,7 @@ internal partial class ArchiveReader
public override void SetLength(long value) => throw new NotSupportedException();
private Stream? _stream;
private Stream _stream;
private long _rem;
private int _currentIndex;
@@ -1166,7 +1457,7 @@ internal partial class ArchiveReader
)
{
OpenFile();
_stream.NotNull().Dispose();
_stream.Dispose();
_stream = null;
_currentIndex++;
}
@@ -1175,10 +1466,12 @@ internal partial class ArchiveReader
private void OpenFile()
{
var index = _startIndex + _currentIndex;
var crc = _db._files[index].Crc;
if (crc.HasValue)
#if DEBUG
Log.WriteLine(_db._files[index].Name);
#endif
if (_db._files[index].Crc.HasValue)
{
_stream = new CrcCheckStream(crc.Value);
_stream = new CrcCheckStream(_db._files[index].Crc.Value);
}
else
{
@@ -1260,7 +1553,7 @@ internal partial class ArchiveReader
var firstFileIndex = db._folderStartFileIndex[folderIndex];
if (firstFileIndex > fileIndex || fileIndex - firstFileIndex >= numFilesInFolder)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
var skipCount = fileIndex - firstFileIndex;
@@ -1275,10 +1568,11 @@ internal partial class ArchiveReader
return new ReadOnlySubStream(s, db._files[fileIndex].Size);
}
public void Extract(ArchiveDatabase db, int[]? indices)
public void Extract(ArchiveDatabase db, int[] indices)
{
var allFilesMode = indices is null;
var numItems = allFilesMode ? db._files.Count : indices!.Length;
var allFilesMode = (indices is null);
var numItems = allFilesMode ? db._files.Count : indices.Length;
if (numItems == 0)
{
@@ -1288,7 +1582,7 @@ internal partial class ArchiveReader
var extractFolderInfoVector = new List<CExtractFolderInfo>();
for (var i = 0; i < numItems; i++)
{
var fileIndex = allFilesMode ? i : indices![i];
var fileIndex = allFilesMode ? i : indices[i];
var folderIndex = db._fileIndexToFolderIndexMap[fileIndex];
if (folderIndex == -1)
@@ -1314,7 +1608,7 @@ internal partial class ArchiveReader
}
}
byte[] buffer = null!;
byte[] buffer = null;
foreach (var efi in extractFolderInfoVector)
{
int startIndex;
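ReadOptionalBitVector, used throughout the header parsing above, implements a 7z convention: a leading all-defined flag byte lets fully populated vectors cost a single byte instead of a full bitmap. A sketch of the idea, assuming BitVector exposes an all-set constructor:

private BitVector ReadOptionalBitVector(int length)
{
    // 7z writes one flag byte first: nonzero means "every item is defined"
    // and the explicit per-item bitmap is omitted entirely.
    var allDefined = ReadByte();
    if (allDefined != 0)
    {
        return new BitVector(length, true); // assumed all-set constructor
    }
    return ReadBitVector(length);           // otherwise the bitmap follows
}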

View File

@@ -1,9 +1,11 @@
#nullable disable
namespace SharpCompress.Common.SevenZip;
internal class CCoderInfo
{
internal CMethodId _methodId;
internal byte[]? _props;
internal byte[] _props;
internal int _numInStreams;
internal int _numOutStreams;
}

View File

@@ -1,3 +1,5 @@
#nullable disable
using System;
namespace SharpCompress.Common.SevenZip;
@@ -8,7 +10,7 @@ internal class CFileItem
public uint? Attrib { get; internal set; }
public uint? ExtendedAttrib { get; internal set; }
public uint? Crc { get; internal set; }
public string Name { get; internal set; } = string.Empty;
public string Name { get; internal set; }
public bool HasStream { get; internal set; }
public bool IsDir { get; internal set; }

View File

@@ -30,7 +30,7 @@ internal class CFolder
}
}
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
public int GetNumOutStreams()

View File

@@ -15,6 +15,9 @@ internal struct CStreamSwitch : IDisposable
if (_active)
{
_active = false;
#if DEBUG
Log.WriteLine("[end of switch]");
#endif
}
if (_needRemove)
@@ -44,14 +47,22 @@ internal struct CStreamSwitch : IDisposable
var dataIndex = archive.ReadNum();
if (dataIndex < 0 || dataIndex >= dataVector.Count)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
#if DEBUG
Log.WriteLine("[switch to stream {0}]", dataIndex);
#endif
_archive = archive;
_archive.AddByteStream(dataVector[dataIndex], 0, dataVector[dataIndex].Length);
_needRemove = true;
_active = true;
}
else { }
else
{
#if DEBUG
Log.WriteLine("[inline data]");
#endif
}
}
}

View File

@@ -49,7 +49,7 @@ internal class DataReader
{
if (Offset >= _ending)
{
throw new IncompleteArchiveException("Unexpected end of stream.");
throw new EndOfStreamException();
}
return _buffer[Offset++];
@@ -59,7 +59,7 @@ internal class DataReader
{
if (length > _ending - Offset)
{
throw new IncompleteArchiveException("Unexpected end of stream.");
throw new EndOfStreamException();
}
while (length-- > 0)
@@ -72,10 +72,13 @@ internal class DataReader
{
if (size > _ending - Offset)
{
throw new IncompleteArchiveException("Unexpected end of stream.");
throw new EndOfStreamException();
}
Offset += (int)size;
#if DEBUG
Log.WriteLine("SkipData {0}", size);
#endif
}
public void SkipData() => SkipData(checked((long)ReadNumber()));
@@ -84,7 +87,7 @@ internal class DataReader
{
if (Offset >= _ending)
{
throw new IncompleteArchiveException("Unexpected end of stream.");
throw new EndOfStreamException();
}
var firstByte = _buffer[Offset++];
@@ -102,7 +105,7 @@ internal class DataReader
if (Offset >= _ending)
{
throw new IncompleteArchiveException("Unexpected end of stream.");
throw new EndOfStreamException();
}
value |= (ulong)_buffer[Offset++] << (8 * i);
@@ -127,7 +130,7 @@ internal class DataReader
{
if (Offset + 4 > _ending)
{
throw new IncompleteArchiveException("Unexpected end of stream.");
throw new EndOfStreamException();
}
var res = Get32(_buffer, Offset);
@@ -139,7 +142,7 @@ internal class DataReader
{
if (Offset + 8 > _ending)
{
throw new IncompleteArchiveException("Unexpected end of stream.");
throw new EndOfStreamException();
}
var res = Get64(_buffer, Offset);
@@ -155,7 +158,7 @@ internal class DataReader
{
if (ending + 2 > _ending)
{
throw new IncompleteArchiveException("Unexpected end of stream.");
throw new EndOfStreamException();
}
if (_buffer[ending] == 0 && _buffer[ending + 1] == 0)
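The ReadNumber fragments above (`var firstByte = _buffer[Offset++];` followed by `value |= (ulong)_buffer[Offset++] << (8 * i);`) decode 7z's variable-length integers: each leading set bit in the first byte adds one full trailing byte, and the first byte's remaining low bits become the most significant part. A sketch of the scheme against a plain buffer rather than the class's Offset state:

static ulong ReadNumber(byte[] buffer, ref int pos)
{
    byte first = buffer[pos++];
    byte mask = 0x80;
    ulong value = 0;
    for (var i = 0; i < 8; i++)
    {
        if ((first & mask) == 0)
        {
            // low bits of the flag byte supply the top of the value
            var high = (ulong)(first & (mask - 1));
            return value | (high << (8 * i));
        }
        value |= (ulong)buffer[pos++] << (8 * i); // one more little-endian byte
        mask >>= 1;
    }
    return value;
}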

View File

@@ -15,17 +15,6 @@ public class SharpCompressException : Exception
public class ArchiveException(string message) : SharpCompressException(message);
public class ArchiveOperationException : SharpCompressException
{
public ArchiveOperationException() { }
public ArchiveOperationException(string message)
: base(message) { }
public ArchiveOperationException(string message, Exception inner)
: base(message, inner) { }
}
public class IncompleteArchiveException(string message) : ArchiveException(message);
public class CryptographicException(string message) : SharpCompressException(message);
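Dropping ArchiveOperationException above is more than a rename: InvalidOperationException is a BCL type that does not derive from SharpCompressException, so callers filtering on the library's base exception no longer see these malformed-archive failures. The caller-side difference, sketched:

try
{
    // open an archive and enumerate its entries ...
}
catch (SharpCompressException)
{
    // previously this also caught ArchiveOperationException
}
catch (InvalidOperationException)
{
    // now needed for the malformed-header paths rewritten above
}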

View File

@@ -3,7 +3,6 @@ using System.Buffers;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
@@ -27,7 +26,7 @@ internal sealed partial class TarHeader
await WriteUstarAsync(output, cancellationToken).ConfigureAwait(false);
break;
default:
throw new ArchiveOperationException("This should be impossible...");
throw new Exception("This should be impossible...");
}
}
@@ -59,15 +58,9 @@ internal sealed partial class TarHeader
int splitIndex = -1;
for (int i = 0; i < dirSeps.Count; i++)
{
#if NET8_0_OR_GREATER
int count = ArchiveEncoding
.GetEncoding()
.GetByteCount(fullName.AsSpan(0, dirSeps[i]));
#else
int count = ArchiveEncoding
.GetEncoding()
.GetByteCount(fullName.Substring(0, dirSeps[i]));
#endif
if (count < 155)
{
splitIndex = dirSeps[i];
@@ -80,7 +73,7 @@ internal sealed partial class TarHeader
if (splitIndex == -1)
{
throw new InvalidFormatException(
throw new Exception(
$"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Directory separator not found! Try using GNU Tar format instead!"
);
}
@@ -90,14 +83,14 @@ internal sealed partial class TarHeader
if (this.ArchiveEncoding.GetEncoding().GetByteCount(namePrefix) >= 155)
{
throw new InvalidFormatException(
throw new Exception(
$"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Try using GNU Tar format instead!"
);
}
if (this.ArchiveEncoding.GetEncoding().GetByteCount(name) >= 100)
{
throw new InvalidFormatException(
throw new Exception(
$"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Try using GNU Tar format instead!"
);
}
@@ -266,7 +259,7 @@ internal sealed partial class TarHeader
LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime();
Magic = ArchiveEncoding.Decode(buffer, 257, 6).TrimNulls();
if (!string.IsNullOrEmpty(Magic) && "ustar".Equals(Magic, StringComparison.Ordinal))
if (!string.IsNullOrEmpty(Magic) && "ustar".Equals(Magic))
{
var namePrefix = ArchiveEncoding.Decode(buffer, 345, 157).TrimNulls();
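The deleted `#if NET8_0_OR_GREATER` split above picked between two overloads of Encoding.GetByteCount; the span form, available on modern targets, measures a prefix of the name without allocating an intermediate string. Side by side:

using System;
using System.Text;

var encoding = Encoding.UTF8;
var fullName = "some/long/entry/name.txt";
var split = 9;

var viaSpan = encoding.GetByteCount(fullName.AsSpan(0, split));         // no allocation
var viaSubstring = encoding.GetByteCount(fullName.Substring(0, split)); // allocates a string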

View File

@@ -3,7 +3,6 @@ using System.Buffers;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Text;
using System.Threading.Tasks;
@@ -52,7 +51,7 @@ internal sealed partial class TarHeader
WriteUstar(output);
break;
default:
throw new ArchiveOperationException("This should be impossible...");
throw new Exception("This should be impossible...");
}
}
@@ -89,15 +88,9 @@ internal sealed partial class TarHeader
int splitIndex = -1;
for (int i = 0; i < dirSeps.Count; i++)
{
#if NET8_0_OR_GREATER
int count = ArchiveEncoding
.GetEncoding()
.GetByteCount(fullName.AsSpan(0, dirSeps[i]));
#else
int count = ArchiveEncoding
.GetEncoding()
.GetByteCount(fullName.Substring(0, dirSeps[i]));
#endif
if (count < 155)
{
splitIndex = dirSeps[i];
@@ -110,7 +103,7 @@ internal sealed partial class TarHeader
if (splitIndex == -1)
{
throw new InvalidFormatException(
throw new Exception(
$"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Directory separator not found! Try using GNU Tar format instead!"
);
}
@@ -120,14 +113,14 @@ internal sealed partial class TarHeader
if (this.ArchiveEncoding.GetEncoding().GetByteCount(namePrefix) >= 155)
{
throw new InvalidFormatException(
throw new Exception(
$"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Try using GNU Tar format instead!"
);
}
if (this.ArchiveEncoding.GetEncoding().GetByteCount(name) >= 100)
{
throw new InvalidFormatException(
throw new Exception(
$"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Try using GNU Tar format instead!"
);
}
@@ -299,7 +292,7 @@ internal sealed partial class TarHeader
LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime();
Magic = ArchiveEncoding.Decode(buffer, 257, 6).TrimNulls();
if (!string.IsNullOrEmpty(Magic) && "ustar".Equals(Magic, StringComparison.Ordinal))
if (!string.IsNullOrEmpty(Magic) && "ustar".Equals(Magic))
{
var namePrefix = ArchiveEncoding.Decode(buffer, 345, 157).TrimNulls();
@@ -457,7 +450,7 @@ internal sealed partial class TarHeader
{
return 0;
}
return Convert.ToInt64(s, Constants.DefaultCultureInfo);
return Convert.ToInt64(s);
}
private static readonly byte[] eightSpaces =
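Two hunks in this file touch culture sensitivity: the ustar magic check drops StringComparison.Ordinal, and Convert.ToInt64 loses its IFormatProvider. For format-defined strings and numeric fields, the invariant forms keep parsing deterministic regardless of the host culture:

using System;
using System.Globalization;

var magic = "ustar";
var isUstar = "ustar".Equals(magic, StringComparison.Ordinal);     // byte-wise compare

var size = Convert.ToInt64("12345", CultureInfo.InvariantCulture); // fixed digit rules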

View File

@@ -6,6 +6,7 @@ namespace SharpCompress.Common.Tar;
internal class TarReadOnlySubStream : Stream
{
private readonly Stream _stream;
private readonly bool _useSyncOverAsyncDispose;
private bool _isDisposed;
private long _amountRead;
@@ -13,6 +14,7 @@ internal class TarReadOnlySubStream : Stream
public TarReadOnlySubStream(Stream stream, long bytesToRead, bool useSyncOverAsyncDispose)
{
_stream = stream;
_useSyncOverAsyncDispose = useSyncOverAsyncDispose;
BytesLeftToRead = bytesToRead;
}
@@ -20,7 +22,6 @@ internal class TarReadOnlySubStream : Stream
{
if (_isDisposed)
{
base.Dispose(disposing);
return;
}
@@ -38,11 +39,7 @@ internal class TarReadOnlySubStream : Stream
{
if (Utility.UseSyncOverAsyncDispose())
{
#pragma warning disable VSTHRD002 // Avoid problematic synchronous waits
#pragma warning disable CA2012
_stream.SkipAsync(512 - bytesInLastBlock).GetAwaiter().GetResult();
#pragma warning restore CA2012
#pragma warning restore VSTHRD002 // Avoid problematic synchronous waits
}
else
{
@@ -50,7 +47,6 @@ internal class TarReadOnlySubStream : Stream
}
}
}
base.Dispose(disposing);
}
#if !LEGACY_DOTNET
@@ -58,7 +54,6 @@ internal class TarReadOnlySubStream : Stream
{
if (_isDisposed)
{
await base.DisposeAsync().ConfigureAwait(false);
return;
}
@@ -76,7 +71,6 @@ internal class TarReadOnlySubStream : Stream
}
GC.SuppressFinalize(this);
await base.DisposeAsync().ConfigureAwait(false);
}
#endif
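The pragmas deleted above fenced a deliberate sync-over-async call: a synchronous Dispose sometimes has to drain remaining async work, and blocking with GetAwaiter().GetResult() trips both VSTHRD002 and CA2012 unless suppressed. A sketch of the guarded shape, reusing the class's _stream field and the surrounding method's bytesInLastBlock local:

#pragma warning disable VSTHRD002 // Avoid problematic synchronous waits
#pragma warning disable CA2012    // the ValueTask is consumed exactly once
// Dispose() has no async counterpart on this path, so block on the skip.
_stream.SkipAsync(512 - bytesInLastBlock).GetAwaiter().GetResult();
#pragma warning restore CA2012
#pragma warning restore VSTHRD002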

View File

@@ -9,7 +9,7 @@ public abstract partial class Volume
{
#if LEGACY_DOTNET
_actualStream.Dispose();
await Task.CompletedTask.ConfigureAwait(false);
await Task.CompletedTask;
#else
await _actualStream.DisposeAsync().ConfigureAwait(false);
#endif
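One nuance in this hunk: awaiting Task.CompletedTask always completes synchronously, so the dropped ConfigureAwait(false) changes nothing at runtime; it was there to satisfy analyzers (such as CA2007) that expect ConfigureAwait on every await in library code. The two branches annotated:

#if LEGACY_DOTNET
_actualStream.Dispose();
await Task.CompletedTask.ConfigureAwait(false); // completes inline; only placates analyzers
#else
await _actualStream.DisposeAsync().ConfigureAwait(false);
#endif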

View File

@@ -12,7 +12,10 @@ internal abstract partial class ZipFileEntry
CancellationToken cancellationToken = default
)
{
ThrowHelper.ThrowIfNull(archiveStream);
if (archiveStream is null)
{
throw new ArgumentNullException(nameof(archiveStream));
}
var buffer = new byte[12];
await archiveStream.ReadFullyAsync(buffer, 0, 12, cancellationToken).ConfigureAwait(false);

View File

@@ -44,7 +44,10 @@ internal abstract partial class ZipFileEntry(ZipHeaderType type, IArchiveEncodin
internal PkwareTraditionalEncryptionData ComposeEncryptionData(Stream archiveStream)
{
ThrowHelper.ThrowIfNull(archiveStream);
if (archiveStream is null)
{
throw new ArgumentNullException(nameof(archiveStream));
}
var buffer = new byte[12];
archiveStream.ReadFully(buffer);
@@ -92,7 +95,7 @@ internal abstract partial class ZipFileEntry(ZipHeaderType type, IArchiveEncodin
}
var type = (ExtraDataType)BinaryPrimitives.ReadUInt16LittleEndian(extra.AsSpan(i));
if (!IsDefined(type))
if (!Enum.IsDefined(typeof(ExtraDataType), type))
{
type = ExtraDataType.NotImplementedExtraData;
}
@@ -132,13 +135,4 @@ internal abstract partial class ZipFileEntry(ZipHeaderType type, IArchiveEncodin
internal uint ExternalFileAttributes { get; set; }
internal string? Comment { get; set; }
private static bool IsDefined(ExtraDataType type)
{
#if LEGACY_DOTNET
return Enum.IsDefined(typeof(ExtraDataType), type);
#else
return Enum.IsDefined(type);
#endif
}
}
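The removed IsDefined shim, together with the ThrowHelper.ThrowIfNull call earlier in this file, bridges target frameworks: .NET 5 added a generic, non-boxing Enum.IsDefined overload and .NET 6 added ArgumentNullException.ThrowIfNull, while net48 needs the classic forms. Both generations in one sketch (TfmBridges is a hypothetical holder class; ExtraDataType is the library's enum):

using System;
using System.IO;

internal static class TfmBridges
{
    internal static void GuardStream(Stream archiveStream)
    {
#if NET6_0_OR_GREATER
        ArgumentNullException.ThrowIfNull(archiveStream); // .NET 6+ throw helper
#else
        if (archiveStream is null)
        {
            throw new ArgumentNullException(nameof(archiveStream));
        }
#endif
    }

    internal static bool IsKnown(ExtraDataType type)
    {
#if NET6_0_OR_GREATER
        return Enum.IsDefined(type);                        // generic overload, no boxing
#else
        return Enum.IsDefined(typeof(ExtraDataType), type); // boxes the enum value
#endif
    }
}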

View File

@@ -20,7 +20,10 @@ internal partial class PkwareTraditionalCryptoStream
throw new NotSupportedException("This stream does not encrypt via Read()");
}
ThrowHelper.ThrowIfNull(buffer);
if (buffer is null)
{
throw new ArgumentNullException(nameof(buffer));
}
var temp = new byte[count];
var readBytes = await _stream

View File

@@ -48,7 +48,10 @@ internal partial class PkwareTraditionalCryptoStream : Stream
throw new NotSupportedException("This stream does not encrypt via Read()");
}
ThrowHelper.ThrowIfNull(buffer);
if (buffer is null)
{
throw new ArgumentNullException(nameof(buffer));
}
var temp = new byte[count];
var readBytes = _stream.Read(temp, 0, count);

View File

@@ -69,7 +69,10 @@ internal class PkwareTraditionalEncryptionData
public byte[] Encrypt(byte[] plainText, int length)
{
ThrowHelper.ThrowIfNull(plainText);
if (plainText is null)
{
throw new ArgumentNullException(nameof(plainText));
}
if (length > plainText.Length)
{

View File

@@ -1,7 +1,5 @@
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors;
using SharpCompress.Providers;
namespace SharpCompress.Common.Zip;
@@ -13,10 +11,9 @@ internal partial class SeekableZipFilePart : ZipFilePart
internal SeekableZipFilePart(
SeekableZipHeaderFactory headerFactory,
DirectoryEntryHeader header,
Stream stream,
CompressionProviderRegistry compressionProviders
Stream stream
)
: base(header, stream, compressionProviders) => _headerFactory = headerFactory;
: base(header, stream) => _headerFactory = headerFactory;
internal override Stream GetCompressedStream()
{

View File

@@ -143,7 +143,7 @@ internal sealed partial class SeekableZipHeaderFactory
is not LocalEntryHeader localEntryHeader
)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
// populate fields only known from the DirectoryEntryHeader

View File

@@ -148,7 +148,7 @@ internal sealed partial class SeekableZipHeaderFactory : ZipHeaderFactory
var signature = reader.ReadUInt32();
if (ReadHeader(signature, reader, _zip64) is not LocalEntryHeader localEntryHeader)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
// populate fields only known from the DirectoryEntryHeader

View File

@@ -1,8 +1,7 @@
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
using SharpCompress.IO;
using SharpCompress.Providers;
namespace SharpCompress.Common.Zip;
@@ -10,12 +9,8 @@ internal sealed partial class StreamingZipFilePart : ZipFilePart
{
private Stream? _decompressionStream;
internal StreamingZipFilePart(
ZipFileEntry header,
Stream stream,
CompressionProviderRegistry compressionProviders
)
: base(header, stream, compressionProviders) { }
internal StreamingZipFilePart(ZipFileEntry header, Stream stream)
: base(header, stream) { }
protected override Stream CreateBaseStream() => Header.PackedStream.NotNull();
@@ -52,6 +47,11 @@ internal sealed partial class StreamingZipFilePart : ZipFilePart
// If we had TotalIn / TotalOut we could have used them
Header.CompressedSize = _decompressionStream.Position;
if (_decompressionStream is DeflateStream deflateStream)
{
stream.Position = 0;
}
Skipped = true;
}
var reader = new BinaryReader(stream, System.Text.Encoding.Default, leaveOpen: true);

View File

@@ -82,7 +82,7 @@ internal sealed partial class StreamingZipHeaderFactory
private ZipHeader? _current;
public ZipHeader Current =>
_current ?? throw new ArchiveOperationException("No current header is available.");
_current ?? throw new InvalidOperationException("No current header is available.");
/// <summary>
/// Advances to the next ZIP header in the stream, honoring streaming data descriptors where applicable.

View File

@@ -13,7 +13,6 @@ internal partial class WinzipAesCryptoStream
{
if (_isDisposed)
{
await base.DisposeAsync().ConfigureAwait(false);
return;
}
_isDisposed = true;
@@ -28,7 +27,6 @@ internal partial class WinzipAesCryptoStream
ArrayPool<byte>.Shared.Return(authBytes);
await _stream.DisposeAsync().ConfigureAwait(false);
}
await base.DisposeAsync().ConfigureAwait(false);
}
#endif
@@ -99,7 +97,7 @@ internal partial class WinzipAesCryptoStream
{
if (_isFinalBlock)
{
throw new ArchiveOperationException();
throw new InvalidOperationException();
}
var bytesRemaining = last - offset;

Some files were not shown because too many files have changed in this diff.