Compare commits


2 Commits

Author         SHA1        Message                                                                Date
Adam Hathcock  8a54f253d5  Merge pull request #1200 from adamhathcock/adam/fix-async-7z-seeking  2026-02-11 12:35:18 +00:00
Adam Hathcock  d0baa16502  Fix 7z seeking to be contiguous in async too                           2026-02-11 12:16:19 +00:00
67 changed files with 304 additions and 291 deletions

View File

@@ -299,19 +299,6 @@ dotnet_diagnostic.CA2251.severity = error
dotnet_diagnostic.CA2252.severity = none
dotnet_diagnostic.CA2254.severity = suggestion
; High volume analyzers requiring extensive refactoring - set to suggestion temporarily
dotnet_diagnostic.CA1835.severity = suggestion
dotnet_diagnostic.CA1510.severity = suggestion
dotnet_diagnostic.CA1512.severity = suggestion
dotnet_diagnostic.CA1844.severity = suggestion
dotnet_diagnostic.CA1825.severity = suggestion
dotnet_diagnostic.CA1712.severity = suggestion
dotnet_diagnostic.CA2022.severity = suggestion
dotnet_diagnostic.CA1850.severity = suggestion
dotnet_diagnostic.CA2263.severity = suggestion
dotnet_diagnostic.CA2012.severity = suggestion
dotnet_diagnostic.CA1001.severity = suggestion
dotnet_diagnostic.CS0169.severity = error
dotnet_diagnostic.CS0219.severity = error
dotnet_diagnostic.CS0649.severity = suggestion
@@ -331,9 +318,9 @@ dotnet_diagnostic.MVC1000.severity = suggestion
dotnet_diagnostic.RZ10012.severity = error
dotnet_diagnostic.IDE0004.severity = suggestion # redundant cast
dotnet_diagnostic.IDE0004.severity = error # redundant cast
dotnet_diagnostic.IDE0005.severity = suggestion
dotnet_diagnostic.IDE0007.severity = suggestion # Use var
dotnet_diagnostic.IDE0007.severity = error # Use var
dotnet_diagnostic.IDE0011.severity = error # Use braces on if statements
dotnet_diagnostic.IDE0010.severity = silent # populate switch
dotnet_diagnostic.IDE0017.severity = suggestion # initialization can be simplified
@@ -347,7 +334,7 @@ dotnet_diagnostic.IDE0028.severity = silent # expression body for accessors
dotnet_diagnostic.IDE0032.severity = suggestion # Use auto property
dotnet_diagnostic.IDE0033.severity = error # prefer tuple name
dotnet_diagnostic.IDE0037.severity = suggestion # simplify anonymous type
dotnet_diagnostic.IDE0040.severity = suggestion # modifiers required
dotnet_diagnostic.IDE0040.severity = error # modifiers required
dotnet_diagnostic.IDE0041.severity = error # simplify null
dotnet_diagnostic.IDE0042.severity = error # deconstruct variable
dotnet_diagnostic.IDE0044.severity = suggestion # make field only when possible
@@ -361,12 +348,6 @@ dotnet_diagnostic.IDE0060.severity = suggestion # unused parameters
dotnet_diagnostic.IDE0061.severity = suggestion # local expression body
dotnet_diagnostic.IDE0062.severity = suggestion # local to static
dotnet_diagnostic.IDE0063.severity = error # simplify using
[tests/**/*.cs]
dotnet_diagnostic.CA1861.severity = suggestion
dotnet_diagnostic.IDE0042.severity = suggestion
dotnet_diagnostic.IDE0051.severity = suggestion
dotnet_diagnostic.IDE0063.severity = suggestion
dotnet_diagnostic.IDE0066.severity = suggestion # switch expression
dotnet_diagnostic.IDE0072.severity = suggestion # Populate switch - forces population of all cases even when default specified
dotnet_diagnostic.IDE0078.severity = suggestion # use pattern matching
@@ -378,7 +359,7 @@ dotnet_diagnostic.IDE0200.severity = suggestion # lambda not needed
dotnet_diagnostic.IDE1006.severity = suggestion # Naming rule violation: These words cannot contain lower case characters
dotnet_diagnostic.IDE0260.severity = suggestion # Use pattern matching
dotnet_diagnostic.IDE0270.severity = suggestion # Null check simplification
dotnet_diagnostic.IDE0290.severity = suggestion # Primary Constructor
dotnet_diagnostic.IDE0290.severity = error # Primary Constructor
dotnet_diagnostic.IDE0300.severity = suggestion # Collection
dotnet_diagnostic.IDE0305.severity = suggestion # Collection ToList
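The hunks above escalate several IDE style rules from suggestion to error. As a hedged illustration of what one of them enforces, IDE0290 at error rejects a constructor that does nothing but assign its parameters and asks for a C# 12 primary constructor instead; the types below are hypothetical, not from this repository:

using System.IO;

// IDE0290 (at error) flags this: the constructor only assigns its parameter.
public class EntryReaderBefore
{
    private readonly Stream _stream;

    public EntryReaderBefore(Stream stream) => _stream = stream;
}

// The fix IDE0290 suggests: a C# 12 primary constructor.
public class EntryReaderAfter(Stream stream)
{
    private readonly Stream _stream = stream;
}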

View File

@@ -8,6 +8,8 @@
<CodeAnalysisTreatWarningsAsErrors>true</CodeAnalysisTreatWarningsAsErrors>
<EnforceCodeStyleInBuild>true</EnforceCodeStyleInBuild>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<RunAnalyzersDuringLiveAnalysis>False</RunAnalyzersDuringLiveAnalysis>
<RunAnalyzersDuringBuild>False</RunAnalyzersDuringBuild>
<ManagePackageVersionsCentrally>true</ManagePackageVersionsCentrally>
<RestorePackagesWithLockFile>true</RestorePackagesWithLockFile>
<CentralPackageTransitivePinningEnabled>true</CentralPackageTransitivePinningEnabled>

View File

@@ -1,6 +1,5 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
@@ -115,19 +114,14 @@ Target(
{
var (version, isPrerelease) = await GetVersion();
Console.WriteLine($"VERSION={version}");
Console.WriteLine(
$"PRERELEASE={isPrerelease.ToString().ToLower(CultureInfo.InvariantCulture)}"
);
Console.WriteLine($"PRERELEASE={isPrerelease.ToString().ToLower()}");
// Write to environment file for GitHub Actions
var githubOutput = Environment.GetEnvironmentVariable("GITHUB_OUTPUT");
if (!string.IsNullOrEmpty(githubOutput))
{
File.AppendAllText(githubOutput, $"version={version}\n");
File.AppendAllText(
githubOutput,
$"prerelease={isPrerelease.ToString().ToLower(CultureInfo.InvariantCulture)}\n"
);
File.AppendAllText(githubOutput, $"prerelease={isPrerelease.ToString().ToLower()}\n");
}
}
);
@@ -369,13 +363,9 @@ Target(
: "⚪";
if (timeChange > 25 || memChange > 25)
{
hasRegressions = true;
}
if (timeChange < -25 || memChange < -25)
{
hasImprovements = true;
}
output.Add(
$"| {method} | {baseline.Mean} | {current.Mean} | {timeIcon} {timeChange:+0.0;-0.0;0}% | {baseline.Memory} | {current.Memory} | {memIcon} {memChange:+0.0;-0.0;0}% |"
@@ -555,10 +545,7 @@ static async Task<string> GetGitOutput(string command, string args)
}
catch (Exception ex)
{
throw new InvalidOperationException(
$"Git command failed: git {command} {args}\n{ex.Message}",
ex
);
throw new Exception($"Git command failed: git {command} {args}\n{ex.Message}", ex);
}
}
@@ -588,7 +575,7 @@ static Dictionary<string, BenchmarkMetric> ParseBenchmarkResults(string markdown
var line = lines[i].Trim();
// Look for table rows with benchmark data
if (line.StartsWith('|') && line.Contains("'", StringComparison.Ordinal) && i > 0)
if (line.StartsWith("|") && line.Contains("'") && i > 0)
{
var parts = line.Split('|', StringSplitOptions.TrimEntries);
if (parts.Length >= 5)
@@ -601,10 +588,10 @@ static Dictionary<string, BenchmarkMetric> ParseBenchmarkResults(string markdown
for (int j = parts.Length - 2; j >= 2; j--)
{
if (
parts[j].Contains("KB", StringComparison.Ordinal)
|| parts[j].Contains("MB", StringComparison.Ordinal)
|| parts[j].Contains("GB", StringComparison.Ordinal)
|| parts[j].Contains('B')
parts[j].Contains("KB")
|| parts[j].Contains("MB")
|| parts[j].Contains("GB")
|| parts[j].Contains("B")
)
{
memoryStr = parts[j];
@@ -637,21 +624,17 @@ static Dictionary<string, BenchmarkMetric> ParseBenchmarkResults(string markdown
static double ParseTimeValue(string timeStr)
{
if (string.IsNullOrWhiteSpace(timeStr) || timeStr == "N/A" || timeStr == "NA")
{
return 0;
}
// Remove thousands separators and parse
timeStr = timeStr.Replace(",", "").Trim();
var match = Regex.Match(timeStr, @"([\d.]+)\s*(\w+)");
if (!match.Success)
{
return 0;
}
var value = double.Parse(match.Groups[1].Value);
var unit = match.Groups[2].Value.ToLower(CultureInfo.InvariantCulture);
var unit = match.Groups[2].Value.ToLower();
// Convert to microseconds for comparison
return unit switch
@@ -667,20 +650,16 @@ static double ParseTimeValue(string timeStr)
static double ParseMemoryValue(string memStr)
{
if (string.IsNullOrWhiteSpace(memStr) || memStr == "N/A" || memStr == "NA")
{
return 0;
}
memStr = memStr.Replace(",", "").Trim();
var match = Regex.Match(memStr, @"([\d.]+)\s*(\w+)");
if (!match.Success)
{
return 0;
}
var value = double.Parse(match.Groups[1].Value);
var unit = match.Groups[2].Value.ToUpper(CultureInfo.InvariantCulture);
var unit = match.Groups[2].Value.ToUpper();
// Convert to KB for comparison
return unit switch
@@ -696,9 +675,7 @@ static double ParseMemoryValue(string memStr)
static double CalculateChange(double baseline, double current)
{
if (baseline == 0)
{
return 0;
}
return ((current - baseline) / baseline) * 100;
}
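A worked pass through the threshold logic above: CalculateChange returns the delta as a percentage of baseline, so a mean moving from 80 µs to 104 µs gives ((104 − 80) / 80) × 100 = +30% and trips the > 25 regression check, while 80 µs dropping to 56 µs gives −30% and counts as an improvement. A baseline of 0 is reported as no change to avoid dividing by zero.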

View File

@@ -40,7 +40,10 @@ public partial class GZipArchive
{
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
}
await using var writer = new GZipWriter(stream, options);
await using var writer = new GZipWriter(
stream,
options as GZipWriterOptions ?? new GZipWriterOptions(options)
);
await foreach (
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
)
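The `options as GZipWriterOptions ?? new GZipWriterOptions(options)` expression is a downcast-or-wrap: reuse the caller's options when the gzip-specific type was passed, otherwise promote the generic options through a copy constructor. The same pattern recurs below for the Tar and Zip writers. A minimal self-contained sketch, with illustrative stand-ins for the SharpCompress types:

// Illustrative stand-ins; in SharpCompress these are WriterOptions and
// GZipWriterOptions, and the diff implies a copy constructor like this one.
class WriterOptions
{
    public bool LeaveStreamOpen;
}

class GZipWriterOptions : WriterOptions
{
    public GZipWriterOptions() { }

    public GZipWriterOptions(WriterOptions other) => LeaveStreamOpen = other.LeaveStreamOpen;
}

static class OptionsCoercion
{
    // Reuse the caller's options when the gzip-specific type was passed,
    // otherwise wrap the generic options so format defaults still apply.
    public static GZipWriterOptions Coerce(WriterOptions options) =>
        options as GZipWriterOptions ?? new GZipWriterOptions(options);
}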

View File

@@ -67,7 +67,10 @@ public partial class GZipArchive
{
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
}
using var writer = new GZipWriter(stream, options);
using var writer = new GZipWriter(
stream,
options as GZipWriterOptions ?? new GZipWriterOptions(options)
);
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();

View File

@@ -182,15 +182,15 @@ public partial class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, Sev
);
}
// Wrap with SyncOnlyStream to work around LZMA async bugs
// Return a ReadOnlySubStream that reads from the shared folder stream
return CreateEntryStream(
new SyncOnlyStream(
new ReadOnlySubStream(_currentFolderStream, entry.Size, leaveOpen: true)
)
new ReadOnlySubStream(_currentFolderStream, entry.Size, leaveOpen: true)
);
}
protected override ValueTask<EntryStream> GetEntryStreamAsync(
CancellationToken cancellationToken = default
) => new(GetEntryStream());
public override void Dispose()
{
_currentFolderStream?.Dispose();
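The change above hands out read-only sub-streams over the shared folder stream (with or without the SyncOnlyStream wrapper), so a solid 7z folder is decoded once and its entries are read contiguously instead of reopening the decompression stream per entry. A minimal sketch of the bounded-view idea, assuming the shared stream is owned and disposed by the archive (leaveOpen semantics):

using System;
using System.IO;

// Illustrative only: a bounded, forward-only view over a shared stream.
sealed class BoundedReadStream : Stream
{
    private readonly Stream _inner; // shared folder stream, left open
    private long _remaining;        // bytes this entry may still read

    public BoundedReadStream(Stream inner, long length)
    {
        _inner = inner;
        _remaining = length;
    }

    public override int Read(byte[] buffer, int offset, int count)
    {
        if (_remaining == 0) return 0;
        var read = _inner.Read(buffer, offset, (int)Math.Min(count, _remaining));
        _remaining -= read;
        return read;
    }

    public override bool CanRead => true;
    public override bool CanSeek => false;
    public override bool CanWrite => false;
    public override long Length => throw new NotSupportedException();
    public override long Position
    {
        get => throw new NotSupportedException();
        set => throw new NotSupportedException();
    }
    public override void Flush() { }
    public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
    public override void SetLength(long value) => throw new NotSupportedException();
    public override void Write(byte[] buffer, int offset, int count) => throw new NotSupportedException();
    // No Dispose override: the shared stream stays open (leaveOpen: true).
}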

View File

@@ -25,7 +25,10 @@ public partial class TarArchive
CancellationToken cancellationToken = default
)
{
using var writer = new TarWriter(stream, options);
using var writer = new TarWriter(
stream,
options as TarWriterOptions ?? new TarWriterOptions(options)
);
await foreach (
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
)

View File

@@ -124,7 +124,10 @@ public partial class TarArchive
IEnumerable<TarArchiveEntry> newEntries
)
{
using var writer = new TarWriter(stream, options);
using var writer = new TarWriter(
stream,
options as TarWriterOptions ?? new TarWriterOptions(options)
);
foreach (var entry in oldEntries.Concat(newEntries))
{
if (entry.IsDirectory)

View File

@@ -79,7 +79,10 @@ public partial class ZipArchive
CancellationToken cancellationToken = default
)
{
using var writer = new ZipWriter(stream, options);
using var writer = new ZipWriter(
stream,
options as ZipWriterOptions ?? new ZipWriterOptions(options)
);
await foreach (
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
)

View File

@@ -122,7 +122,10 @@ public partial class ZipArchive
IEnumerable<ZipArchiveEntry> newEntries
)
{
using var writer = new ZipWriter(stream, options);
using var writer = new ZipWriter(
stream,
options as ZipWriterOptions ?? new ZipWriterOptions(options)
);
foreach (var entry in oldEntries.Concat(newEntries))
{
if (entry.IsDirectory)

View File

@@ -2,7 +2,6 @@ using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.ArcLzw;
using SharpCompress.Compressors.Lzw;
using SharpCompress.Compressors.RLE90;
using SharpCompress.Compressors.Squeezed;

View File

@@ -8,7 +8,6 @@ using SharpCompress.Common.GZip;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.ArcLzw;
using SharpCompress.Compressors.Lzw;
using SharpCompress.Compressors.RLE90;
using SharpCompress.Compressors.Squeezed;

View File

@@ -18,6 +18,7 @@ public enum ArjHeaderType
public abstract partial class ArjHeader
{
private const int FIRST_HDR_SIZE = 34;
private const ushort ARJ_MAGIC = 0xEA60;
public ArjHeader(ArjHeaderType type)

View File

@@ -10,6 +10,9 @@ namespace SharpCompress.Common.Arj.Headers;
public partial class ArjMainHeader : ArjHeader
{
private const int FIRST_HDR_SIZE = 34;
private const ushort ARJ_MAGIC = 0xEA60;
public ArchiveEncoding ArchiveEncoding { get; }
public int ArchiverVersionNumber { get; private set; }

View File

@@ -91,8 +91,8 @@ internal class AsyncMarkingBinaryReader
}
public async ValueTask<ulong> ReadRarVIntAsync(
int maxBytes = 10,
CancellationToken cancellationToken = default
CancellationToken cancellationToken = default,
int maxBytes = 10
) => await DoReadRarVIntAsync((maxBytes - 1) * 7, cancellationToken).ConfigureAwait(false);
private async ValueTask<ulong> DoReadRarVIntAsync(

View File

@@ -1,17 +1,11 @@
#nullable disable
using System.Diagnostics.CodeAnalysis;
using System.Security.Cryptography;
using System.Text;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Common.Rar;
[SuppressMessage(
"Security",
"CA5350:Do Not Use Weak Cryptographic Algorithms",
Justification = "RAR3 key derivation is SHA-1 based by format definition."
)]
internal class CryptKey3 : ICryptKey
{
const int AES_128 = 128;

View File

@@ -44,7 +44,9 @@ internal sealed partial class ArchiveHeader
PosAv = await reader.ReadInt32Async(cancellationToken).ConfigureAwait(false);
if (HasFlag(ArchiveFlagsV4.ENCRYPT_VER))
{
_ = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
EncryptionVersion = await reader
.ReadByteAsync(cancellationToken)
.ConfigureAwait(false);
}
}
}

View File

@@ -29,7 +29,7 @@ internal sealed partial class ArchiveHeader : RarHeader
PosAv = reader.ReadInt32();
if (HasFlag(ArchiveFlagsV4.ENCRYPT_VER))
{
_ = reader.ReadByte();
EncryptionVersion = reader.ReadByte();
}
}
}
@@ -44,6 +44,8 @@ internal sealed partial class ArchiveHeader : RarHeader
internal int? PosAv { get; private set; }
private byte? EncryptionVersion { get; set; }
public bool? IsEncrypted => IsRar5 ? null : HasFlag(ArchiveFlagsV4.PASSWORD);
public bool OldNumberingFormat => !IsRar5 && !HasFlag(ArchiveFlagsV4.NEW_NUMBERING);

View File

@@ -79,7 +79,7 @@ internal partial class FileHeader
CompressionMethod = (byte)((compressionInfo >> 7) & 0x7);
WindowSize = IsDirectory ? 0 : ((size_t)0x20000) << ((compressionInfo >> 10) & 0xf);
_ = await reader
HostOs = await reader
.ReadRarVIntByteAsync(cancellationToken: cancellationToken)
.ConfigureAwait(false);
@@ -222,7 +222,7 @@ internal partial class FileHeader
.ReadUInt32Async(cancellationToken)
.ConfigureAwait(false);
_ = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
HostOs = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
FileCrc = await reader.ReadBytesAsync(4, cancellationToken).ConfigureAwait(false);

View File

@@ -72,7 +72,7 @@ internal partial class FileHeader : RarHeader
// Bits 11 - 14 (0x3c00) define the minimum size of dictionary size required to extract data. Value 0 means 128 KB, 1 - 256 KB, ..., 14 - 2048 MB, 15 - 4096 MB.
WindowSize = IsDirectory ? 0 : ((size_t)0x20000) << ((compressionInfo >> 10) & 0xf);
_ = reader.ReadRarVIntByte();
HostOs = reader.ReadRarVIntByte();
var nameSize = reader.ReadRarVIntUInt16();
@@ -197,7 +197,7 @@ internal partial class FileHeader : RarHeader
var lowUncompressedSize = reader.ReadUInt32();
_ = reader.ReadByte();
HostOs = reader.ReadByte();
FileCrc = reader.ReadBytes(4);
@@ -415,6 +415,7 @@ internal partial class FileHeader : RarHeader
internal byte[]? R4Salt { get; private set; }
internal Rar5CryptoInfo? Rar5CryptoInfo { get; private set; }
private byte HostOs { get; set; }
internal uint FileAttributes { get; private set; }
internal long CompressedSize { get; private set; }
internal long UncompressedSize { get; private set; }

View File

@@ -42,20 +42,4 @@ internal sealed class NewSubHeaderType : IEquatable<NewSubHeaderType>
}
public bool Equals(NewSubHeaderType? other) => other is not null && Equals(other._bytes);
public override bool Equals(object? obj) => obj is NewSubHeaderType other && Equals(other);
public override int GetHashCode()
{
unchecked
{
var hash = 17;
foreach (byte value in _bytes)
{
hash = (hash * 31) + value;
}
return hash;
}
}
}

View File

@@ -140,7 +140,7 @@ public abstract class RarVolume : Volume
}
// we only want to load the archive header to avoid overhead but have to do the nasty thing and reset the stream
_ = GetVolumeFileParts().First();
GetVolumeFileParts().First();
Stream.Position = 0;
}
}

View File

@@ -3,7 +3,6 @@ using System.Buffers;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
@@ -27,7 +26,7 @@ internal sealed partial class TarHeader
await WriteUstarAsync(output, cancellationToken).ConfigureAwait(false);
break;
default:
throw new InvalidOperationException("This should be impossible...");
throw new Exception("This should be impossible...");
}
}
@@ -59,15 +58,9 @@ internal sealed partial class TarHeader
int splitIndex = -1;
for (int i = 0; i < dirSeps.Count; i++)
{
#if NET8_0_OR_GREATER
int count = ArchiveEncoding
.GetEncoding()
.GetByteCount(fullName.AsSpan(0, dirSeps[i]));
#else
int count = ArchiveEncoding
.GetEncoding()
.GetByteCount(fullName.Substring(0, dirSeps[i]));
#endif
if (count < 155)
{
splitIndex = dirSeps[i];
@@ -80,7 +73,7 @@ internal sealed partial class TarHeader
if (splitIndex == -1)
{
throw new InvalidDataException(
throw new Exception(
$"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Directory separator not found! Try using GNU Tar format instead!"
);
}
@@ -90,14 +83,14 @@ internal sealed partial class TarHeader
if (this.ArchiveEncoding.GetEncoding().GetByteCount(namePrefix) >= 155)
{
throw new InvalidDataException(
throw new Exception(
$"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Try using GNU Tar format instead!"
);
}
if (this.ArchiveEncoding.GetEncoding().GetByteCount(name) >= 100)
{
throw new InvalidDataException(
throw new Exception(
$"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Try using GNU Tar format instead!"
);
}
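The USTAR logic above must split an over-long path at a directory separator so the prefix field stays under 155 encoded bytes and the name field under 100; when no separator qualifies, the header cannot represent the path and the error text points at GNU tar format. A hedged sketch of the split search, scanning from the right rather than collecting separator indices as the real code does:

using System.Text;

static class UstarNameSplit
{
    public static (string prefix, string name)? TrySplit(string fullName, Encoding enc)
    {
        // Walk separators from the right so the longest valid prefix wins.
        for (var i = fullName.Length - 1; i > 0; i--)
        {
            if (fullName[i] != '/')
            {
                continue;
            }
            var prefix = fullName.Substring(0, i);
            var name = fullName.Substring(i + 1);
            if (enc.GetByteCount(prefix) < 155 && enc.GetByteCount(name) < 100)
            {
                return (prefix, name);
            }
        }
        return null; // caller throws, as both variants above do
    }
}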

View File

@@ -3,7 +3,6 @@ using System.Buffers;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Text;
using System.Threading.Tasks;
@@ -52,7 +51,7 @@ internal sealed partial class TarHeader
WriteUstar(output);
break;
default:
throw new InvalidOperationException("This should be impossible...");
throw new Exception("This should be impossible...");
}
}
@@ -89,15 +88,9 @@ internal sealed partial class TarHeader
int splitIndex = -1;
for (int i = 0; i < dirSeps.Count; i++)
{
#if NET8_0_OR_GREATER
int count = ArchiveEncoding
.GetEncoding()
.GetByteCount(fullName.AsSpan(0, dirSeps[i]));
#else
int count = ArchiveEncoding
.GetEncoding()
.GetByteCount(fullName.Substring(0, dirSeps[i]));
#endif
if (count < 155)
{
splitIndex = dirSeps[i];
@@ -110,7 +103,7 @@ internal sealed partial class TarHeader
if (splitIndex == -1)
{
throw new InvalidDataException(
throw new Exception(
$"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Directory separator not found! Try using GNU Tar format instead!"
);
}
@@ -120,14 +113,14 @@ internal sealed partial class TarHeader
if (this.ArchiveEncoding.GetEncoding().GetByteCount(namePrefix) >= 155)
{
throw new InvalidDataException(
throw new Exception(
$"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Try using GNU Tar format instead!"
);
}
if (this.ArchiveEncoding.GetEncoding().GetByteCount(name) >= 100)
{
throw new InvalidDataException(
throw new Exception(
$"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Try using GNU Tar format instead!"
);
}

View File

@@ -6,6 +6,7 @@ namespace SharpCompress.Common.Tar;
internal class TarReadOnlySubStream : Stream
{
private readonly Stream _stream;
private readonly bool _useSyncOverAsyncDispose;
private bool _isDisposed;
private long _amountRead;
@@ -13,6 +14,7 @@ internal class TarReadOnlySubStream : Stream
public TarReadOnlySubStream(Stream stream, long bytesToRead, bool useSyncOverAsyncDispose)
{
_stream = stream;
_useSyncOverAsyncDispose = useSyncOverAsyncDispose;
BytesLeftToRead = bytesToRead;
}
@@ -20,7 +22,6 @@ internal class TarReadOnlySubStream : Stream
{
if (_isDisposed)
{
base.Dispose(disposing);
return;
}
@@ -46,7 +47,6 @@ internal class TarReadOnlySubStream : Stream
}
}
}
base.Dispose(disposing);
}
#if !LEGACY_DOTNET
@@ -54,7 +54,6 @@ internal class TarReadOnlySubStream : Stream
{
if (_isDisposed)
{
await base.DisposeAsync().ConfigureAwait(false);
return;
}
@@ -72,7 +71,6 @@ internal class TarReadOnlySubStream : Stream
}
GC.SuppressFinalize(this);
await base.DisposeAsync().ConfigureAwait(false);
}
#endif

View File

@@ -13,7 +13,6 @@ internal partial class WinzipAesCryptoStream
{
if (_isDisposed)
{
await base.DisposeAsync().ConfigureAwait(false);
return;
}
_isDisposed = true;
@@ -28,7 +27,6 @@ internal partial class WinzipAesCryptoStream
ArrayPool<byte>.Shared.Return(authBytes);
await _stream.DisposeAsync().ConfigureAwait(false);
}
await base.DisposeAsync().ConfigureAwait(false);
}
#endif

View File

@@ -10,7 +10,7 @@ namespace SharpCompress.Common.Zip;
internal partial class WinzipAesCryptoStream : Stream
{
private const int BLOCK_SIZE_IN_BYTES = 16;
private readonly Aes _cipher;
private readonly SymmetricAlgorithm _cipher;
private readonly byte[] _counter = new byte[BLOCK_SIZE_IN_BYTES];
private readonly Stream _stream;
private readonly ICryptoTransform _transform;
@@ -35,7 +35,7 @@ internal partial class WinzipAesCryptoStream : Stream
_transform = _cipher.CreateEncryptor(winzipAesEncryptionData.KeyBytes, iv);
}
private Aes CreateCipher(WinzipAesEncryptionData winzipAesEncryptionData)
private SymmetricAlgorithm CreateCipher(WinzipAesEncryptionData winzipAesEncryptionData)
{
var cipher = Aes.Create();
cipher.BlockSize = BLOCK_SIZE_IN_BYTES * 8;
@@ -63,7 +63,6 @@ internal partial class WinzipAesCryptoStream : Stream
{
if (_isDisposed)
{
base.Dispose(disposing);
return;
}
_isDisposed = true;
@@ -89,7 +88,6 @@ internal partial class WinzipAesCryptoStream : Stream
}
_stream.Dispose();
}
base.Dispose(disposing);
}
private async Task ReadAuthBytesAsync()
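Widening `_cipher` from Aes to SymmetricAlgorithm changes only the declared type; the stream still builds an AES-CTR keystream by ECB-encrypting a counter block and XORing it with the data. A hedged sketch of one CTR step; the counter layout and width follow the WinZip AES scheme and should be treated as illustrative:

using System.Security.Cryptography;

static class CtrStep
{
    public static void XorCtrBlock(ICryptoTransform ecbEncryptor, byte[] counter, byte[] data, int offset)
    {
        // One CTR step: encrypt the counter block, XOR the keystream into the data.
        var keystream = new byte[16];
        ecbEncryptor.TransformBlock(counter, 0, 16, keystream, 0);
        for (var i = 0; i < 16 && offset + i < data.Length; i++)
        {
            data[offset + i] ^= keystream[i];
        }

        // Little-endian counter increment for the next 16-byte block.
        for (var i = 0; i < counter.Length && ++counter[i] == 0; i++) { }
    }
}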

View File

@@ -1,16 +1,10 @@
using System;
using System.Buffers.Binary;
using System.Diagnostics.CodeAnalysis;
using System.Security.Cryptography;
using System.Text;
namespace SharpCompress.Common.Zip;
[SuppressMessage(
"Security",
"CA5379:Rfc2898DeriveBytes might be using a weak hash algorithm",
Justification = "WinZip AES specification requires PBKDF2 with SHA-1."
)]
internal class WinzipAesEncryptionData
{
private const int RFC2898_ITERATIONS = 1000;
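The removed CA5379 suppression documented why SHA-1 appears here: the WinZip AES format fixes key derivation as PBKDF2 over the password with the 1000-iteration count in RFC2898_ITERATIONS above. A hedged sketch of that derivation; the slicing into key, auth key, and the 2-byte verifier follows the WinZip spec and is illustrative:

using System.Security.Cryptography;

static class WinZipKeyDerivation
{
    // keyBytes is 16/24/32 for AES-128/192/256; the output is
    // key || auth key || 2-byte password verifier.
    public static byte[] Derive(string password, byte[] salt, int keyBytes)
    {
        using var pbkdf2 = new Rfc2898DeriveBytes(
            password,
            salt,
            iterations: 1000,
            HashAlgorithmName.SHA1
        );
        return pbkdf2.GetBytes((keyBytes * 2) + 2);
    }
}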

View File

@@ -150,22 +150,26 @@ internal abstract partial class ZipFilePart : FilePart
{
throw new NotSupportedException("LZMA with pkware encryption.");
}
using var reader = new BinaryReader(
stream,
System.Text.Encoding.Default,
leaveOpen: true
);
reader.ReadUInt16(); //LZMA version
var props = new byte[reader.ReadUInt16()];
reader.Read(props, 0, props.Length);
return LzmaStream.Create(
props,
stream,
Header.CompressedSize > 0 ? Header.CompressedSize - 4 - props.Length : -1,
FlagUtility.HasFlag(Header.Flags, HeaderFlags.Bit1)
? -1
: Header.UncompressedSize
);
using (
var reader = new BinaryReader(
stream,
System.Text.Encoding.Default,
leaveOpen: true
)
)
{
reader.ReadUInt16(); //LZMA version
var props = new byte[reader.ReadUInt16()];
reader.Read(props, 0, props.Length);
return LzmaStream.Create(
props,
stream,
Header.CompressedSize > 0 ? Header.CompressedSize - 4 - props.Length : -1,
FlagUtility.HasFlag(Header.Flags, HeaderFlags.Bit1)
? -1
: Header.UncompressedSize
);
}
}
case ZipCompressionMethod.Xz:
{
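Both shapes of the code above parse the same small preamble that zip archives place before LZMA data, which is why the later length math subtracts 4 + props.Length from the compressed size. A hedged sketch of that header read:

using System.IO;
using System.Text;

static class ZipLzmaHeader
{
    // Zip-LZMA entries start with a 2-byte LZMA SDK version, a 2-byte
    // property-block length (normally 5), then the raw properties
    // (the lc/lp/pb byte plus a 4-byte dictionary size).
    public static byte[] ReadProps(Stream stream)
    {
        using var reader = new BinaryReader(stream, Encoding.Default, leaveOpen: true);
        _ = reader.ReadUInt16();                   // LZMA version
        var props = new byte[reader.ReadUInt16()]; // property block length
        reader.Read(props, 0, props.Length);
        return props;
    }
}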

View File

@@ -1,11 +1,10 @@
using System;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.RLE90;
namespace SharpCompress.Compressors.ArcLzw;
public partial class ArcLzwStream
{
public override async Task<int> ReadAsync(

View File

@@ -3,8 +3,7 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Compressors.RLE90;
namespace SharpCompress.Compressors.ArcLzw;
using SharpCompress.Compressors.Squeezed;
public partial class ArcLzwStream : Stream
{

View File

@@ -1,7 +1,5 @@
using System;
namespace SharpCompress.Compressors.ArcLzw;
public partial class ArcLzwStream
{
public class BitReader

View File

@@ -1,12 +1,11 @@
using System;
using System.IO;
using System.IO.Compression;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.Arj;
public sealed partial class LhaStream<TDecoderConfig>
public sealed partial class LhaStream<C>
{
public override async Task<int> ReadAsync(
byte[] buffer,
@@ -21,7 +20,7 @@ public sealed partial class LhaStream<TDecoderConfig>
}
if (offset < 0 || count < 0 || (offset + count) > buffer.Length)
{
throw new ArgumentOutOfRangeException(nameof(offset));
throw new ArgumentOutOfRangeException();
}
if (_producedBytes >= _originalSize)
@@ -117,7 +116,7 @@ public sealed partial class LhaStream<TDecoderConfig>
if (numCodes > NUM_TEMP_CODELEN)
{
throw new InvalidDataException("temporary codelen table has invalid size");
throw new Exception("temporary codelen table has invalid size");
}
// read actual lengths
@@ -133,7 +132,7 @@ public sealed partial class LhaStream<TDecoderConfig>
if (3 + skip > numCodes)
{
throw new InvalidDataException("temporary codelen table has invalid size");
throw new Exception("temporary codelen table has invalid size");
}
for (int i = 3 + skip; i < numCodes; i++)
@@ -162,7 +161,7 @@ public sealed partial class LhaStream<TDecoderConfig>
if (numCodes > NUM_COMMANDS)
{
throw new InvalidDataException("commands codelen table has invalid size");
throw new Exception("commands codelen table has invalid size");
}
int index = 0;

View File

@@ -1,23 +1,23 @@
using System;
using System.Data;
using System.IO;
using System.IO.Compression;
using System.Linq;
namespace SharpCompress.Compressors.Arj;
[CLSCompliant(true)]
public sealed partial class LhaStream<TDecoderConfig> : Stream
where TDecoderConfig : ILhaDecoderConfig, new()
public sealed partial class LhaStream<C> : Stream
where C : ILhaDecoderConfig, new()
{
private readonly BitReader _bitReader;
private readonly Stream _stream;
private readonly HuffTree _commandTree;
private readonly HuffTree _offsetTree;
private int _remainingCommands;
private (int offset, int count)? _copyProgress;
private readonly RingBuffer _ringBuffer;
private readonly TDecoderConfig _config = new TDecoderConfig();
private readonly C _config = new C();
private const int NUM_COMMANDS = 510;
private const int NUM_TEMP_CODELEN = 20;
@@ -27,6 +27,7 @@ public sealed partial class LhaStream<TDecoderConfig> : Stream
public LhaStream(Stream compressedStream, int originalSize)
{
_stream = compressedStream ?? throw new ArgumentNullException(nameof(compressedStream));
_bitReader = new BitReader(compressedStream);
_ringBuffer = _config.RingBuffer;
_commandTree = new HuffTree(NUM_COMMANDS * 2);
@@ -63,7 +64,7 @@ public sealed partial class LhaStream<TDecoderConfig> : Stream
}
if (offset < 0 || count < 0 || (offset + count) > buffer.Length)
{
throw new ArgumentOutOfRangeException(nameof(offset));
throw new ArgumentOutOfRangeException();
}
if (_producedBytes >= _originalSize)
@@ -136,7 +137,7 @@ public sealed partial class LhaStream<TDecoderConfig> : Stream
if (numCodes > NUM_TEMP_CODELEN)
{
throw new InvalidDataException("temporary codelen table has invalid size");
throw new Exception("temporary codelen table has invalid size");
}
// read actual lengths
@@ -151,7 +152,7 @@ public sealed partial class LhaStream<TDecoderConfig> : Stream
if (3 + skip > numCodes)
{
throw new InvalidDataException("temporary codelen table has invalid size");
throw new Exception("temporary codelen table has invalid size");
}
for (int i = 3 + skip; i < numCodes; i++)
@@ -179,7 +180,7 @@ public sealed partial class LhaStream<TDecoderConfig> : Stream
if (numCodes > NUM_COMMANDS)
{
throw new InvalidDataException("commands codelen table has invalid size");
throw new Exception("commands codelen table has invalid size");
}
int index = 0;
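The rename between LhaStream&lt;TDecoderConfig&gt; and LhaStream&lt;C&gt; is cosmetic; the underlying mechanism is a decoder parameterized on a config type with a new() constraint, so each LHA method variant supplies its own ring buffer at compile time without virtual dispatch. A minimal sketch with hypothetical config members (the real ILhaDecoderConfig exposes at least the decoder's ring buffer):

interface ILhaDecoderConfig
{
    int WindowSize { get; }
}

sealed class Lh5Config : ILhaDecoderConfig
{
    public int WindowSize => 8192; // assumption for illustration
}

sealed class LhaDecoder<C>
    where C : ILhaDecoderConfig, new()
{
    // The new() constraint lets the decoder build its own config instance.
    private readonly C _config = new C();

    public int WindowSize => _config.WindowSize;
}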

View File

@@ -1,6 +1,8 @@
using System;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.BZip2;
@@ -52,7 +54,6 @@ public sealed partial class BZip2Stream : Stream
{
if (isDisposed || leaveOpen)
{
base.Dispose(disposing);
return;
}
isDisposed = true;
@@ -60,7 +61,6 @@ public sealed partial class BZip2Stream : Stream
{
stream.Dispose();
}
base.Dispose(disposing);
}
public CompressionMode Mode { get; private set; }

View File

@@ -447,7 +447,6 @@ internal sealed class CBZip2OutputStream : Stream
{
if (disposed)
{
base.Dispose(disposing);
return;
}
@@ -461,7 +460,6 @@ internal sealed class CBZip2OutputStream : Stream
}
bsStream = null;
}
base.Dispose(disposing);
}
public void Finish()

View File

@@ -1,6 +1,5 @@
using System;
using System.IO;
using System.IO.Compression;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
@@ -279,7 +278,7 @@ public partial class ExplodeStream
if (literalResult.returnCode != 0)
{
throw new InvalidDataException("Error decoding literal value");
throw new Exception("Error decoding literal value");
}
huftPointer = literalResult.huftPointer;
@@ -319,7 +318,7 @@ public partial class ExplodeStream
if (distanceResult.returnCode != 0)
{
throw new InvalidDataException("Error decoding distance high bits");
throw new Exception("Error decoding distance high bits");
}
huftPointer = distanceResult.huftPointer;
@@ -335,7 +334,7 @@ public partial class ExplodeStream
if (lengthResult.returnCode != 0)
{
throw new InvalidDataException("Error decoding coded length");
throw new Exception("Error decoding coded length");
}
huftPointer = lengthResult.huftPointer;

View File

@@ -1,6 +1,5 @@
using System;
using System.IO;
using System.IO.Compression;
using SharpCompress.Common.Zip.Headers;
namespace SharpCompress.Compressors.Explode;
@@ -697,7 +696,7 @@ public partial class ExplodeStream : Stream
) != 0
)
{
throw new InvalidDataException("Error decoding literal value");
throw new Exception("Error decoding literal value");
}
nextByte = (byte)huftPointer.Value;
@@ -736,7 +735,7 @@ public partial class ExplodeStream : Stream
) != 0
)
{
throw new InvalidDataException("Error decoding distance high bits");
throw new Exception("Error decoding distance high bits");
}
distance = windowIndex - (distance + huftPointer.Value); /* construct offset */
@@ -752,7 +751,7 @@ public partial class ExplodeStream : Stream
) != 0
)
{
throw new InvalidDataException("Error decoding coded length");
throw new Exception("Error decoding coded length");
}
length = huftPointer.Value;

View File

@@ -18,9 +18,9 @@ public sealed class BranchExecFilter
ARCH_x86_ALIGNMENT = 1,
ARCH_PowerPC_ALIGNMENT = 4,
ARCH_IA64_ALIGNMENT = 16,
ARCH_ARM_ALIGNMENT = ARCH_PowerPC_ALIGNMENT,
ARCH_ARM_ALIGNMENT = 4,
ARCH_ARMTHUMB_ALIGNMENT = 2,
ARCH_SPARC_ALIGNMENT = ARCH_PowerPC_ALIGNMENT,
ARCH_SPARC_ALIGNMENT = 4,
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]

View File

@@ -4,6 +4,7 @@ namespace SharpCompress.Compressors.Filters;
internal class DeltaFilter : Filter
{
private const int DISTANCE_MIN = 1;
private const int DISTANCE_MAX = 256;
private const int DISTANCE_MASK = DISTANCE_MAX - 1;

View File

@@ -99,7 +99,6 @@ public sealed partial class LZipStream : Stream
{
if (_disposed)
{
base.Dispose(disposing);
return;
}
_disposed = true;
@@ -112,7 +111,6 @@ public sealed partial class LZipStream : Stream
_originalStream?.Dispose();
}
}
base.Dispose(disposing);
}
public CompressionMode Mode { get; }

View File

@@ -281,6 +281,9 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
}
}
private const uint K_NUM_LEN_SPEC_SYMBOLS =
Base.K_NUM_LOW_LEN_SYMBOLS + Base.K_NUM_MID_LEN_SYMBOLS;
private class LenPriceTableEncoder : LenEncoder
{
private readonly uint[] _prices = new uint[
@@ -1229,6 +1232,12 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
}
}
private bool ChangePair(uint smallDist, uint bigDist)
{
const int kDif = 7;
return (smallDist < ((uint)(1) << (32 - kDif)) && bigDist >= (smallDist << kDif));
}
private void WriteEndMarker(uint posState)
{
if (!_writeEndMark)
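A worked reading of the ChangePair helper shown above: with kDif = 7 it asks whether bigDist is at least 2^7 = 128 times smallDist, and the guard smallDist &lt; 2^25 keeps smallDist &lt;&lt; 7 from overflowing 32 bits. For example, smallDist = 100 pairs with bigDist = 12800 (true) but not with 12799 (false); the encoder uses this to prefer a match at a much nearer distance.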

View File

@@ -559,7 +559,6 @@ public partial class LzwStream : Stream
baseInputStream.Dispose();
}
}
base.Dispose(disposing);
}
#endregion Stream Overrides

View File

@@ -21,7 +21,7 @@ public partial class RunLength90Stream
if (offset < 0 || count < 0 || offset + count > buffer.Length)
{
throw new ArgumentOutOfRangeException(nameof(offset));
throw new ArgumentOutOfRangeException();
}
int bytesWritten = 0;

View File

@@ -60,7 +60,7 @@ public partial class RunLength90Stream : Stream
if (offset < 0 || count < 0 || offset + count > buffer.Length)
{
throw new ArgumentOutOfRangeException(nameof(offset));
throw new ArgumentOutOfRangeException();
}
int bytesWritten = 0;

View File

@@ -54,6 +54,7 @@ internal partial class RarBLAKE2spStream : RarStream
internal byte[] b;
internal int bufferPosition;
internal UInt32 lastNodeFlag;
UInt32[] dummy;
public BLAKE2S()
{
@@ -61,6 +62,7 @@ internal partial class RarBLAKE2spStream : RarStream
t = new uint[2];
f = new uint[2];
b = new byte[BLAKE2S_BLOCK_SIZE];
dummy = new uint[2];
}
};

View File

@@ -19,7 +19,7 @@ internal class BitStream
31U,
63U,
(uint)sbyte.MaxValue,
byte.MaxValue,
(uint)byte.MaxValue,
511U,
1023U,
2047U,
@@ -27,7 +27,7 @@ internal class BitStream
8191U,
16383U,
(uint)short.MaxValue,
ushort.MaxValue,
(uint)ushort.MaxValue,
};
public BitStream(byte[] src, int srcLen)
@@ -62,7 +62,7 @@ internal class BitStream
_bitsLeft += 8;
}
}
result = (int)(_bitBuffer & _maskBits[nbits]);
result = (int)((long)_bitBuffer & (long)_maskBits[nbits]);
_bitBuffer >>= nbits;
_bitsLeft -= nbits;
return result;
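The _maskBits table above maps n to the n-bit mask (2^n − 1), so extracting nbits is a mask-and-shift over the refilled bit buffer. A hedged sketch of just that step, with the refill and bounds handling from the original omitted and the buffer width illustrative:

static class BitReading
{
    public static int ReadBits(ref uint bitBuffer, ref int bitsLeft, int nbits, uint[] maskBits)
    {
        var result = (int)(bitBuffer & maskBits[nbits]); // keep the low nbits
        bitBuffer >>= nbits;                             // consume them
        bitsLeft -= nbits;
        return result;
    }
}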

View File

@@ -7,6 +7,7 @@ namespace SharpCompress.Compressors.Shrink;
internal partial class ShrinkStream : Stream
{
private Stream inStream;
private CompressionMode _compressionMode;
private ulong _compressedSize;
private long _uncompressedSize;
@@ -23,6 +24,7 @@ internal partial class ShrinkStream : Stream
)
{
inStream = stream;
_compressionMode = compressionMode;
_compressedSize = (ulong)compressedSize;
_uncompressedSize = uncompressedSize;

View File

@@ -10,6 +10,7 @@ namespace SharpCompress.Compressors.Squeezed;
public partial class SqueezeStream : Stream
{
private readonly Stream _stream;
private readonly int _compressedSize;
private const int NUMVALS = 257;
private const int SPEOF = 256;
@@ -18,6 +19,7 @@ public partial class SqueezeStream : Stream
private SqueezeStream(Stream stream, int compressedSize)
{
_stream = stream ?? throw new ArgumentNullException(nameof(stream));
_compressedSize = compressedSize;
}
public static SqueezeStream Create(Stream stream, int compressedSize)
@@ -61,41 +63,43 @@ public partial class SqueezeStream : Stream
private Stream BuildDecodedStream()
{
using var binaryReader = new BinaryReader(_stream, Encoding.Default, leaveOpen: true);
int numnodes = binaryReader.ReadUInt16();
if (numnodes >= NUMVALS || numnodes == 0)
using (var binaryReader = new BinaryReader(_stream, Encoding.Default, leaveOpen: true))
{
return new MemoryStream(Array.Empty<byte>());
}
int numnodes = binaryReader.ReadUInt16();
var dnode = new int[numnodes, 2];
for (int j = 0; j < numnodes; j++)
{
dnode[j, 0] = binaryReader.ReadInt16();
dnode[j, 1] = binaryReader.ReadInt16();
}
var bitReader = new BitReader(_stream);
var huffmanDecoded = new MemoryStream();
int i = 0;
while (true)
{
i = dnode[i, bitReader.ReadBit() ? 1 : 0];
if (i < 0)
if (numnodes >= NUMVALS || numnodes == 0)
{
i = -(i + 1);
if (i == SPEOF)
{
break;
}
huffmanDecoded.WriteByte((byte)i);
i = 0;
return new MemoryStream(Array.Empty<byte>());
}
}
huffmanDecoded.Position = 0;
return new RunLength90Stream(huffmanDecoded, (int)huffmanDecoded.Length);
var dnode = new int[numnodes, 2];
for (int j = 0; j < numnodes; j++)
{
dnode[j, 0] = binaryReader.ReadInt16();
dnode[j, 1] = binaryReader.ReadInt16();
}
var bitReader = new BitReader(_stream);
var huffmanDecoded = new MemoryStream();
int i = 0;
while (true)
{
i = dnode[i, bitReader.ReadBit() ? 1 : 0];
if (i < 0)
{
i = -(i + 1);
if (i == SPEOF)
{
break;
}
huffmanDecoded.WriteByte((byte)i);
i = 0;
}
}
huffmanDecoded.Position = 0;
return new RunLength90Stream(huffmanDecoded, (int)huffmanDecoded.Length);
}
}
}
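A hedged summary of the SQ decode above: each dnode row holds the child indices for bit 0 and bit 1; a negative entry n is a leaf carrying symbol −(n + 1), and the special symbol 256 (SPEOF) ends the stream, after which the Huffman-decoded bytes pass through RLE90 run-length expansion. The walk, in isolation:

using System;

static class SqueezeDecode
{
    public static int DecodeSymbol(int[,] dnode, Func<bool> readBit)
    {
        var i = 0;
        while (i >= 0)
        {
            i = dnode[i, readBit() ? 1 : 0]; // descend one level per bit
        }
        return -(i + 1); // compare against SPEOF (256) for end-of-stream
    }
}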

View File

@@ -40,12 +40,12 @@ public abstract class BlockFilter : ReadOnlyStream
public static BlockFilter Read(BinaryReader reader)
{
var filterType = (FilterTypes)reader.ReadXZInteger();
if (!FILTER_MAP.TryGetValue(filterType, out var createFilter))
if (!FILTER_MAP.ContainsKey(filterType))
{
throw new NotImplementedException($"Filter {filterType} has not yet been implemented");
}
var filter = createFilter();
var filter = FILTER_MAP[filterType]();
var sizeOfProperties = reader.ReadXZInteger();
if (sizeOfProperties > int.MaxValue)
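One side of the hunk above resolves the filter factory with TryGetValue, a single dictionary probe; the other uses ContainsKey followed by the indexer, probing the same key twice. A small sketch of the single-probe form, with illustrative stand-in types:

using System;
using System.Collections.Generic;

static class FilterResolution
{
    public static Func<int> Resolve(Dictionary<string, Func<int>> map, string key)
    {
        if (!map.TryGetValue(key, out var createFilter))
        {
            throw new NotImplementedException($"Filter {key} has not yet been implemented");
        }
        return createFilter;
    }
}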

View File

@@ -10,18 +10,18 @@ internal static partial class MultiByteIntegers
{
public static async ValueTask<ulong> ReadXZIntegerAsync(
this BinaryReader reader,
int maxBytes = 9,
CancellationToken cancellationToken = default
CancellationToken cancellationToken = default,
int MaxBytes = 9
)
{
if (maxBytes <= 0)
if (MaxBytes <= 0)
{
throw new ArgumentOutOfRangeException(nameof(maxBytes));
throw new ArgumentOutOfRangeException(nameof(MaxBytes));
}
if (maxBytes > 9)
if (MaxBytes > 9)
{
maxBytes = 9;
MaxBytes = 9;
}
var LastByte = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
@@ -30,7 +30,7 @@ internal static partial class MultiByteIntegers
var i = 0;
while ((LastByte & 0x80) != 0)
{
if (++i >= maxBytes)
if (++i >= MaxBytes)
{
throw new InvalidFormatException();
}
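However the maxBytes parameter is spelled and ordered, the loop above decodes the standard XZ multi-byte integer: 7 payload bits per byte, least-significant first, with the top bit marking continuation and a 9-byte cap keeping values inside 63 bits. A compact synchronous sketch:

using System;
using System.IO;

static class XZInteger
{
    public static ulong Read(Func<byte> readByte, int maxBytes = 9)
    {
        var last = readByte();
        var value = (ulong)(last & 0x7F);
        var i = 0;
        while ((last & 0x80) != 0) // top bit set: another byte follows
        {
            if (++i >= maxBytes)
            {
                throw new InvalidDataException("invalid XZ multi-byte integer");
            }
            last = readByte();
            value |= (ulong)(last & 0x7F) << (7 * i);
        }
        return value;
    }
}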

View File

@@ -19,6 +19,7 @@ public sealed partial class XZBlock : XZReadOnlyStream
public ulong? UncompressedSize { get; private set; }
public Stack<BlockFilter> Filters { get; private set; } = new();
public bool HeaderIsLoaded { get; private set; }
private CheckType _checkType;
private readonly int _checkSize;
private bool _streamConnected;
private int _numFilters;
@@ -32,6 +33,7 @@ public sealed partial class XZBlock : XZReadOnlyStream
public XZBlock(Stream stream, CheckType checkType, int checkSize)
: base(stream)
{
_checkType = checkType;
_checkSize = checkSize;
_startPosition = stream.Position;
}

View File

@@ -33,9 +33,7 @@ public partial class XZIndex
await VerifyIndexMarkerAsync(cancellationToken).ConfigureAwait(false);
}
NumberOfRecords = await _reader
.ReadXZIntegerAsync(cancellationToken: cancellationToken)
.ConfigureAwait(false);
NumberOfRecords = await _reader.ReadXZIntegerAsync(cancellationToken).ConfigureAwait(false);
for (ulong i = 0; i < NumberOfRecords; i++)
{
Records.Add(

View File

@@ -13,9 +13,8 @@ public partial class XZIndexRecord
)
{
var record = new XZIndexRecord();
record.UnpaddedSize = await br.ReadXZIntegerAsync(cancellationToken: cancellationToken)
.ConfigureAwait(false);
record.UncompressedSize = await br.ReadXZIntegerAsync(cancellationToken: cancellationToken)
record.UnpaddedSize = await br.ReadXZIntegerAsync(cancellationToken).ConfigureAwait(false);
record.UncompressedSize = await br.ReadXZIntegerAsync(cancellationToken)
.ConfigureAwait(false);
return record;
}

View File

@@ -12,7 +12,10 @@ namespace SharpCompress.Compressors.Xz;
public sealed partial class XZStream : XZReadOnlyStream
{
public XZStream(Stream baseStream)
: base(baseStream) { }
: base(baseStream)
{
_baseStream = baseStream;
}
protected override void Dispose(bool disposing)
{
@@ -45,6 +48,7 @@ public sealed partial class XZStream : XZReadOnlyStream
}
}
private readonly Stream _baseStream;
public XZHeader Header { get; private set; }
public XZIndex Index { get; private set; }
public XZFooter Footer { get; private set; }

View File

@@ -16,13 +16,6 @@ public partial class CompressionStream : Stream
{
if (compressor == null)
{
#if LEGACY_DOTNET
Dispose(true);
GC.SuppressFinalize(this);
await Task.CompletedTask.ConfigureAwait(false);
#else
await base.DisposeAsync().ConfigureAwait(false);
#endif
return;
}
@@ -35,12 +28,6 @@ public partial class CompressionStream : Stream
ReleaseUnmanagedResources();
GC.SuppressFinalize(this);
}
#if LEGACY_DOTNET
Dispose(true);
await Task.CompletedTask.ConfigureAwait(false);
#else
await base.DisposeAsync().ConfigureAwait(false);
#endif
}
public override async Task FlushAsync(CancellationToken cancellationToken) =>

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Buffers;
using System.IO;
using System.Threading;
@@ -84,7 +84,6 @@ public partial class CompressionStream : Stream
{
if (compressor == null)
{
base.Dispose(disposing);
return;
}
@@ -99,7 +98,6 @@ public partial class CompressionStream : Stream
{
ReleaseUnmanagedResources();
}
base.Dispose(disposing);
}
private void ReleaseUnmanagedResources()
@@ -123,8 +121,7 @@ public partial class CompressionStream : Stream
public override void Flush() => FlushInternal(ZSTD_EndDirective.ZSTD_e_flush);
private void FlushInternal(ZSTD_EndDirective directive) =>
WriteInternal(ReadOnlySpan<byte>.Empty, directive);
private void FlushInternal(ZSTD_EndDirective directive) => WriteInternal(null, directive);
public override void Write(byte[] buffer, int offset, int count) =>
Write(new ReadOnlySpan<byte>(buffer, offset, count));
@@ -141,7 +138,11 @@ public partial class CompressionStream : Stream
{
EnsureNotDisposed();
var input = new ZSTD_inBuffer_s { pos = 0, size = (nuint)buffer.Length };
var input = new ZSTD_inBuffer_s
{
pos = 0,
size = buffer != null ? (nuint)buffer.Length : 0,
};
nuint remaining;
do
{

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Buffers;
using System.IO;
using System.Threading;
@@ -90,7 +90,6 @@ public partial class DecompressionStream : Stream
{
if (decompressor == null)
{
base.Dispose(disposing);
return;
}
@@ -109,7 +108,6 @@ public partial class DecompressionStream : Stream
{
innerStream.Dispose();
}
base.Dispose(disposing);
}
public override int Read(byte[] buffer, int offset, int count) =>

View File

@@ -32,7 +32,7 @@ internal abstract unsafe class SafeZstdHandle : SafeHandle
internal sealed unsafe class SafeCctxHandle : SafeZstdHandle
{
/// <inheritdoc/>
internal SafeCctxHandle() { }
private SafeCctxHandle() { }
/// <summary>
/// Creates a new instance of <see cref="SafeCctxHandle"/>.
@@ -85,7 +85,7 @@ internal sealed unsafe class SafeCctxHandle : SafeZstdHandle
internal sealed unsafe class SafeDctxHandle : SafeZstdHandle
{
/// <inheritdoc/>
internal SafeDctxHandle() { }
private SafeDctxHandle() { }
/// <summary>
/// Creates a new instance of <see cref="SafeDctxHandle"/>.

View File

@@ -275,7 +275,6 @@ internal partial class SharpCompressStream
_ringBuffer?.Dispose();
_ringBuffer = null;
}
await base.DisposeAsync().ConfigureAwait(false);
}
#endif
}

View File

@@ -63,11 +63,7 @@ internal static class NotNullExtensions
)
where T : struct
{
if (!obj.HasValue)
{
throw new ArgumentNullException(paramName);
}
ArgumentNullException.ThrowIfNull(obj, paramName);
return obj.Value;
}
#endif

View File

@@ -20,14 +20,10 @@ internal static partial class Utility
CancellationToken cancellationToken = default
)
{
#if LEGACY_DOTNET
if (source is null)
{
throw new ArgumentNullException();
throw new ArgumentNullException(nameof(source));
}
#else
ArgumentNullException.ThrowIfNull(source);
#endif
if (buffer is null)
{

View File

@@ -247,14 +247,10 @@ internal static partial class Utility
/// </summary>
public void ReadExact(byte[] buffer, int offset, int length)
{
#if LEGACY_DOTNET
if (source is null)
{
throw new ArgumentNullException();
throw new ArgumentNullException(nameof(source));
}
#else
ArgumentNullException.ThrowIfNull(source);
#endif
if (buffer is null)
{

View File

@@ -92,7 +92,7 @@ public class LargeMemoryStream : Stream
if (offset < 0 || count < 0 || offset + count > buffer.Length)
{
throw new ArgumentOutOfRangeException(nameof(offset));
throw new ArgumentOutOfRangeException();
}
long length = Length;
@@ -137,7 +137,7 @@ public class LargeMemoryStream : Stream
if (offset < 0 || count < 0 || offset + count > buffer.Length)
{
throw new ArgumentOutOfRangeException(nameof(offset));
throw new ArgumentOutOfRangeException();
}
int bytesWritten = 0;

View File

@@ -212,9 +212,7 @@ public class RarReaderAsyncTests : ReaderTests
var file = Path.GetFileName(reader.Entry.Key).NotNull();
var folder =
Path.GetDirectoryName(reader.Entry.Key)
?? throw new InvalidOperationException(
"Entry key must have a directory name."
);
?? throw new ArgumentNullException();
var destdir = Path.Combine(SCRATCH_FILES_PATH, folder);
if (!Directory.Exists(destdir))
{

View File

@@ -195,9 +195,7 @@ public class RarReaderTests : ReaderTests
var file = Path.GetFileName(reader.Entry.Key).NotNull();
var folder =
Path.GetDirectoryName(reader.Entry.Key)
?? throw new InvalidOperationException(
"Entry key must have a directory name."
);
?? throw new ArgumentNullException();
var destdir = Path.Combine(SCRATCH_FILES_PATH, folder);
if (!Directory.Exists(destdir))
{

View File

@@ -3,6 +3,8 @@ using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.SevenZip;
using SharpCompress.Readers;
using SharpCompress.Test.Mocks;
using Xunit;
@@ -224,4 +226,95 @@ public class SevenZipArchiveAsyncTests : ArchiveTests
VerifyFiles();
}
[Fact]
public async Task SevenZipArchive_Solid_ExtractAllEntries_Contiguous_Async()
{
// This test verifies that solid archives iterate entries as contiguous streams
// rather than recreating the decompression stream for each entry
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.solid.7z");
await using var archive = SevenZipArchive.OpenAsyncArchive(testArchive);
Assert.True(((SevenZipArchive)archive).IsSolid);
await using var reader = await archive.ExtractAllEntriesAsync();
while (await reader.MoveToNextEntryAsync())
{
if (!reader.Entry.IsDirectory)
{
await reader.WriteEntryToDirectoryAsync(SCRATCH_FILES_PATH);
}
}
VerifyFiles();
}
[Fact]
public async Task SevenZipArchive_Solid_VerifyStreamReuse()
{
// This test verifies that the folder stream is reused within each folder
// and not recreated for each entry in solid archives
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.solid.7z");
await using var archive = SevenZipArchive.OpenAsyncArchive(testArchive);
Assert.True(((SevenZipArchive)archive).IsSolid);
await using var reader = await archive.ExtractAllEntriesAsync();
var sevenZipReader = Assert.IsType<SevenZipArchive.SevenZipReader>(reader);
sevenZipReader.DiagnosticsEnabled = true;
Stream? currentFolderStreamInstance = null;
object? currentFolder = null;
var entryCount = 0;
var entriesInCurrentFolder = 0;
var streamRecreationsWithinFolder = 0;
while (await reader.MoveToNextEntryAsync())
{
if (!reader.Entry.IsDirectory)
{
// Extract the entry to trigger GetEntryStream
using var entryStream = await reader.OpenEntryStreamAsync();
var buffer = new byte[4096];
while (entryStream.Read(buffer, 0, buffer.Length) > 0)
{
// Read the stream to completion
}
entryCount++;
var folderStream = sevenZipReader.DiagnosticsCurrentFolderStream;
var folder = sevenZipReader.DiagnosticsCurrentFolder;
Assert.NotNull(folderStream); // Folder stream should exist
// Check if we're in a new folder
if (currentFolder == null || !ReferenceEquals(currentFolder, folder))
{
// Starting a new folder
currentFolder = folder;
currentFolderStreamInstance = folderStream;
entriesInCurrentFolder = 1;
}
else
{
// Same folder - verify stream wasn't recreated
entriesInCurrentFolder++;
if (!ReferenceEquals(currentFolderStreamInstance, folderStream))
{
// Stream was recreated within the same folder - this is the bug we're testing for!
streamRecreationsWithinFolder++;
}
currentFolderStreamInstance = folderStream;
}
}
}
// Verify we actually tested multiple entries
Assert.True(entryCount > 1, "Test should have multiple entries to verify stream reuse");
// The critical check: within a single folder, the stream should NEVER be recreated
Assert.Equal(0, streamRecreationsWithinFolder); // Folder stream should remain the same for all entries in the same folder
}
}