Mirror of https://github.com/adamhathcock/sharpcompress.git (synced 2026-02-12 13:35:11 +00:00)

Compare commits: 0.45.1...adam/fix-e (4 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 5e4094952a | |
| | 99d8eb9265 | |
| | a9a0201ae9 | |
| | 5fe248eb45 | |
@@ -299,6 +299,19 @@ dotnet_diagnostic.CA2251.severity = error
dotnet_diagnostic.CA2252.severity = none
dotnet_diagnostic.CA2254.severity = suggestion

; High volume analyzers requiring extensive refactoring - set to suggestion temporarily
dotnet_diagnostic.CA1835.severity = suggestion
dotnet_diagnostic.CA1510.severity = suggestion
dotnet_diagnostic.CA1512.severity = suggestion
dotnet_diagnostic.CA1844.severity = suggestion
dotnet_diagnostic.CA1825.severity = suggestion
dotnet_diagnostic.CA1712.severity = suggestion
dotnet_diagnostic.CA2022.severity = suggestion
dotnet_diagnostic.CA1850.severity = suggestion
dotnet_diagnostic.CA2263.severity = suggestion
dotnet_diagnostic.CA2012.severity = suggestion
dotnet_diagnostic.CA1001.severity = suggestion

dotnet_diagnostic.CS0169.severity = error
dotnet_diagnostic.CS0219.severity = error
dotnet_diagnostic.CS0649.severity = suggestion

@@ -318,9 +331,9 @@ dotnet_diagnostic.MVC1000.severity = suggestion

dotnet_diagnostic.RZ10012.severity = error

dotnet_diagnostic.IDE0004.severity = error # redundant cast
dotnet_diagnostic.IDE0004.severity = suggestion # redundant cast
dotnet_diagnostic.IDE0005.severity = suggestion
dotnet_diagnostic.IDE0007.severity = error # Use var
dotnet_diagnostic.IDE0007.severity = suggestion # Use var
dotnet_diagnostic.IDE0011.severity = error # Use braces on if statements
dotnet_diagnostic.IDE0010.severity = silent # populate switch
dotnet_diagnostic.IDE0017.severity = suggestion # initialization can be simplified

@@ -334,7 +347,7 @@ dotnet_diagnostic.IDE0028.severity = silent # expression body for accessors
dotnet_diagnostic.IDE0032.severity = suggestion # Use auto property
dotnet_diagnostic.IDE0033.severity = error # prefer tuple name
dotnet_diagnostic.IDE0037.severity = suggestion # simplify anonymous type
dotnet_diagnostic.IDE0040.severity = error # modifiers required
dotnet_diagnostic.IDE0040.severity = suggestion # modifiers required
dotnet_diagnostic.IDE0041.severity = error # simplify null
dotnet_diagnostic.IDE0042.severity = error # deconstruct variable
dotnet_diagnostic.IDE0044.severity = suggestion # make field only when possible

@@ -348,6 +361,12 @@ dotnet_diagnostic.IDE0060.severity = suggestion # unused parameters
dotnet_diagnostic.IDE0061.severity = suggestion # local expression body
dotnet_diagnostic.IDE0062.severity = suggestion # local to static
dotnet_diagnostic.IDE0063.severity = error # simplify using

[tests/**/*.cs]
dotnet_diagnostic.CA1861.severity = suggestion
dotnet_diagnostic.IDE0042.severity = suggestion
dotnet_diagnostic.IDE0051.severity = suggestion
dotnet_diagnostic.IDE0063.severity = suggestion
dotnet_diagnostic.IDE0066.severity = suggestion # switch expression
dotnet_diagnostic.IDE0072.severity = suggestion # Populate switch - forces population of all cases even when default specified
dotnet_diagnostic.IDE0078.severity = suggestion # use pattern matching

@@ -359,7 +378,7 @@ dotnet_diagnostic.IDE0200.severity = suggestion # lambda not needed
dotnet_diagnostic.IDE1006.severity = suggestion # Naming rule violation: These words cannot contain lower case characters
dotnet_diagnostic.IDE0260.severity = suggestion # Use pattern matching
dotnet_diagnostic.IDE0270.severity = suggestion # Null check simplifcation
dotnet_diagnostic.IDE0290.severity = error # Primary Constructor
dotnet_diagnostic.IDE0290.severity = suggestion # Primary Constructor
dotnet_diagnostic.IDE0300.severity = suggestion # Collection
dotnet_diagnostic.IDE0305.severity = suggestion # Collection ToList

@@ -8,8 +8,6 @@
<CodeAnalysisTreatWarningsAsErrors>true</CodeAnalysisTreatWarningsAsErrors>
<EnforceCodeStyleInBuild>true</EnforceCodeStyleInBuild>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<RunAnalyzersDuringLiveAnalysis>False</RunAnalyzersDuringLiveAnalysis>
<RunAnalyzersDuringBuild>False</RunAnalyzersDuringBuild>
<ManagePackageVersionsCentrally>true</ManagePackageVersionsCentrally>
<RestorePackagesWithLockFile>true</RestorePackagesWithLockFile>
<CentralPackageTransitivePinningEnabled>true</CentralPackageTransitivePinningEnabled>

@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;

@@ -114,14 +115,19 @@ Target(
{
var (version, isPrerelease) = await GetVersion();
Console.WriteLine($"VERSION={version}");
Console.WriteLine($"PRERELEASE={isPrerelease.ToString().ToLower()}");
Console.WriteLine(
$"PRERELEASE={isPrerelease.ToString().ToLower(CultureInfo.InvariantCulture)}"
);

// Write to environment file for GitHub Actions
var githubOutput = Environment.GetEnvironmentVariable("GITHUB_OUTPUT");
if (!string.IsNullOrEmpty(githubOutput))
{
File.AppendAllText(githubOutput, $"version={version}\n");
File.AppendAllText(githubOutput, $"prerelease={isPrerelease.ToString().ToLower()}\n");
File.AppendAllText(
githubOutput,
$"prerelease={isPrerelease.ToString().ToLower(CultureInfo.InvariantCulture)}\n"
);
}
}
);
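The `ToLower()` → `ToLower(CultureInfo.InvariantCulture)` edits above address the culture-sensitivity analyzers: the parameterless overload uses the current thread culture, so the same code can emit different characters on a machine running under, say, a Turkish locale. For the `"True"`/`"False"` strings produced by `bool.ToString()` the result happens to be identical either way, but the rule flags every culture-dependent call. A minimal illustrative sketch of the pitfall (not part of the diff):

```csharp
using System;
using System.Globalization;

// Illustrative only: why analyzers want an explicit culture on ToLower/ToUpper.
CultureInfo.CurrentCulture = new CultureInfo("tr-TR");

Console.WriteLine("CI".ToLower());                             // "cı" – Turkish dotless ı
Console.WriteLine("CI".ToLower(CultureInfo.InvariantCulture)); // "ci"
Console.WriteLine("CI".ToLowerInvariant());                    // "ci" – equivalent shorthand
```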
@@ -363,9 +369,13 @@ Target(
: "⚪";

if (timeChange > 25 || memChange > 25)
{
hasRegressions = true;
}
if (timeChange < -25 || memChange < -25)
{
hasImprovements = true;
}

output.Add(
$"| {method} | {baseline.Mean} | {current.Mean} | {timeIcon} {timeChange:+0.0;-0.0;0}% | {baseline.Memory} | {current.Memory} | {memIcon} {memChange:+0.0;-0.0;0}% |"
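The `+0.0;-0.0;0` used for the change columns is a three-section .NET custom numeric format: the first section applies to positive values (forcing a leading `+`), the second to negative values, and the third to zero. A small self-contained check (illustrative only):

```csharp
using System;
using System.Globalization;

double[] changes = { 31.4, -12.5, 0.0 };
foreach (var change in changes)
{
    // Sections are separated by ';' and apply to positive;negative;zero values.
    Console.WriteLine(change.ToString("+0.0;-0.0;0", CultureInfo.InvariantCulture));
}
// Prints: +31.4, -12.5, 0
```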
@@ -545,7 +555,10 @@ static async Task<string> GetGitOutput(string command, string args)
}
catch (Exception ex)
{
throw new Exception($"Git command failed: git {command} {args}\n{ex.Message}", ex);
throw new InvalidOperationException(
$"Git command failed: git {command} {args}\n{ex.Message}",
ex
);
}
}

@@ -575,7 +588,7 @@ static Dictionary<string, BenchmarkMetric> ParseBenchmarkResults(string markdown
var line = lines[i].Trim();

// Look for table rows with benchmark data
if (line.StartsWith("|") && line.Contains("'") && i > 0)
if (line.StartsWith('|') && line.Contains("'", StringComparison.Ordinal) && i > 0)
{
var parts = line.Split('|', StringSplitOptions.TrimEntries);
if (parts.Length >= 5)

@@ -588,10 +601,10 @@ static Dictionary<string, BenchmarkMetric> ParseBenchmarkResults(string markdown
for (int j = parts.Length - 2; j >= 2; j--)
{
if (
parts[j].Contains("KB")
|| parts[j].Contains("MB")
|| parts[j].Contains("GB")
|| parts[j].Contains("B")
parts[j].Contains("KB", StringComparison.Ordinal)
|| parts[j].Contains("MB", StringComparison.Ordinal)
|| parts[j].Contains("GB", StringComparison.Ordinal)
|| parts[j].Contains('B')
)
{
memoryStr = parts[j];
@@ -624,17 +637,21 @@ static Dictionary<string, BenchmarkMetric> ParseBenchmarkResults(string markdown
static double ParseTimeValue(string timeStr)
{
if (string.IsNullOrWhiteSpace(timeStr) || timeStr == "N/A" || timeStr == "NA")
{
return 0;
}

// Remove thousands separators and parse
timeStr = timeStr.Replace(",", "").Trim();

var match = Regex.Match(timeStr, @"([\d.]+)\s*(\w+)");
if (!match.Success)
{
return 0;
}

var value = double.Parse(match.Groups[1].Value);
var unit = match.Groups[2].Value.ToLower();
var unit = match.Groups[2].Value.ToLower(CultureInfo.InvariantCulture);

// Convert to microseconds for comparison
return unit switch
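The hunk cuts off before the `return unit switch` arms; judging from the `// Convert to microseconds for comparison` comment, the body maps BenchmarkDotNet-style time units onto microseconds. A hypothetical reconstruction of such a switch, since the actual arms are not shown in this diff (unit names and factors here are assumptions):

```csharp
// Hypothetical sketch only – the real switch arms lie outside the hunk.
static double ToMicroseconds(double value, string unit) =>
    unit switch
    {
        "ns" => value / 1000,     // nanoseconds
        "us" or "μs" => value,    // microseconds
        "ms" => value * 1000,     // milliseconds
        "s" => value * 1_000_000, // seconds
        _ => value,               // unknown unit: leave as-is
    };
```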
@@ -650,16 +667,20 @@ static double ParseTimeValue(string timeStr)
static double ParseMemoryValue(string memStr)
{
if (string.IsNullOrWhiteSpace(memStr) || memStr == "N/A" || memStr == "NA")
{
return 0;
}

memStr = memStr.Replace(",", "").Trim();

var match = Regex.Match(memStr, @"([\d.]+)\s*(\w+)");
if (!match.Success)
{
return 0;
}

var value = double.Parse(match.Groups[1].Value);
var unit = match.Groups[2].Value.ToUpper();
var unit = match.Groups[2].Value.ToUpper(CultureInfo.InvariantCulture);

// Convert to KB for comparison
return unit switch
@@ -675,7 +696,9 @@ static double ParseMemoryValue(string memStr)
static double CalculateChange(double baseline, double current)
{
if (baseline == 0)
{
return 0;
}
return ((current - baseline) / baseline) * 100;
}

@@ -40,10 +40,7 @@ public partial class GZipArchive
|
||||
{
|
||||
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
|
||||
}
|
||||
await using var writer = new GZipWriter(
|
||||
stream,
|
||||
options as GZipWriterOptions ?? new GZipWriterOptions(options)
|
||||
);
|
||||
await using var writer = new GZipWriter(stream, options);
|
||||
await foreach (
|
||||
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
|
||||
)
|
||||
|
||||
@@ -67,10 +67,7 @@ public partial class GZipArchive
|
||||
{
|
||||
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
|
||||
}
|
||||
using var writer = new GZipWriter(
|
||||
stream,
|
||||
options as GZipWriterOptions ?? new GZipWriterOptions(options)
|
||||
);
|
||||
using var writer = new GZipWriter(stream, options);
|
||||
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
|
||||
@@ -25,10 +25,7 @@ public partial class TarArchive
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
using var writer = new TarWriter(
|
||||
stream,
|
||||
options as TarWriterOptions ?? new TarWriterOptions(options)
|
||||
);
|
||||
using var writer = new TarWriter(stream, options);
|
||||
await foreach (
|
||||
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
|
||||
)
|
||||
|
||||
@@ -124,10 +124,7 @@ public partial class TarArchive
|
||||
IEnumerable<TarArchiveEntry> newEntries
|
||||
)
|
||||
{
|
||||
using var writer = new TarWriter(
|
||||
stream,
|
||||
options as TarWriterOptions ?? new TarWriterOptions(options)
|
||||
);
|
||||
using var writer = new TarWriter(stream, options);
|
||||
foreach (var entry in oldEntries.Concat(newEntries))
|
||||
{
|
||||
if (entry.IsDirectory)
|
||||
|
||||
@@ -79,10 +79,7 @@ public partial class ZipArchive
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
using var writer = new ZipWriter(
|
||||
stream,
|
||||
options as ZipWriterOptions ?? new ZipWriterOptions(options)
|
||||
);
|
||||
using var writer = new ZipWriter(stream, options);
|
||||
await foreach (
|
||||
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
|
||||
)
|
||||
|
||||
@@ -122,10 +122,7 @@ public partial class ZipArchive
|
||||
IEnumerable<ZipArchiveEntry> newEntries
|
||||
)
|
||||
{
|
||||
using var writer = new ZipWriter(
|
||||
stream,
|
||||
options as ZipWriterOptions ?? new ZipWriterOptions(options)
|
||||
);
|
||||
using var writer = new ZipWriter(stream, options);
|
||||
foreach (var entry in oldEntries.Concat(newEntries))
|
||||
{
|
||||
if (entry.IsDirectory)
|
||||
|
||||
@@ -2,6 +2,7 @@ using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Compressors.ArcLzw;
|
||||
using SharpCompress.Compressors.Lzw;
|
||||
using SharpCompress.Compressors.RLE90;
|
||||
using SharpCompress.Compressors.Squeezed;
|
||||
|
||||
@@ -8,6 +8,7 @@ using SharpCompress.Common.GZip;
|
||||
using SharpCompress.Common.Tar;
|
||||
using SharpCompress.Common.Tar.Headers;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
using SharpCompress.Compressors.ArcLzw;
|
||||
using SharpCompress.Compressors.Lzw;
|
||||
using SharpCompress.Compressors.RLE90;
|
||||
using SharpCompress.Compressors.Squeezed;
|
||||
|
||||
@@ -18,7 +18,6 @@ public enum ArjHeaderType
|
||||
|
||||
public abstract partial class ArjHeader
|
||||
{
|
||||
private const int FIRST_HDR_SIZE = 34;
|
||||
private const ushort ARJ_MAGIC = 0xEA60;
|
||||
|
||||
public ArjHeader(ArjHeaderType type)
|
||||
|
||||
@@ -10,9 +10,6 @@ namespace SharpCompress.Common.Arj.Headers;
|
||||
|
||||
public partial class ArjMainHeader : ArjHeader
|
||||
{
|
||||
private const int FIRST_HDR_SIZE = 34;
|
||||
private const ushort ARJ_MAGIC = 0xEA60;
|
||||
|
||||
public ArchiveEncoding ArchiveEncoding { get; }
|
||||
|
||||
public int ArchiverVersionNumber { get; private set; }
|
||||
|
||||
@@ -91,8 +91,8 @@ internal class AsyncMarkingBinaryReader
|
||||
}
|
||||
|
||||
public async ValueTask<ulong> ReadRarVIntAsync(
|
||||
CancellationToken cancellationToken = default,
|
||||
int maxBytes = 10
|
||||
int maxBytes = 10,
|
||||
CancellationToken cancellationToken = default
|
||||
) => await DoReadRarVIntAsync((maxBytes - 1) * 7, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
private async ValueTask<ulong> DoReadRarVIntAsync(
|
||||
|
||||
@@ -1,11 +1,17 @@
|
||||
#nullable disable
|
||||
|
||||
using System.Diagnostics.CodeAnalysis;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using SharpCompress.Common.Rar.Headers;
|
||||
|
||||
namespace SharpCompress.Common.Rar;
|
||||
|
||||
[SuppressMessage(
|
||||
"Security",
|
||||
"CA5350:Do Not Use Weak Cryptographic Algorithms",
|
||||
Justification = "RAR3 key derivation is SHA-1 based by format definition."
|
||||
)]
|
||||
internal class CryptKey3 : ICryptKey
|
||||
{
|
||||
const int AES_128 = 128;
|
||||
|
||||
@@ -44,9 +44,7 @@ internal sealed partial class ArchiveHeader
|
||||
PosAv = await reader.ReadInt32Async(cancellationToken).ConfigureAwait(false);
|
||||
if (HasFlag(ArchiveFlagsV4.ENCRYPT_VER))
|
||||
{
|
||||
EncryptionVersion = await reader
|
||||
.ReadByteAsync(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
_ = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -29,7 +29,7 @@ internal sealed partial class ArchiveHeader : RarHeader
|
||||
PosAv = reader.ReadInt32();
|
||||
if (HasFlag(ArchiveFlagsV4.ENCRYPT_VER))
|
||||
{
|
||||
EncryptionVersion = reader.ReadByte();
|
||||
_ = reader.ReadByte();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -44,8 +44,6 @@ internal sealed partial class ArchiveHeader : RarHeader
|
||||
|
||||
internal int? PosAv { get; private set; }
|
||||
|
||||
private byte? EncryptionVersion { get; set; }
|
||||
|
||||
public bool? IsEncrypted => IsRar5 ? null : HasFlag(ArchiveFlagsV4.PASSWORD);
|
||||
|
||||
public bool OldNumberingFormat => !IsRar5 && !HasFlag(ArchiveFlagsV4.NEW_NUMBERING);
|
||||
|
||||
@@ -79,7 +79,7 @@ internal partial class FileHeader
|
||||
CompressionMethod = (byte)((compressionInfo >> 7) & 0x7);
|
||||
WindowSize = IsDirectory ? 0 : ((size_t)0x20000) << ((compressionInfo >> 10) & 0xf);
|
||||
|
||||
HostOs = await reader
|
||||
_ = await reader
|
||||
.ReadRarVIntByteAsync(cancellationToken: cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
@@ -222,7 +222,7 @@ internal partial class FileHeader
|
||||
.ReadUInt32Async(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
HostOs = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
|
||||
_ = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
FileCrc = await reader.ReadBytesAsync(4, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
|
||||
@@ -72,7 +72,7 @@ internal partial class FileHeader : RarHeader
|
||||
// Bits 11 - 14 (0x3c00) define the minimum size of dictionary size required to extract data. Value 0 means 128 KB, 1 - 256 KB, ..., 14 - 2048 MB, 15 - 4096 MB.
|
||||
WindowSize = IsDirectory ? 0 : ((size_t)0x20000) << ((compressionInfo >> 10) & 0xf);
|
||||
|
||||
HostOs = reader.ReadRarVIntByte();
|
||||
_ = reader.ReadRarVIntByte();
|
||||
|
||||
var nameSize = reader.ReadRarVIntUInt16();
|
||||
|
||||
@@ -197,7 +197,7 @@ internal partial class FileHeader : RarHeader
|
||||
|
||||
var lowUncompressedSize = reader.ReadUInt32();
|
||||
|
||||
HostOs = reader.ReadByte();
|
||||
_ = reader.ReadByte();
|
||||
|
||||
FileCrc = reader.ReadBytes(4);
|
||||
|
||||
@@ -415,7 +415,6 @@ internal partial class FileHeader : RarHeader
|
||||
|
||||
internal byte[]? R4Salt { get; private set; }
|
||||
internal Rar5CryptoInfo? Rar5CryptoInfo { get; private set; }
|
||||
private byte HostOs { get; set; }
|
||||
internal uint FileAttributes { get; private set; }
|
||||
internal long CompressedSize { get; private set; }
|
||||
internal long UncompressedSize { get; private set; }
|
||||
|
||||
@@ -42,4 +42,20 @@ internal sealed class NewSubHeaderType : IEquatable<NewSubHeaderType>
|
||||
}
|
||||
|
||||
public bool Equals(NewSubHeaderType? other) => other is not null && Equals(other._bytes);
|
||||
|
||||
public override bool Equals(object? obj) => obj is NewSubHeaderType other && Equals(other);
|
||||
|
||||
public override int GetHashCode()
|
||||
{
|
||||
unchecked
|
||||
{
|
||||
var hash = 17;
|
||||
foreach (byte value in _bytes)
|
||||
{
|
||||
hash = (hash * 31) + value;
|
||||
}
|
||||
|
||||
return hash;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -140,7 +140,7 @@ public abstract class RarVolume : Volume
|
||||
}
|
||||
|
||||
// we only want to load the archive header to avoid overhead but have to do the nasty thing and reset the stream
|
||||
GetVolumeFileParts().First();
|
||||
_ = GetVolumeFileParts().First();
|
||||
Stream.Position = 0;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,6 +3,7 @@ using System.Buffers;
|
||||
using System.Buffers.Binary;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.IO.Compression;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
@@ -26,7 +27,7 @@ internal sealed partial class TarHeader
|
||||
await WriteUstarAsync(output, cancellationToken).ConfigureAwait(false);
|
||||
break;
|
||||
default:
|
||||
throw new Exception("This should be impossible...");
|
||||
throw new InvalidOperationException("This should be impossible...");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -58,9 +59,15 @@ internal sealed partial class TarHeader
|
||||
int splitIndex = -1;
|
||||
for (int i = 0; i < dirSeps.Count; i++)
|
||||
{
|
||||
#if NET8_0_OR_GREATER
|
||||
int count = ArchiveEncoding
|
||||
.GetEncoding()
|
||||
.GetByteCount(fullName.AsSpan(0, dirSeps[i]));
|
||||
#else
|
||||
int count = ArchiveEncoding
|
||||
.GetEncoding()
|
||||
.GetByteCount(fullName.Substring(0, dirSeps[i]));
|
||||
#endif
|
||||
if (count < 155)
|
||||
{
|
||||
splitIndex = dirSeps[i];
|
||||
@@ -73,7 +80,7 @@ internal sealed partial class TarHeader
|
||||
|
||||
if (splitIndex == -1)
|
||||
{
|
||||
throw new Exception(
|
||||
throw new InvalidDataException(
|
||||
$"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Directory separator not found! Try using GNU Tar format instead!"
|
||||
);
|
||||
}
|
||||
@@ -83,14 +90,14 @@ internal sealed partial class TarHeader
|
||||
|
||||
if (this.ArchiveEncoding.GetEncoding().GetByteCount(namePrefix) >= 155)
|
||||
{
|
||||
throw new Exception(
|
||||
throw new InvalidDataException(
|
||||
$"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Try using GNU Tar format instead!"
|
||||
);
|
||||
}
|
||||
|
||||
if (this.ArchiveEncoding.GetEncoding().GetByteCount(name) >= 100)
|
||||
{
|
||||
throw new Exception(
|
||||
throw new InvalidDataException(
|
||||
$"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Try using GNU Tar format instead!"
|
||||
);
|
||||
}
|
||||
|
||||
@@ -3,6 +3,7 @@ using System.Buffers;
|
||||
using System.Buffers.Binary;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.IO.Compression;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
@@ -51,7 +52,7 @@ internal sealed partial class TarHeader
|
||||
WriteUstar(output);
|
||||
break;
|
||||
default:
|
||||
throw new Exception("This should be impossible...");
|
||||
throw new InvalidOperationException("This should be impossible...");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -88,9 +89,15 @@ internal sealed partial class TarHeader
|
||||
int splitIndex = -1;
|
||||
for (int i = 0; i < dirSeps.Count; i++)
|
||||
{
|
||||
#if NET8_0_OR_GREATER
|
||||
int count = ArchiveEncoding
|
||||
.GetEncoding()
|
||||
.GetByteCount(fullName.AsSpan(0, dirSeps[i]));
|
||||
#else
|
||||
int count = ArchiveEncoding
|
||||
.GetEncoding()
|
||||
.GetByteCount(fullName.Substring(0, dirSeps[i]));
|
||||
#endif
|
||||
if (count < 155)
|
||||
{
|
||||
splitIndex = dirSeps[i];
|
||||
@@ -103,7 +110,7 @@ internal sealed partial class TarHeader
|
||||
|
||||
if (splitIndex == -1)
|
||||
{
|
||||
throw new Exception(
|
||||
throw new InvalidDataException(
|
||||
$"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Directory separator not found! Try using GNU Tar format instead!"
|
||||
);
|
||||
}
|
||||
@@ -113,14 +120,14 @@ internal sealed partial class TarHeader
|
||||
|
||||
if (this.ArchiveEncoding.GetEncoding().GetByteCount(namePrefix) >= 155)
|
||||
{
|
||||
throw new Exception(
|
||||
throw new InvalidDataException(
|
||||
$"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Try using GNU Tar format instead!"
|
||||
);
|
||||
}
|
||||
|
||||
if (this.ArchiveEncoding.GetEncoding().GetByteCount(name) >= 100)
|
||||
{
|
||||
throw new Exception(
|
||||
throw new InvalidDataException(
|
||||
$"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Try using GNU Tar format instead!"
|
||||
);
|
||||
}
|
||||
|
||||
@@ -6,7 +6,6 @@ namespace SharpCompress.Common.Tar;
|
||||
internal class TarReadOnlySubStream : Stream
|
||||
{
|
||||
private readonly Stream _stream;
|
||||
private readonly bool _useSyncOverAsyncDispose;
|
||||
|
||||
private bool _isDisposed;
|
||||
private long _amountRead;
|
||||
@@ -14,7 +13,6 @@ internal class TarReadOnlySubStream : Stream
|
||||
public TarReadOnlySubStream(Stream stream, long bytesToRead, bool useSyncOverAsyncDispose)
|
||||
{
|
||||
_stream = stream;
|
||||
_useSyncOverAsyncDispose = useSyncOverAsyncDispose;
|
||||
BytesLeftToRead = bytesToRead;
|
||||
}
|
||||
|
||||
@@ -22,6 +20,7 @@ internal class TarReadOnlySubStream : Stream
|
||||
{
|
||||
if (_isDisposed)
|
||||
{
|
||||
base.Dispose(disposing);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -47,6 +46,7 @@ internal class TarReadOnlySubStream : Stream
|
||||
}
|
||||
}
|
||||
}
|
||||
base.Dispose(disposing);
|
||||
}
|
||||
|
||||
#if !LEGACY_DOTNET
|
||||
@@ -54,6 +54,7 @@ internal class TarReadOnlySubStream : Stream
|
||||
{
|
||||
if (_isDisposed)
|
||||
{
|
||||
await base.DisposeAsync().ConfigureAwait(false);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -71,6 +72,7 @@ internal class TarReadOnlySubStream : Stream
|
||||
}
|
||||
|
||||
GC.SuppressFinalize(this);
|
||||
await base.DisposeAsync().ConfigureAwait(false);
|
||||
}
|
||||
#endif
|
||||
|
||||
|
||||
@@ -13,6 +13,7 @@ internal partial class WinzipAesCryptoStream
|
||||
{
|
||||
if (_isDisposed)
|
||||
{
|
||||
await base.DisposeAsync().ConfigureAwait(false);
|
||||
return;
|
||||
}
|
||||
_isDisposed = true;
|
||||
@@ -27,6 +28,7 @@ internal partial class WinzipAesCryptoStream
|
||||
ArrayPool<byte>.Shared.Return(authBytes);
|
||||
await _stream.DisposeAsync().ConfigureAwait(false);
|
||||
}
|
||||
await base.DisposeAsync().ConfigureAwait(false);
|
||||
}
|
||||
#endif
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ namespace SharpCompress.Common.Zip;
|
||||
internal partial class WinzipAesCryptoStream : Stream
|
||||
{
|
||||
private const int BLOCK_SIZE_IN_BYTES = 16;
|
||||
private readonly SymmetricAlgorithm _cipher;
|
||||
private readonly Aes _cipher;
|
||||
private readonly byte[] _counter = new byte[BLOCK_SIZE_IN_BYTES];
|
||||
private readonly Stream _stream;
|
||||
private readonly ICryptoTransform _transform;
|
||||
@@ -35,7 +35,7 @@ internal partial class WinzipAesCryptoStream : Stream
|
||||
_transform = _cipher.CreateEncryptor(winzipAesEncryptionData.KeyBytes, iv);
|
||||
}
|
||||
|
||||
private SymmetricAlgorithm CreateCipher(WinzipAesEncryptionData winzipAesEncryptionData)
|
||||
private Aes CreateCipher(WinzipAesEncryptionData winzipAesEncryptionData)
|
||||
{
|
||||
var cipher = Aes.Create();
|
||||
cipher.BlockSize = BLOCK_SIZE_IN_BYTES * 8;
|
||||
@@ -63,6 +63,7 @@ internal partial class WinzipAesCryptoStream : Stream
|
||||
{
|
||||
if (_isDisposed)
|
||||
{
|
||||
base.Dispose(disposing);
|
||||
return;
|
||||
}
|
||||
_isDisposed = true;
|
||||
@@ -88,6 +89,7 @@ internal partial class WinzipAesCryptoStream : Stream
|
||||
}
|
||||
_stream.Dispose();
|
||||
}
|
||||
base.Dispose(disposing);
|
||||
}
|
||||
|
||||
private async Task ReadAuthBytesAsync()
|
||||
|
||||
@@ -1,10 +1,16 @@
|
||||
using System;
|
||||
using System.Buffers.Binary;
|
||||
using System.Diagnostics.CodeAnalysis;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
|
||||
namespace SharpCompress.Common.Zip;
|
||||
|
||||
[SuppressMessage(
|
||||
"Security",
|
||||
"CA5379:Rfc2898DeriveBytes might be using a weak hash algorithm",
|
||||
Justification = "WinZip AES specification requires PBKDF2 with SHA-1."
|
||||
)]
|
||||
internal class WinzipAesEncryptionData
|
||||
{
|
||||
private const int RFC2898_ITERATIONS = 1000;
|
||||
|
||||
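The CA5379 suppression above is justified by the format itself: WinZip AES fixes its key derivation as PBKDF2 with HMAC-SHA1 at 1000 iterations (matching the `RFC2898_ITERATIONS` constant), so the hash cannot be upgraded without breaking compatibility. A hedged sketch of what that derivation typically looks like, per the WinZip AE-2 specification rather than copied from this file (the password encoding and AES-256 key size are assumptions):

```csharp
using System.Security.Cryptography;
using System.Text;

// Sketch of WinZip-AES-style key derivation; assumes AES-256 (32-byte keys).
static (byte[] AesKey, byte[] MacKey, byte[] Verifier) DeriveKeys(string password, byte[] salt)
{
    const int keySizeBytes = 32;
    using var pbkdf2 = new Rfc2898DeriveBytes(
        Encoding.UTF8.GetBytes(password),
        salt,
        iterations: 1000,          // fixed by the WinZip AES spec
        HashAlgorithmName.SHA1);   // likewise fixed, hence the CA5379 suppression

    return (
        pbkdf2.GetBytes(keySizeBytes), // AES encryption key
        pbkdf2.GetBytes(keySizeBytes), // HMAC-SHA1 authentication key
        pbkdf2.GetBytes(2));           // 2-byte password verification value
}
```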
@@ -150,26 +150,22 @@ internal abstract partial class ZipFilePart : FilePart
|
||||
{
|
||||
throw new NotSupportedException("LZMA with pkware encryption.");
|
||||
}
|
||||
using (
|
||||
var reader = new BinaryReader(
|
||||
stream,
|
||||
System.Text.Encoding.Default,
|
||||
leaveOpen: true
|
||||
)
|
||||
)
|
||||
{
|
||||
reader.ReadUInt16(); //LZMA version
|
||||
var props = new byte[reader.ReadUInt16()];
|
||||
reader.Read(props, 0, props.Length);
|
||||
return LzmaStream.Create(
|
||||
props,
|
||||
stream,
|
||||
Header.CompressedSize > 0 ? Header.CompressedSize - 4 - props.Length : -1,
|
||||
FlagUtility.HasFlag(Header.Flags, HeaderFlags.Bit1)
|
||||
? -1
|
||||
: Header.UncompressedSize
|
||||
);
|
||||
}
|
||||
using var reader = new BinaryReader(
|
||||
stream,
|
||||
System.Text.Encoding.Default,
|
||||
leaveOpen: true
|
||||
);
|
||||
reader.ReadUInt16(); //LZMA version
|
||||
var props = new byte[reader.ReadUInt16()];
|
||||
reader.Read(props, 0, props.Length);
|
||||
return LzmaStream.Create(
|
||||
props,
|
||||
stream,
|
||||
Header.CompressedSize > 0 ? Header.CompressedSize - 4 - props.Length : -1,
|
||||
FlagUtility.HasFlag(Header.Flags, HeaderFlags.Bit1)
|
||||
? -1
|
||||
: Header.UncompressedSize
|
||||
);
|
||||
}
|
||||
case ZipCompressionMethod.Xz:
|
||||
{
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Compressors.RLE90;
|
||||
|
||||
namespace SharpCompress.Compressors.ArcLzw;
|
||||
|
||||
public partial class ArcLzwStream
|
||||
{
|
||||
public override async Task<int> ReadAsync(
|
||||
|
||||
@@ -3,7 +3,8 @@ using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using SharpCompress.Compressors.RLE90;
|
||||
using SharpCompress.Compressors.Squeezed;
|
||||
|
||||
namespace SharpCompress.Compressors.ArcLzw;
|
||||
|
||||
public partial class ArcLzwStream : Stream
|
||||
{
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
using System;
|
||||
|
||||
namespace SharpCompress.Compressors.ArcLzw;
|
||||
|
||||
public partial class ArcLzwStream
|
||||
{
|
||||
public class BitReader
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.IO.Compression;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace SharpCompress.Compressors.Arj;
|
||||
|
||||
public sealed partial class LhaStream<C>
|
||||
public sealed partial class LhaStream<TDecoderConfig>
|
||||
{
|
||||
public override async Task<int> ReadAsync(
|
||||
byte[] buffer,
|
||||
@@ -20,7 +21,7 @@ public sealed partial class LhaStream<C>
|
||||
}
|
||||
if (offset < 0 || count < 0 || (offset + count) > buffer.Length)
|
||||
{
|
||||
throw new ArgumentOutOfRangeException();
|
||||
throw new ArgumentOutOfRangeException(nameof(offset));
|
||||
}
|
||||
|
||||
if (_producedBytes >= _originalSize)
|
||||
@@ -116,7 +117,7 @@ public sealed partial class LhaStream<C>
|
||||
|
||||
if (numCodes > NUM_TEMP_CODELEN)
|
||||
{
|
||||
throw new Exception("temporary codelen table has invalid size");
|
||||
throw new InvalidDataException("temporary codelen table has invalid size");
|
||||
}
|
||||
|
||||
// read actual lengths
|
||||
@@ -132,7 +133,7 @@ public sealed partial class LhaStream<C>
|
||||
|
||||
if (3 + skip > numCodes)
|
||||
{
|
||||
throw new Exception("temporary codelen table has invalid size");
|
||||
throw new InvalidDataException("temporary codelen table has invalid size");
|
||||
}
|
||||
|
||||
for (int i = 3 + skip; i < numCodes; i++)
|
||||
@@ -161,7 +162,7 @@ public sealed partial class LhaStream<C>
|
||||
|
||||
if (numCodes > NUM_COMMANDS)
|
||||
{
|
||||
throw new Exception("commands codelen table has invalid size");
|
||||
throw new InvalidDataException("commands codelen table has invalid size");
|
||||
}
|
||||
|
||||
int index = 0;
|
||||
|
||||
@@ -1,23 +1,23 @@
|
||||
using System;
|
||||
using System.Data;
|
||||
using System.IO;
|
||||
using System.IO.Compression;
|
||||
using System.Linq;
|
||||
|
||||
namespace SharpCompress.Compressors.Arj;
|
||||
|
||||
[CLSCompliant(true)]
|
||||
public sealed partial class LhaStream<C> : Stream
|
||||
where C : ILhaDecoderConfig, new()
|
||||
public sealed partial class LhaStream<TDecoderConfig> : Stream
|
||||
where TDecoderConfig : ILhaDecoderConfig, new()
|
||||
{
|
||||
private readonly BitReader _bitReader;
|
||||
private readonly Stream _stream;
|
||||
|
||||
private readonly HuffTree _commandTree;
|
||||
private readonly HuffTree _offsetTree;
|
||||
private int _remainingCommands;
|
||||
private (int offset, int count)? _copyProgress;
|
||||
private readonly RingBuffer _ringBuffer;
|
||||
private readonly C _config = new C();
|
||||
private readonly TDecoderConfig _config = new TDecoderConfig();
|
||||
|
||||
private const int NUM_COMMANDS = 510;
|
||||
private const int NUM_TEMP_CODELEN = 20;
|
||||
@@ -27,7 +27,6 @@ public sealed partial class LhaStream<C> : Stream
|
||||
|
||||
public LhaStream(Stream compressedStream, int originalSize)
|
||||
{
|
||||
_stream = compressedStream ?? throw new ArgumentNullException(nameof(compressedStream));
|
||||
_bitReader = new BitReader(compressedStream);
|
||||
_ringBuffer = _config.RingBuffer;
|
||||
_commandTree = new HuffTree(NUM_COMMANDS * 2);
|
||||
@@ -64,7 +63,7 @@ public sealed partial class LhaStream<C> : Stream
|
||||
}
|
||||
if (offset < 0 || count < 0 || (offset + count) > buffer.Length)
|
||||
{
|
||||
throw new ArgumentOutOfRangeException();
|
||||
throw new ArgumentOutOfRangeException(nameof(offset));
|
||||
}
|
||||
|
||||
if (_producedBytes >= _originalSize)
|
||||
@@ -137,7 +136,7 @@ public sealed partial class LhaStream<C> : Stream
|
||||
|
||||
if (numCodes > NUM_TEMP_CODELEN)
|
||||
{
|
||||
throw new Exception("temporary codelen table has invalid size");
|
||||
throw new InvalidDataException("temporary codelen table has invalid size");
|
||||
}
|
||||
|
||||
// read actual lengths
|
||||
@@ -152,7 +151,7 @@ public sealed partial class LhaStream<C> : Stream
|
||||
|
||||
if (3 + skip > numCodes)
|
||||
{
|
||||
throw new Exception("temporary codelen table has invalid size");
|
||||
throw new InvalidDataException("temporary codelen table has invalid size");
|
||||
}
|
||||
|
||||
for (int i = 3 + skip; i < numCodes; i++)
|
||||
@@ -180,7 +179,7 @@ public sealed partial class LhaStream<C> : Stream
|
||||
|
||||
if (numCodes > NUM_COMMANDS)
|
||||
{
|
||||
throw new Exception("commands codelen table has invalid size");
|
||||
throw new InvalidDataException("commands codelen table has invalid size");
|
||||
}
|
||||
|
||||
int index = 0;
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace SharpCompress.Compressors.BZip2;
|
||||
|
||||
@@ -54,6 +52,7 @@ public sealed partial class BZip2Stream : Stream
|
||||
{
|
||||
if (isDisposed || leaveOpen)
|
||||
{
|
||||
base.Dispose(disposing);
|
||||
return;
|
||||
}
|
||||
isDisposed = true;
|
||||
@@ -61,6 +60,7 @@ public sealed partial class BZip2Stream : Stream
|
||||
{
|
||||
stream.Dispose();
|
||||
}
|
||||
base.Dispose(disposing);
|
||||
}
|
||||
|
||||
public CompressionMode Mode { get; private set; }
|
||||
|
||||
@@ -447,6 +447,7 @@ internal sealed class CBZip2OutputStream : Stream
|
||||
{
|
||||
if (disposed)
|
||||
{
|
||||
base.Dispose(disposing);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -460,6 +461,7 @@ internal sealed class CBZip2OutputStream : Stream
|
||||
}
|
||||
bsStream = null;
|
||||
}
|
||||
base.Dispose(disposing);
|
||||
}
|
||||
|
||||
public void Finish()
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.IO.Compression;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
@@ -278,7 +279,7 @@ public partial class ExplodeStream
|
||||
|
||||
if (literalResult.returnCode != 0)
|
||||
{
|
||||
throw new Exception("Error decoding literal value");
|
||||
throw new InvalidDataException("Error decoding literal value");
|
||||
}
|
||||
|
||||
huftPointer = literalResult.huftPointer;
|
||||
@@ -318,7 +319,7 @@ public partial class ExplodeStream
|
||||
|
||||
if (distanceResult.returnCode != 0)
|
||||
{
|
||||
throw new Exception("Error decoding distance high bits");
|
||||
throw new InvalidDataException("Error decoding distance high bits");
|
||||
}
|
||||
|
||||
huftPointer = distanceResult.huftPointer;
|
||||
@@ -334,7 +335,7 @@ public partial class ExplodeStream
|
||||
|
||||
if (lengthResult.returnCode != 0)
|
||||
{
|
||||
throw new Exception("Error decoding coded length");
|
||||
throw new InvalidDataException("Error decoding coded length");
|
||||
}
|
||||
|
||||
huftPointer = lengthResult.huftPointer;
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.IO.Compression;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
|
||||
namespace SharpCompress.Compressors.Explode;
|
||||
@@ -696,7 +697,7 @@ public partial class ExplodeStream : Stream
|
||||
) != 0
|
||||
)
|
||||
{
|
||||
throw new Exception("Error decoding literal value");
|
||||
throw new InvalidDataException("Error decoding literal value");
|
||||
}
|
||||
|
||||
nextByte = (byte)huftPointer.Value;
|
||||
@@ -735,7 +736,7 @@ public partial class ExplodeStream : Stream
|
||||
) != 0
|
||||
)
|
||||
{
|
||||
throw new Exception("Error decoding distance high bits");
|
||||
throw new InvalidDataException("Error decoding distance high bits");
|
||||
}
|
||||
|
||||
distance = windowIndex - (distance + huftPointer.Value); /* construct offset */
|
||||
@@ -751,7 +752,7 @@ public partial class ExplodeStream : Stream
|
||||
) != 0
|
||||
)
|
||||
{
|
||||
throw new Exception("Error decoding coded length");
|
||||
throw new InvalidDataException("Error decoding coded length");
|
||||
}
|
||||
|
||||
length = huftPointer.Value;
|
||||
|
||||
@@ -18,9 +18,9 @@ public sealed class BranchExecFilter
|
||||
ARCH_x86_ALIGNMENT = 1,
|
||||
ARCH_PowerPC_ALIGNMENT = 4,
|
||||
ARCH_IA64_ALIGNMENT = 16,
|
||||
ARCH_ARM_ALIGNMENT = 4,
|
||||
ARCH_ARM_ALIGNMENT = ARCH_PowerPC_ALIGNMENT,
|
||||
ARCH_ARMTHUMB_ALIGNMENT = 2,
|
||||
ARCH_SPARC_ALIGNMENT = 4,
|
||||
ARCH_SPARC_ALIGNMENT = ARCH_PowerPC_ALIGNMENT,
|
||||
}
|
||||
|
||||
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
||||
|
||||
@@ -4,7 +4,6 @@ namespace SharpCompress.Compressors.Filters;
|
||||
|
||||
internal class DeltaFilter : Filter
|
||||
{
|
||||
private const int DISTANCE_MIN = 1;
|
||||
private const int DISTANCE_MAX = 256;
|
||||
private const int DISTANCE_MASK = DISTANCE_MAX - 1;
|
||||
|
||||
|
||||
@@ -99,6 +99,7 @@ public sealed partial class LZipStream : Stream
|
||||
{
|
||||
if (_disposed)
|
||||
{
|
||||
base.Dispose(disposing);
|
||||
return;
|
||||
}
|
||||
_disposed = true;
|
||||
@@ -111,6 +112,7 @@ public sealed partial class LZipStream : Stream
|
||||
_originalStream?.Dispose();
|
||||
}
|
||||
}
|
||||
base.Dispose(disposing);
|
||||
}
|
||||
|
||||
public CompressionMode Mode { get; }
|
||||
|
||||
@@ -281,9 +281,6 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
|
||||
}
|
||||
}
|
||||
|
||||
private const uint K_NUM_LEN_SPEC_SYMBOLS =
|
||||
Base.K_NUM_LOW_LEN_SYMBOLS + Base.K_NUM_MID_LEN_SYMBOLS;
|
||||
|
||||
private class LenPriceTableEncoder : LenEncoder
|
||||
{
|
||||
private readonly uint[] _prices = new uint[
|
||||
@@ -1232,12 +1229,6 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
|
||||
}
|
||||
}
|
||||
|
||||
private bool ChangePair(uint smallDist, uint bigDist)
|
||||
{
|
||||
const int kDif = 7;
|
||||
return (smallDist < ((uint)(1) << (32 - kDif)) && bigDist >= (smallDist << kDif));
|
||||
}
|
||||
|
||||
private void WriteEndMarker(uint posState)
|
||||
{
|
||||
if (!_writeEndMark)
|
||||
|
||||
@@ -559,6 +559,7 @@ public partial class LzwStream : Stream
|
||||
baseInputStream.Dispose();
|
||||
}
|
||||
}
|
||||
base.Dispose(disposing);
|
||||
}
|
||||
|
||||
#endregion Stream Overrides
|
||||
|
||||
@@ -21,7 +21,7 @@ public partial class RunLength90Stream
|
||||
|
||||
if (offset < 0 || count < 0 || offset + count > buffer.Length)
|
||||
{
|
||||
throw new ArgumentOutOfRangeException();
|
||||
throw new ArgumentOutOfRangeException(nameof(offset));
|
||||
}
|
||||
|
||||
int bytesWritten = 0;
|
||||
|
||||
@@ -60,7 +60,7 @@ public partial class RunLength90Stream : Stream
|
||||
|
||||
if (offset < 0 || count < 0 || offset + count > buffer.Length)
|
||||
{
|
||||
throw new ArgumentOutOfRangeException();
|
||||
throw new ArgumentOutOfRangeException(nameof(offset));
|
||||
}
|
||||
|
||||
int bytesWritten = 0;
|
||||
|
||||
@@ -54,7 +54,6 @@ internal partial class RarBLAKE2spStream : RarStream
|
||||
internal byte[] b;
|
||||
internal int bufferPosition;
|
||||
internal UInt32 lastNodeFlag;
|
||||
UInt32[] dummy;
|
||||
|
||||
public BLAKE2S()
|
||||
{
|
||||
@@ -62,7 +61,6 @@ internal partial class RarBLAKE2spStream : RarStream
|
||||
t = new uint[2];
|
||||
f = new uint[2];
|
||||
b = new byte[BLAKE2S_BLOCK_SIZE];
|
||||
dummy = new uint[2];
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -19,7 +19,7 @@ internal class BitStream
|
||||
31U,
|
||||
63U,
|
||||
(uint)sbyte.MaxValue,
|
||||
(uint)byte.MaxValue,
|
||||
byte.MaxValue,
|
||||
511U,
|
||||
1023U,
|
||||
2047U,
|
||||
@@ -27,7 +27,7 @@ internal class BitStream
|
||||
8191U,
|
||||
16383U,
|
||||
(uint)short.MaxValue,
|
||||
(uint)ushort.MaxValue,
|
||||
ushort.MaxValue,
|
||||
};
|
||||
|
||||
public BitStream(byte[] src, int srcLen)
|
||||
@@ -62,7 +62,7 @@ internal class BitStream
|
||||
_bitsLeft += 8;
|
||||
}
|
||||
}
|
||||
result = (int)((long)_bitBuffer & (long)_maskBits[nbits]);
|
||||
result = (int)(_bitBuffer & _maskBits[nbits]);
|
||||
_bitBuffer >>= nbits;
|
||||
_bitsLeft -= nbits;
|
||||
return result;
|
||||
|
||||
@@ -7,7 +7,6 @@ namespace SharpCompress.Compressors.Shrink;
|
||||
internal partial class ShrinkStream : Stream
|
||||
{
|
||||
private Stream inStream;
|
||||
private CompressionMode _compressionMode;
|
||||
|
||||
private ulong _compressedSize;
|
||||
private long _uncompressedSize;
|
||||
@@ -24,7 +23,6 @@ internal partial class ShrinkStream : Stream
|
||||
)
|
||||
{
|
||||
inStream = stream;
|
||||
_compressionMode = compressionMode;
|
||||
|
||||
_compressedSize = (ulong)compressedSize;
|
||||
_uncompressedSize = uncompressedSize;
|
||||
|
||||
@@ -10,7 +10,6 @@ namespace SharpCompress.Compressors.Squeezed;
|
||||
public partial class SqueezeStream : Stream
|
||||
{
|
||||
private readonly Stream _stream;
|
||||
private readonly int _compressedSize;
|
||||
private const int NUMVALS = 257;
|
||||
private const int SPEOF = 256;
|
||||
|
||||
@@ -19,7 +18,6 @@ public partial class SqueezeStream : Stream
|
||||
private SqueezeStream(Stream stream, int compressedSize)
|
||||
{
|
||||
_stream = stream ?? throw new ArgumentNullException(nameof(stream));
|
||||
_compressedSize = compressedSize;
|
||||
}
|
||||
|
||||
public static SqueezeStream Create(Stream stream, int compressedSize)
|
||||
@@ -63,43 +61,41 @@ public partial class SqueezeStream : Stream
|
||||
|
||||
private Stream BuildDecodedStream()
|
||||
{
|
||||
using (var binaryReader = new BinaryReader(_stream, Encoding.Default, leaveOpen: true))
|
||||
using var binaryReader = new BinaryReader(_stream, Encoding.Default, leaveOpen: true);
|
||||
int numnodes = binaryReader.ReadUInt16();
|
||||
|
||||
if (numnodes >= NUMVALS || numnodes == 0)
|
||||
{
|
||||
int numnodes = binaryReader.ReadUInt16();
|
||||
|
||||
if (numnodes >= NUMVALS || numnodes == 0)
|
||||
{
|
||||
return new MemoryStream(Array.Empty<byte>());
|
||||
}
|
||||
|
||||
var dnode = new int[numnodes, 2];
|
||||
for (int j = 0; j < numnodes; j++)
|
||||
{
|
||||
dnode[j, 0] = binaryReader.ReadInt16();
|
||||
dnode[j, 1] = binaryReader.ReadInt16();
|
||||
}
|
||||
|
||||
var bitReader = new BitReader(_stream);
|
||||
var huffmanDecoded = new MemoryStream();
|
||||
int i = 0;
|
||||
|
||||
while (true)
|
||||
{
|
||||
i = dnode[i, bitReader.ReadBit() ? 1 : 0];
|
||||
if (i < 0)
|
||||
{
|
||||
i = -(i + 1);
|
||||
if (i == SPEOF)
|
||||
{
|
||||
break;
|
||||
}
|
||||
huffmanDecoded.WriteByte((byte)i);
|
||||
i = 0;
|
||||
}
|
||||
}
|
||||
|
||||
huffmanDecoded.Position = 0;
|
||||
return new RunLength90Stream(huffmanDecoded, (int)huffmanDecoded.Length);
|
||||
return new MemoryStream(Array.Empty<byte>());
|
||||
}
|
||||
|
||||
var dnode = new int[numnodes, 2];
|
||||
for (int j = 0; j < numnodes; j++)
|
||||
{
|
||||
dnode[j, 0] = binaryReader.ReadInt16();
|
||||
dnode[j, 1] = binaryReader.ReadInt16();
|
||||
}
|
||||
|
||||
var bitReader = new BitReader(_stream);
|
||||
var huffmanDecoded = new MemoryStream();
|
||||
int i = 0;
|
||||
|
||||
while (true)
|
||||
{
|
||||
i = dnode[i, bitReader.ReadBit() ? 1 : 0];
|
||||
if (i < 0)
|
||||
{
|
||||
i = -(i + 1);
|
||||
if (i == SPEOF)
|
||||
{
|
||||
break;
|
||||
}
|
||||
huffmanDecoded.WriteByte((byte)i);
|
||||
i = 0;
|
||||
}
|
||||
}
|
||||
|
||||
huffmanDecoded.Position = 0;
|
||||
return new RunLength90Stream(huffmanDecoded, (int)huffmanDecoded.Length);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -40,12 +40,12 @@ public abstract class BlockFilter : ReadOnlyStream
|
||||
public static BlockFilter Read(BinaryReader reader)
|
||||
{
|
||||
var filterType = (FilterTypes)reader.ReadXZInteger();
|
||||
if (!FILTER_MAP.ContainsKey(filterType))
|
||||
if (!FILTER_MAP.TryGetValue(filterType, out var createFilter))
|
||||
{
|
||||
throw new NotImplementedException($"Filter {filterType} has not yet been implemented");
|
||||
}
|
||||
|
||||
var filter = FILTER_MAP[filterType]();
|
||||
var filter = createFilter();
|
||||
|
||||
var sizeOfProperties = reader.ReadXZInteger();
|
||||
if (sizeOfProperties > int.MaxValue)
|
||||
|
||||
@@ -10,18 +10,18 @@ internal static partial class MultiByteIntegers
|
||||
{
|
||||
public static async ValueTask<ulong> ReadXZIntegerAsync(
|
||||
this BinaryReader reader,
|
||||
CancellationToken cancellationToken = default,
|
||||
int MaxBytes = 9
|
||||
int maxBytes = 9,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
if (MaxBytes <= 0)
|
||||
if (maxBytes <= 0)
|
||||
{
|
||||
throw new ArgumentOutOfRangeException(nameof(MaxBytes));
|
||||
throw new ArgumentOutOfRangeException(nameof(maxBytes));
|
||||
}
|
||||
|
||||
if (MaxBytes > 9)
|
||||
if (maxBytes > 9)
|
||||
{
|
||||
MaxBytes = 9;
|
||||
maxBytes = 9;
|
||||
}
|
||||
|
||||
var LastByte = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
|
||||
@@ -30,7 +30,7 @@ internal static partial class MultiByteIntegers
|
||||
var i = 0;
|
||||
while ((LastByte & 0x80) != 0)
|
||||
{
|
||||
if (++i >= MaxBytes)
|
||||
if (++i >= maxBytes)
|
||||
{
|
||||
throw new InvalidFormatException();
|
||||
}
|
||||
|
||||
@@ -19,7 +19,6 @@ public sealed partial class XZBlock : XZReadOnlyStream
|
||||
public ulong? UncompressedSize { get; private set; }
|
||||
public Stack<BlockFilter> Filters { get; private set; } = new();
|
||||
public bool HeaderIsLoaded { get; private set; }
|
||||
private CheckType _checkType;
|
||||
private readonly int _checkSize;
|
||||
private bool _streamConnected;
|
||||
private int _numFilters;
|
||||
@@ -33,7 +32,6 @@ public sealed partial class XZBlock : XZReadOnlyStream
|
||||
public XZBlock(Stream stream, CheckType checkType, int checkSize)
|
||||
: base(stream)
|
||||
{
|
||||
_checkType = checkType;
|
||||
_checkSize = checkSize;
|
||||
_startPosition = stream.Position;
|
||||
}
|
||||
|
||||
@@ -33,7 +33,9 @@ public partial class XZIndex
|
||||
await VerifyIndexMarkerAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
NumberOfRecords = await _reader.ReadXZIntegerAsync(cancellationToken).ConfigureAwait(false);
|
||||
NumberOfRecords = await _reader
|
||||
.ReadXZIntegerAsync(cancellationToken: cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
for (ulong i = 0; i < NumberOfRecords; i++)
|
||||
{
|
||||
Records.Add(
|
||||
|
||||
@@ -13,8 +13,9 @@ public partial class XZIndexRecord
|
||||
)
|
||||
{
|
||||
var record = new XZIndexRecord();
|
||||
record.UnpaddedSize = await br.ReadXZIntegerAsync(cancellationToken).ConfigureAwait(false);
|
||||
record.UncompressedSize = await br.ReadXZIntegerAsync(cancellationToken)
|
||||
record.UnpaddedSize = await br.ReadXZIntegerAsync(cancellationToken: cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
record.UncompressedSize = await br.ReadXZIntegerAsync(cancellationToken: cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
return record;
|
||||
}
|
||||
|
||||
@@ -12,10 +12,7 @@ namespace SharpCompress.Compressors.Xz;
|
||||
public sealed partial class XZStream : XZReadOnlyStream
|
||||
{
|
||||
public XZStream(Stream baseStream)
|
||||
: base(baseStream)
|
||||
{
|
||||
_baseStream = baseStream;
|
||||
}
|
||||
: base(baseStream) { }
|
||||
|
||||
protected override void Dispose(bool disposing)
|
||||
{
|
||||
@@ -48,7 +45,6 @@ public sealed partial class XZStream : XZReadOnlyStream
|
||||
}
|
||||
}
|
||||
|
||||
private readonly Stream _baseStream;
|
||||
public XZHeader Header { get; private set; }
|
||||
public XZIndex Index { get; private set; }
|
||||
public XZFooter Footer { get; private set; }
|
||||
|
||||
@@ -16,6 +16,13 @@ public partial class CompressionStream : Stream
|
||||
{
|
||||
if (compressor == null)
|
||||
{
|
||||
#if LEGACY_DOTNET
|
||||
Dispose(true);
|
||||
GC.SuppressFinalize(this);
|
||||
await Task.CompletedTask.ConfigureAwait(false);
|
||||
#else
|
||||
await base.DisposeAsync().ConfigureAwait(false);
|
||||
#endif
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -28,6 +35,12 @@ public partial class CompressionStream : Stream
|
||||
ReleaseUnmanagedResources();
|
||||
GC.SuppressFinalize(this);
|
||||
}
|
||||
#if LEGACY_DOTNET
|
||||
Dispose(true);
|
||||
await Task.CompletedTask.ConfigureAwait(false);
|
||||
#else
|
||||
await base.DisposeAsync().ConfigureAwait(false);
|
||||
#endif
|
||||
}
|
||||
|
||||
public override async Task FlushAsync(CancellationToken cancellationToken) =>
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
using System;
|
||||
using System;
|
||||
using System.Buffers;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
@@ -84,6 +84,7 @@ public partial class CompressionStream : Stream
|
||||
{
|
||||
if (compressor == null)
|
||||
{
|
||||
base.Dispose(disposing);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -98,6 +99,7 @@ public partial class CompressionStream : Stream
|
||||
{
|
||||
ReleaseUnmanagedResources();
|
||||
}
|
||||
base.Dispose(disposing);
|
||||
}
|
||||
|
||||
private void ReleaseUnmanagedResources()
|
||||
@@ -121,7 +123,8 @@ public partial class CompressionStream : Stream
|
||||
|
||||
public override void Flush() => FlushInternal(ZSTD_EndDirective.ZSTD_e_flush);
|
||||
|
||||
private void FlushInternal(ZSTD_EndDirective directive) => WriteInternal(null, directive);
|
||||
private void FlushInternal(ZSTD_EndDirective directive) =>
|
||||
WriteInternal(ReadOnlySpan<byte>.Empty, directive);
|
||||
|
||||
public override void Write(byte[] buffer, int offset, int count) =>
|
||||
Write(new ReadOnlySpan<byte>(buffer, offset, count));
|
||||
@@ -138,11 +141,7 @@ public partial class CompressionStream : Stream
|
||||
{
|
||||
EnsureNotDisposed();
|
||||
|
||||
var input = new ZSTD_inBuffer_s
|
||||
{
|
||||
pos = 0,
|
||||
size = buffer != null ? (nuint)buffer.Length : 0,
|
||||
};
|
||||
var input = new ZSTD_inBuffer_s { pos = 0, size = (nuint)buffer.Length };
|
||||
nuint remaining;
|
||||
do
|
||||
{
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
using System;
|
||||
using System;
|
||||
using System.Buffers;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
@@ -90,6 +90,7 @@ public partial class DecompressionStream : Stream
|
||||
{
|
||||
if (decompressor == null)
|
||||
{
|
||||
base.Dispose(disposing);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -108,6 +109,7 @@ public partial class DecompressionStream : Stream
|
||||
{
|
||||
innerStream.Dispose();
|
||||
}
|
||||
base.Dispose(disposing);
|
||||
}
|
||||
|
||||
public override int Read(byte[] buffer, int offset, int count) =>
|
||||
|
||||
@@ -32,7 +32,7 @@ internal abstract unsafe class SafeZstdHandle : SafeHandle
|
||||
internal sealed unsafe class SafeCctxHandle : SafeZstdHandle
|
||||
{
|
||||
/// <inheritdoc/>
|
||||
private SafeCctxHandle() { }
|
||||
internal SafeCctxHandle() { }
|
||||
|
||||
/// <summary>
|
||||
/// Creates a new instance of <see cref="SafeCctxHandle"/>.
|
||||
@@ -85,7 +85,7 @@ internal sealed unsafe class SafeCctxHandle : SafeZstdHandle
|
||||
internal sealed unsafe class SafeDctxHandle : SafeZstdHandle
|
||||
{
|
||||
/// <inheritdoc/>
|
||||
private SafeDctxHandle() { }
|
||||
internal SafeDctxHandle() { }
|
||||
|
||||
/// <summary>
|
||||
/// Creates a new instance of <see cref="SafeDctxHandle"/>.
|
||||
|
||||
@@ -275,6 +275,7 @@ internal partial class SharpCompressStream
|
||||
_ringBuffer?.Dispose();
|
||||
_ringBuffer = null;
|
||||
}
|
||||
await base.DisposeAsync().ConfigureAwait(false);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
@@ -63,7 +63,11 @@ internal static class NotNullExtensions
|
||||
)
|
||||
where T : struct
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(obj, paramName);
|
||||
if (!obj.HasValue)
|
||||
{
|
||||
throw new ArgumentNullException(paramName);
|
||||
}
|
||||
|
||||
return obj.Value;
|
||||
}
|
||||
#endif
|
||||
|
||||
@@ -20,10 +20,14 @@ internal static partial class Utility
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
#if LEGACY_DOTNET
|
||||
if (source is null)
|
||||
{
|
||||
throw new ArgumentNullException(nameof(source));
|
||||
throw new ArgumentNullException();
|
||||
}
|
||||
#else
|
||||
ArgumentNullException.ThrowIfNull(source);
|
||||
#endif
|
||||
|
||||
if (buffer is null)
|
||||
{
|
||||
|
||||
@@ -247,10 +247,14 @@ internal static partial class Utility
|
||||
/// </summary>
|
||||
public void ReadExact(byte[] buffer, int offset, int length)
|
||||
{
|
||||
#if LEGACY_DOTNET
|
||||
if (source is null)
|
||||
{
|
||||
throw new ArgumentNullException(nameof(source));
|
||||
throw new ArgumentNullException();
|
||||
}
|
||||
#else
|
||||
ArgumentNullException.ThrowIfNull(source);
|
||||
#endif
|
||||
|
||||
if (buffer is null)
|
||||
{
|
||||
|
||||
@@ -92,7 +92,7 @@ public class LargeMemoryStream : Stream
|
||||
|
||||
if (offset < 0 || count < 0 || offset + count > buffer.Length)
|
||||
{
|
||||
throw new ArgumentOutOfRangeException();
|
||||
throw new ArgumentOutOfRangeException(nameof(offset));
|
||||
}
|
||||
|
||||
long length = Length;
|
||||
@@ -137,7 +137,7 @@ public class LargeMemoryStream : Stream
|
||||
|
||||
if (offset < 0 || count < 0 || offset + count > buffer.Length)
|
||||
{
|
||||
throw new ArgumentOutOfRangeException();
|
||||
throw new ArgumentOutOfRangeException(nameof(offset));
|
||||
}
|
||||
|
||||
int bytesWritten = 0;
|
||||
|
||||
@@ -212,7 +212,9 @@ public class RarReaderAsyncTests : ReaderTests
|
||||
var file = Path.GetFileName(reader.Entry.Key).NotNull();
|
||||
var folder =
|
||||
Path.GetDirectoryName(reader.Entry.Key)
|
||||
?? throw new ArgumentNullException();
|
||||
?? throw new InvalidOperationException(
|
||||
"Entry key must have a directory name."
|
||||
);
|
||||
var destdir = Path.Combine(SCRATCH_FILES_PATH, folder);
|
||||
if (!Directory.Exists(destdir))
|
||||
{
|
||||
|
||||
@@ -195,7 +195,9 @@ public class RarReaderTests : ReaderTests
|
||||
var file = Path.GetFileName(reader.Entry.Key).NotNull();
|
||||
var folder =
|
||||
Path.GetDirectoryName(reader.Entry.Key)
|
||||
?? throw new ArgumentNullException();
|
||||
?? throw new InvalidOperationException(
|
||||
"Entry key must have a directory name."
|
||||
);
|
||||
var destdir = Path.Combine(SCRATCH_FILES_PATH, folder);
|
||||
if (!Directory.Exists(destdir))
|
||||
{
|
||||
|
||||