Compare commits

...

46 Commits
0.25 ... 0.26

Author SHA1 Message Date
Adam Hathcock
a7944f28c5 Fix CI again 2020-07-26 14:45:32 +01:00
Adam Hathcock
426d459284 Fix CI build 2020-07-26 14:39:13 +01:00
Adam Hathcock
b00b461ada Update documented targets 2020-07-26 14:38:19 +01:00
Adam Hathcock
84834b6348 ignore snupkg 2020-07-26 14:36:42 +01:00
Adam Hathcock
f521fd35ff Fix tests, update to 0.26 2020-07-26 14:36:07 +01:00
Adam Hathcock
2979fceecf Merge pull request #522 from JTOne123/master
[PR] The proj files have been updated to enable SourceLink
2020-07-26 12:07:13 +01:00
Adam Hathcock
b12e8e793f Merge branch 'master' into master 2020-07-26 12:07:05 +01:00
Adam Hathcock
c77ec59a28 Merge pull request #527 from adamhathcock/default-encoding
Don’t use 437 Encoding by default anymore.
2020-07-26 12:06:30 +01:00
Adam Hathcock
42ba8cf828 Merge branch 'master' into default-encoding 2020-07-26 12:06:22 +01:00
Adam Hathcock
c7618fc895 Merge pull request #528 from DannyBoyk/issue_524_tararchive_fails_read_all_entries
Ensure TarArchive enumerates all entries
2020-07-26 12:05:58 +01:00
Daniel Nash
d055b34efe Ensure TarArchive enumerates all entries
While enumerating the entries of a tar file and writing their contents
to disk using TarArchive, it was discovered TarArchive was not properly
discarding padding bytes in the last block of each entry. TarArchive was
sometimes able to recover depending on the number of padding bytes due
to the logic it uses to find the next entry header, but not always.

TarArchive was changed to use TarReadOnlySubStream when opening entries
and TarReadOnlySubStream was changed to ensure all of an entry's blocks are
read when it is being disposed.

Fixes adamhathcock/sharpcompress#524
2020-07-20 12:57:39 -04:00
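The padding this commit describes is the tar format's 512-byte block alignment: an entry's data is followed by zero bytes up to the next block boundary, and a reader that does not consume them loses its position. A minimal sketch of that skip under those assumptions, not the actual TarReadOnlySubStream implementation:

using System.IO;

internal static class TarPadding
{
    private const int BlockSize = 512;

    // Skip the zero padding after an entry so the stream lands on the next
    // 512-byte block boundary. Illustrative only; SharpCompress does this
    // inside TarReadOnlySubStream on dispose, per the commit above.
    public static void Skip(Stream archiveStream, long entrySize)
    {
        long remainder = entrySize % BlockSize;
        if (remainder == 0)
        {
            return; // entry already ends on a block boundary
        }

        long padding = BlockSize - remainder;
        if (archiveStream.CanSeek)
        {
            archiveStream.Seek(padding, SeekOrigin.Current);
            return;
        }

        // Forward-only streams have to read the padding away.
        var buffer = new byte[padding];
        int read;
        while (padding > 0 && (read = archiveStream.Read(buffer, 0, (int)padding)) > 0)
        {
            padding -= read;
        }
    }
}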
Adam Hathcock
b7f635f540 Update readme 2020-07-16 15:35:27 +01:00
Adam Hathcock
5e95a54260 Merge branch 'master' into default-encoding 2020-07-16 14:32:51 +01:00
Adam Hathcock
4354e82bb5 Don’t use 437 Encoding by default anymore. 2020-07-16 14:28:37 +01:00
Adam Hathcock
ab7bdc24dc Merge pull request #523 from kdaadk/master
Decompress multipart solid RAR4.x archive
2020-07-01 12:56:58 +01:00
Dmitriy
81997fe1ba rename test 2020-07-01 15:10:21 +05:00
Dmitriy
de6759a83f - remove check of solid archive
- change tests
- add test on multi solid archive
2020-07-01 15:06:54 +05:00
Adam Hathcock
233dc33130 Fix running tests on build 2020-06-25 09:22:15 +01:00
Adam Hathcock
39b07f45f1 Update github action and minor SDK bump 2020-06-25 09:16:15 +01:00
Pavlo Datsiuk
802662a165 [COMMIT] The proj files have been updated to enable SourceLink [SharpCompress.csproj] 2020-06-25 10:58:21 +03:00
Adam Hathcock
2859848fc4 Give the artifacts names 2020-05-24 10:41:06 +01:00
Adam Hathcock
b734d00062 Remove README for appveyor and fix artifacts again 2020-05-24 10:35:39 +01:00
Adam Hathcock
02a17d22f6 Adjust artifacts and remove appveyor 2020-05-24 10:32:50 +01:00
Adam Hathcock
7bfff472c6 Fix yaml 2020-05-24 10:26:51 +01:00
Adam Hathcock
5aa146be17 Remove matrix var 2020-05-24 10:25:40 +01:00
Adam Hathcock
a0bfc22a29 Try upload artifact 2020-05-24 10:23:01 +01:00
Adam Hathcock
6ed46b5fcc Fix CI paths 2020-05-24 09:04:10 +01:00
Adam Hathcock
904e40ef57 Switch to bullseye for building 2020-05-24 09:00:27 +01:00
Adam Hathcock
00ff119ec4 Minor Rider issues resolved. Still two outstanding. 2020-05-24 08:42:36 +01:00
Adam Hathcock
60d2511e80 Remove .NET Standard 1.3 which is no longer in support 2020-05-24 08:42:06 +01:00
Adam Hathcock
ed56a4aa4a Merge pull request #515 from carbon/master
Enable nullable
2020-05-24 08:20:37 +01:00
Jason Nelson
5b6a1c97e3 Enable nullable 2020-05-23 16:27:55 -07:00
Adam Hathcock
8bfc9ef4de Update for 0.25.1 2020-05-22 13:46:36 +01:00
Adam Hathcock
fa949e089e Merge pull request #512 from adamhathcock/fix-codepages
Attempt Windows reference fix
2020-05-22 13:44:10 +01:00
Adam Hathcock
c296ca7660 Merge pull request #513 from RealOrko/symbolic-link-default-write
Added default implementation with warning for symbolic links
2020-05-22 13:43:56 +01:00
RealOrko
538b38869f Added a warning for the writing of symbolic links with a link to the original GitHub issue for the DOTNET runtime 2020-05-21 13:00:25 +01:00
Adam Hathcock
ce328ed90b Merge branch 'master' into fix-codepages 2020-05-15 08:28:25 +01:00
Adam Hathcock
632bae725d Add braces for clarity 2020-05-14 13:47:21 +01:00
Adam Hathcock
4f824b1d9a Add build flags for Core targets 2020-05-14 13:16:00 +01:00
Adam Hathcock
120aee8039 See if windows reference is fixed 2020-05-14 13:09:17 +01:00
Adam Hathcock
3b2b341c4d Merge pull request #508 from turbedi/minor_optimizations
Minor optimizations
2020-04-11 09:08:07 +01:00
Berkan Diler
4cad40f637 Minor string optimizations 2020-04-10 12:33:40 +02:00
Berkan Diler
2c64380019 Use 3 argument Array.Copy when possible 2020-04-10 12:06:38 +02:00
Berkan Diler
ccb9593de2 Replace Span<T>.Fill(0) with Span<T>.Clear() 2020-04-10 12:03:32 +02:00
Berkan Diler
921a99fc32 Replace static readonly byte[] fields with static ReadOnlySpan<byte> properties 2020-04-10 11:54:58 +02:00
Adam Hathcock
400d2c1774 Fix usings and add braces for better merging 2020-04-03 08:47:30 +01:00
206 changed files with 1686 additions and 1029 deletions

View File

@@ -1,5 +1,5 @@
name: SharpCompress
on: [push]
on: [push, pull_request]
jobs:
build:
@@ -12,6 +12,13 @@ jobs:
- uses: actions/checkout@v1
- uses: actions/setup-dotnet@v1
with:
dotnet-version: 3.1.100
- name: Run the Cake script
uses: ecampidoglio/cake-action@master
dotnet-version: 3.1.302
- run: dotnet run -p build/build.csproj
- uses: actions/upload-artifact@v2
with:
name: ${{ matrix.os }}-sharpcompress.nupkg
path: artifacts/*
- uses: actions/upload-artifact@v2
with:
name: ${{ matrix.os }}-sharpcompress.snupkg
path: artifacts/*

1
.gitignore vendored
View File

@@ -17,3 +17,4 @@ tools
.idea/
.DS_Store
*.snupkg

View File

@@ -1,12 +1,9 @@
# SharpCompress
SharpCompress is a compression library in pure C# for .NET Standard 1.3 and 2.0 that can unrar, un7zip, unzip, untar unbzip2 and ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip are implemented.
SharpCompress is a compression library in pure C# for .NET Standard 2.0, 2.1 and NET 4.6 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
AppVeyor Build -
[![Build status](https://ci.appveyor.com/api/projects/status/voxg971oemmvxh1e/branch/master?svg=true)](https://ci.appveyor.com/project/adamhathcock/sharpcompress/branch/master)
GitHub Actions Build -
[![GitHubActions](https://github.com/adamhathcock/sharpcompress/workflows/SharpCompress/badge.svg)](https://circleci.com/gh/adamhathcock/sharpcompress)
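The forward-only and random-access APIs the updated README text refers to can be exercised through ArchiveFactory.Open and the WriteToDirectory extension, both of which appear later in this diff; a minimal sketch with placeholder paths:

using SharpCompress.Archives;
using SharpCompress.Common;

internal static class ExtractExample
{
    private static void Main()
    {
        // "example.zip" and "output" are placeholder paths for this sketch.
        using (var archive = ArchiveFactory.Open("example.zip"))
        {
            archive.WriteToDirectory("output",
                new ExtractionOptions { ExtractFullPath = true, Overwrite = true });
        }
    }
}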

View File

@@ -13,6 +13,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SharpCompress", "src\SharpC
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SharpCompress.Test", "tests\SharpCompress.Test\SharpCompress.Test.csproj", "{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "build", "build\build.csproj", "{D4D613CB-5E94-47FB-85BE-B8423D20C545}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -27,6 +29,10 @@ Global
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Debug|Any CPU.Build.0 = Debug|Any CPU
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Release|Any CPU.ActiveCfg = Release|Any CPU
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Release|Any CPU.Build.0 = Release|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Debug|Any CPU.Build.0 = Debug|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Release|Any CPU.ActiveCfg = Release|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE

View File

@@ -126,4 +126,7 @@
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EAddAccessorOwnerDeclarationBracesMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002ECSharpPlaceAttributeOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateBlankLinesAroundFieldToBlankLinesAroundProperty/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateThisQualifierSettings/@EntryIndexedValue">True</s:Boolean></wpf:ResourceDictionary>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateThisQualifierSettings/@EntryIndexedValue">True</s:Boolean>
<s:String x:Key="/Default/Environment/UnitTesting/UnitTestSessionStore/Sessions/=6af8f80e_002D9fdd_002D4223_002D8e02_002D473db916f9b2/@EntryIndexedValue">&lt;SessionState ContinuousTestingIsOn="False" ContinuousTestingMode="0" FrameworkVersion="{x:Null}" IsLocked="False" Name="All tests from Solution" PlatformMonoPreference="{x:Null}" PlatformType="{x:Null}" xmlns="urn:schemas-jetbrains-com:jetbrains-ut-session" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"&gt;
&lt;Solution /&gt;
&lt;/SessionState&gt;</s:String></wpf:ResourceDictionary>

View File

@@ -1,20 +0,0 @@
version: '{build}'
image: Visual Studio 2019
pull_requests:
do_not_increment_build_number: true
branches:
only:
- master
nuget:
disable_publish_on_pr: true
build_script:
- ps: .\build.ps1
test: off
artifacts:
- path: src\SharpCompress\bin\Release\*.nupkg

View File

@@ -1,89 +0,0 @@
var target = Argument("target", "Default");
var tag = Argument("tag", "cake");
Task("Restore")
.Does(() =>
{
DotNetCoreRestore(".");
});
Task("Build")
.IsDependentOn("Restore")
.Does(() =>
{
if (IsRunningOnWindows())
{
MSBuild("./sharpcompress.sln", c =>
{
c.SetConfiguration("Release")
.SetVerbosity(Verbosity.Minimal)
.UseToolVersion(MSBuildToolVersion.VS2019);
});
}
else
{
var settings = new DotNetCoreBuildSettings
{
Framework = "netstandard1.3",
Configuration = "Release",
NoRestore = true
};
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
settings.Framework = "netstandard2.0";
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
settings.Framework = "netstandard2.1";
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
}
});
Task("Test")
.IsDependentOn("Build")
.Does(() =>
{
var files = GetFiles("tests/**/*.csproj");
foreach(var file in files)
{
var settings = new DotNetCoreTestSettings
{
Configuration = "Release",
Framework = "netcoreapp3.1"
};
DotNetCoreTest(file.ToString(), settings);
}
});
Task("Pack")
.IsDependentOn("Build")
.Does(() =>
{
if (IsRunningOnWindows())
{
MSBuild("src/SharpCompress/SharpCompress.csproj", c => c
.SetConfiguration("Release")
.SetVerbosity(Verbosity.Minimal)
.UseToolVersion(MSBuildToolVersion.VS2019)
.WithProperty("NoBuild", "true")
.WithTarget("Pack"));
}
else
{
Information("Skipping Pack as this is not Windows");
}
});
Task("Default")
.IsDependentOn("Restore")
.IsDependentOn("Build")
.IsDependentOn("Test")
.IsDependentOn("Pack");
Task("RunTests")
.IsDependentOn("Restore")
.IsDependentOn("Build")
.IsDependentOn("Test");
RunTarget(target);

228
build.ps1
View File

@@ -1,228 +0,0 @@
##########################################################################
# This is the Cake bootstrapper script for PowerShell.
# This file was downloaded from https://github.com/cake-build/resources
# Feel free to change this file to fit your needs.
##########################################################################
<#
.SYNOPSIS
This is a Powershell script to bootstrap a Cake build.
.DESCRIPTION
This Powershell script will download NuGet if missing, restore NuGet tools (including Cake)
and execute your Cake build script with the parameters you provide.
.PARAMETER Script
The build script to execute.
.PARAMETER Target
The build script target to run.
.PARAMETER Configuration
The build configuration to use.
.PARAMETER Verbosity
Specifies the amount of information to be displayed.
.PARAMETER Experimental
Tells Cake to use the latest Roslyn release.
.PARAMETER WhatIf
Performs a dry run of the build script.
No tasks will be executed.
.PARAMETER Mono
Tells Cake to use the Mono scripting engine.
.PARAMETER SkipToolPackageRestore
Skips restoring of packages.
.PARAMETER ScriptArgs
Remaining arguments are added here.
.LINK
http://cakebuild.net
#>
[CmdletBinding()]
Param(
[string]$Script = "build.cake",
[string]$Target = "Default",
[ValidateSet("Release", "Debug")]
[string]$Configuration = "Release",
[ValidateSet("Quiet", "Minimal", "Normal", "Verbose", "Diagnostic")]
[string]$Verbosity = "Verbose",
[switch]$Experimental,
[Alias("DryRun","Noop")]
[switch]$WhatIf,
[switch]$Mono,
[switch]$SkipToolPackageRestore,
[Parameter(Position=0,Mandatory=$false,ValueFromRemainingArguments=$true)]
[string[]]$ScriptArgs
)
[Reflection.Assembly]::LoadWithPartialName("System.Security") | Out-Null
function MD5HashFile([string] $filePath)
{
if ([string]::IsNullOrEmpty($filePath) -or !(Test-Path $filePath -PathType Leaf))
{
return $null
}
[System.IO.Stream] $file = $null;
[System.Security.Cryptography.MD5] $md5 = $null;
try
{
$md5 = [System.Security.Cryptography.MD5]::Create()
$file = [System.IO.File]::OpenRead($filePath)
return [System.BitConverter]::ToString($md5.ComputeHash($file))
}
finally
{
if ($file -ne $null)
{
$file.Dispose()
}
}
}
Write-Host "Preparing to run build script..."
if(!$PSScriptRoot){
$PSScriptRoot = Split-Path $MyInvocation.MyCommand.Path -Parent
}
$TOOLS_DIR = Join-Path $PSScriptRoot "tools"
$ADDINS_DIR = Join-Path $TOOLS_DIR "addins"
$MODULES_DIR = Join-Path $TOOLS_DIR "modules"
$NUGET_EXE = Join-Path $TOOLS_DIR "nuget.exe"
$CAKE_EXE = Join-Path $TOOLS_DIR "Cake/Cake.exe"
$NUGET_URL = "https://dist.nuget.org/win-x86-commandline/latest/nuget.exe"
$PACKAGES_CONFIG = Join-Path $TOOLS_DIR "packages.config"
$PACKAGES_CONFIG_MD5 = Join-Path $TOOLS_DIR "packages.config.md5sum"
$ADDINS_PACKAGES_CONFIG = Join-Path $ADDINS_DIR "packages.config"
$MODULES_PACKAGES_CONFIG = Join-Path $MODULES_DIR "packages.config"
# Should we use mono?
$UseMono = "";
if($Mono.IsPresent) {
Write-Verbose -Message "Using the Mono based scripting engine."
$UseMono = "-mono"
}
# Should we use the new Roslyn?
$UseExperimental = "";
if($Experimental.IsPresent -and !($Mono.IsPresent)) {
Write-Verbose -Message "Using experimental version of Roslyn."
$UseExperimental = "-experimental"
}
# Is this a dry run?
$UseDryRun = "";
if($WhatIf.IsPresent) {
$UseDryRun = "-dryrun"
}
# Make sure tools folder exists
if ((Test-Path $PSScriptRoot) -and !(Test-Path $TOOLS_DIR)) {
Write-Verbose -Message "Creating tools directory..."
New-Item -Path $TOOLS_DIR -Type directory | out-null
}
# Make sure that packages.config exist.
if (!(Test-Path $PACKAGES_CONFIG)) {
Write-Verbose -Message "Downloading packages.config..."
try { (New-Object System.Net.WebClient).DownloadFile("http://cakebuild.net/download/bootstrapper/packages", $PACKAGES_CONFIG) } catch {
Throw "Could not download packages.config."
}
}
# Try find NuGet.exe in path if not exists
if (!(Test-Path $NUGET_EXE)) {
Write-Verbose -Message "Trying to find nuget.exe in PATH..."
$existingPaths = $Env:Path -Split ';' | Where-Object { (![string]::IsNullOrEmpty($_)) -and (Test-Path $_ -PathType Container) }
$NUGET_EXE_IN_PATH = Get-ChildItem -Path $existingPaths -Filter "nuget.exe" | Select -First 1
if ($NUGET_EXE_IN_PATH -ne $null -and (Test-Path $NUGET_EXE_IN_PATH.FullName)) {
Write-Verbose -Message "Found in PATH at $($NUGET_EXE_IN_PATH.FullName)."
$NUGET_EXE = $NUGET_EXE_IN_PATH.FullName
}
}
# Try download NuGet.exe if not exists
if (!(Test-Path $NUGET_EXE)) {
Write-Verbose -Message "Downloading NuGet.exe..."
try {
(New-Object System.Net.WebClient).DownloadFile($NUGET_URL, $NUGET_EXE)
} catch {
Throw "Could not download NuGet.exe."
}
}
# Save nuget.exe path to environment to be available to child processed
$ENV:NUGET_EXE = $NUGET_EXE
# Restore tools from NuGet?
if(-Not $SkipToolPackageRestore.IsPresent) {
Push-Location
Set-Location $TOOLS_DIR
# Check for changes in packages.config and remove installed tools if true.
[string] $md5Hash = MD5HashFile($PACKAGES_CONFIG)
if((!(Test-Path $PACKAGES_CONFIG_MD5)) -Or
($md5Hash -ne (Get-Content $PACKAGES_CONFIG_MD5 ))) {
Write-Verbose -Message "Missing or changed package.config hash..."
Remove-Item * -Recurse -Exclude packages.config,nuget.exe
}
Write-Verbose -Message "Restoring tools from NuGet..."
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$TOOLS_DIR`""
if ($LASTEXITCODE -ne 0) {
Throw "An error occured while restoring NuGet tools."
}
else
{
$md5Hash | Out-File $PACKAGES_CONFIG_MD5 -Encoding "ASCII"
}
Write-Verbose -Message ($NuGetOutput | out-string)
Pop-Location
}
# Restore addins from NuGet
if (Test-Path $ADDINS_PACKAGES_CONFIG) {
Push-Location
Set-Location $ADDINS_DIR
Write-Verbose -Message "Restoring addins from NuGet..."
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$ADDINS_DIR`""
if ($LASTEXITCODE -ne 0) {
Throw "An error occured while restoring NuGet addins."
}
Write-Verbose -Message ($NuGetOutput | out-string)
Pop-Location
}
# Restore modules from NuGet
if (Test-Path $MODULES_PACKAGES_CONFIG) {
Push-Location
Set-Location $MODULES_DIR
Write-Verbose -Message "Restoring modules from NuGet..."
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$MODULES_DIR`""
if ($LASTEXITCODE -ne 0) {
Throw "An error occured while restoring NuGet modules."
}
Write-Verbose -Message ($NuGetOutput | out-string)
Pop-Location
}
# Make sure that Cake has been installed.
if (!(Test-Path $CAKE_EXE)) {
Throw "Could not find Cake.exe at $CAKE_EXE"
}
# Start Cake
Write-Host "Running build script..."
Invoke-Expression "& `"$CAKE_EXE`" `"$Script`" -target=`"$Target`" -configuration=`"$Configuration`" -verbosity=`"$Verbosity`" $UseMono $UseDryRun $UseExperimental $ScriptArgs"
exit $LASTEXITCODE

View File

@@ -1,42 +0,0 @@
#!/usr/bin/env bash
##########################################################################
# This is the Cake bootstrapper script for Linux and OS X.
# This file was downloaded from https://github.com/cake-build/resources
# Feel free to change this file to fit your needs.
##########################################################################
# Define directories.
SCRIPT_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
TOOLS_DIR=$SCRIPT_DIR/tools
CAKE_VERSION=0.27.1
CAKE_DLL=$TOOLS_DIR/Cake.CoreCLR.$CAKE_VERSION/Cake.dll
# Make sure the tools folder exist.
if [ ! -d "$TOOLS_DIR" ]; then
mkdir "$TOOLS_DIR"
fi
###########################################################################
# INSTALL CAKE
###########################################################################
if [ ! -f "$CAKE_DLL" ]; then
curl -Lsfo Cake.CoreCLR.zip "https://www.nuget.org/api/v2/package/Cake.CoreCLR/$CAKE_VERSION" && unzip -q Cake.CoreCLR.zip -d "$TOOLS_DIR/Cake.CoreCLR.$CAKE_VERSION" && rm -f Cake.CoreCLR.zip
if [ $? -ne 0 ]; then
echo "An error occured while installing Cake."
exit 1
fi
fi
# Make sure that Cake has been installed.
if [ ! -f "$CAKE_DLL" ]; then
echo "Could not find Cake.exe at '$CAKE_DLL'."
exit 1
fi
###########################################################################
# RUN BUILD SCRIPT
###########################################################################
# Start Cake
exec dotnet "$CAKE_DLL" "$@"

76
build/Program.cs Normal file
View File

@@ -0,0 +1,76 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.InteropServices;
using GlobExpressions;
using static Bullseye.Targets;
using static SimpleExec.Command;
class Program
{
private const string Clean = "clean";
private const string Build = "build";
private const string Test = "test";
private const string Publish = "publish";
static void Main(string[] args)
{
Target(Clean,
ForEach("**/bin", "**/obj"),
dir =>
{
IEnumerable<string> GetDirectories(string d)
{
return Glob.Directories(".", d);
}
void RemoveDirectory(string d)
{
if (Directory.Exists(d))
{
Console.WriteLine(d);
Directory.Delete(d, true);
}
}
foreach (var d in GetDirectories(dir))
{
RemoveDirectory(d);
}
});
Target(Build, ForEach("net46", "netstandard2.0", "netstandard2.1"),
framework =>
{
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows) && framework == "net46")
{
return;
}
Run("dotnet", "build src/SharpCompress/SharpCompress.csproj -c Release");
});
Target(Test, DependsOn(Build), ForEach("netcoreapp3.1"),
framework =>
{
IEnumerable<string> GetFiles(string d)
{
return Glob.Files(".", d);
}
foreach (var file in GetFiles("**/*.Test.csproj"))
{
Run("dotnet", $"test {file} -c Release -f {framework}");
}
});
Target(Publish, DependsOn(Test),
() =>
{
Run("dotnet", "pack src/SharpCompress/SharpCompress.csproj -c Release -o artifacts/");
});
Target("default", DependsOn(Publish), () => Console.WriteLine("Done!"));
RunTargetsAndExit(args);
}
}

14
build/build.csproj Normal file
View File

@@ -0,0 +1,14 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>netcoreapp3.1</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Bullseye" Version="3.3.0" />
<PackageReference Include="Glob" Version="1.1.7" />
<PackageReference Include="SimpleExec" Version="6.2.0" />
</ItemGroup>
</Project>

5
global.json Normal file
View File

@@ -0,0 +1,5 @@
{
"sdk": {
"version": "3.1.302"
}
}

View File

@@ -14,11 +14,11 @@ namespace SharpCompress.Archives
private readonly LazyReadOnlyCollection<TVolume> lazyVolumes;
private readonly LazyReadOnlyCollection<TEntry> lazyEntries;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionBegin;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionEnd;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionBegin;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionEnd;
public event EventHandler<CompressedBytesReadEventArgs> CompressedBytesRead;
public event EventHandler<FilePartExtractionBeginEventArgs> FilePartExtractionBegin;
public event EventHandler<CompressedBytesReadEventArgs>? CompressedBytesRead;
public event EventHandler<FilePartExtractionBeginEventArgs>? FilePartExtractionBegin;
protected ReaderOptions ReaderOptions { get; }
@@ -48,12 +48,14 @@ namespace SharpCompress.Archives
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
}
#nullable disable
internal AbstractArchive(ArchiveType type)
{
Type = type;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
}
#nullable enable
public ArchiveType Type { get; }
@@ -121,21 +123,19 @@ namespace SharpCompress.Archives
void IExtractionListener.FireCompressedBytesRead(long currentPartCompressedBytes, long compressedReadBytes)
{
CompressedBytesRead?.Invoke(this, new CompressedBytesReadEventArgs
{
CurrentFilePartCompressedBytesRead = currentPartCompressedBytes,
CompressedBytesRead = compressedReadBytes
});
CompressedBytesRead?.Invoke(this, new CompressedBytesReadEventArgs(
currentFilePartCompressedBytesRead: currentPartCompressedBytes,
compressedBytesRead: compressedReadBytes
));
}
void IExtractionListener.FireFilePartExtractionBegin(string name, long size, long compressedSize)
{
FilePartExtractionBegin?.Invoke(this, new FilePartExtractionBeginEventArgs
{
CompressedSize = compressedSize,
Size = size,
Name = name
});
FilePartExtractionBegin?.Invoke(this, new FilePartExtractionBeginEventArgs(
compressedSize : compressedSize,
size : size,
name : name
));
}
/// <summary>

View File

@@ -18,7 +18,7 @@ namespace SharpCompress.Archives
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <returns></returns>
public static IArchive Open(Stream stream, ReaderOptions readerOptions = null)
public static IArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
@@ -86,7 +86,7 @@ namespace SharpCompress.Archives
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
public static IArchive Open(string filePath, ReaderOptions options = null)
public static IArchive Open(string filePath, ReaderOptions? options = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), options);
@@ -97,7 +97,7 @@ namespace SharpCompress.Archives
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
public static IArchive Open(FileInfo fileInfo, ReaderOptions options = null)
public static IArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
options = options ?? new ReaderOptions { LeaveStreamOpen = false };
@@ -135,7 +135,7 @@ namespace SharpCompress.Archives
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(string sourceArchive, string destinationDirectory,
ExtractionOptions options = null)
ExtractionOptions? options = null)
{
using (IArchive archive = Open(sourceArchive))
{

View File

@@ -18,7 +18,7 @@ namespace SharpCompress.Archives.GZip
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(string filePath, ReaderOptions readerOptions = null)
public static GZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
@@ -29,7 +29,7 @@ namespace SharpCompress.Archives.GZip
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new GZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
@@ -40,7 +40,7 @@ namespace SharpCompress.Archives.GZip
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
public static GZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
return new GZipArchive(stream, readerOptions ?? new ReaderOptions());

View File

@@ -15,7 +15,7 @@ namespace SharpCompress.Archives.GZip
public virtual Stream OpenEntryStream()
{
//this is to reset the stream to be read multiple times
var part = Parts.Single() as GZipFilePart;
var part = (GZipFilePart)Parts.Single();
if (part.GetRawStream().Position != part.EntryStartPosition)
{
part.GetRawStream().Position = part.EntryStartPosition;

View File

@@ -1,4 +1,6 @@
using System;
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
@@ -6,7 +8,7 @@ using SharpCompress.IO;
namespace SharpCompress.Archives.GZip
{
internal class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArchiveEntry
internal sealed class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArchiveEntry
{
private readonly bool closeStream;
private readonly Stream stream;

View File

@@ -8,22 +8,17 @@ namespace SharpCompress.Archives
{
public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWriteTo)
{
if (archiveEntry.Archive.Type == ArchiveType.Rar && archiveEntry.Archive.IsSolid)
{
throw new InvalidFormatException("Cannot use Archive random access on SOLID Rar files.");
}
if (archiveEntry.IsDirectory)
{
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
}
var streamListener = archiveEntry.Archive as IArchiveExtractionListener;
var streamListener = (IArchiveExtractionListener)archiveEntry.Archive;
streamListener.EnsureEntriesLoaded();
streamListener.FireEntryExtractionBegin(archiveEntry);
streamListener.FireFilePartExtractionBegin(archiveEntry.Key, archiveEntry.Size, archiveEntry.CompressedSize);
var entryStream = archiveEntry.OpenEntryStream();
if (entryStream == null)
if (entryStream is null)
{
return;
}
@@ -41,7 +36,7 @@ namespace SharpCompress.Archives
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(this IArchiveEntry entry, string destinationDirectory,
ExtractionOptions options = null)
ExtractionOptions? options = null)
{
ExtractionMethods.WriteEntryToDirectory(entry, destinationDirectory, options,
entry.WriteToFile);
@@ -50,8 +45,9 @@ namespace SharpCompress.Archives
/// <summary>
/// Extract to specific file
/// </summary>
public static void WriteToFile(this IArchiveEntry entry, string destinationFileName,
ExtractionOptions options = null)
public static void WriteToFile(this IArchiveEntry entry,
string destinationFileName,
ExtractionOptions? options = null)
{
ExtractionMethods.WriteEntryToFile(entry, destinationFileName, options,

View File

@@ -9,7 +9,7 @@ namespace SharpCompress.Archives
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(this IArchive archive, string destinationDirectory,
ExtractionOptions options = null)
ExtractionOptions? options = null)
{
foreach (IArchiveEntry entry in archive.Entries.Where(x => !x.IsDirectory))
{

View File

@@ -3,9 +3,9 @@ using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Archives.Rar
{
internal class FileInfoRarFilePart : SeekableFilePart
internal sealed class FileInfoRarFilePart : SeekableFilePart
{
internal FileInfoRarFilePart(FileInfoRarArchiveVolume volume, string password, MarkHeader mh, FileHeader fh, FileInfo fi)
internal FileInfoRarFilePart(FileInfoRarArchiveVolume volume, string? password, MarkHeader mh, FileHeader fh, FileInfo fi)
: base(mh, fh, volume.Stream, password)
{
FileInfo = fi;

View File

@@ -65,7 +65,7 @@ namespace SharpCompress.Archives.Rar
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
public static RarArchive Open(string filePath, ReaderOptions options = null)
public static RarArchive Open(string filePath, ReaderOptions? options = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
return new RarArchive(new FileInfo(filePath), options ?? new ReaderOptions());
@@ -76,7 +76,7 @@ namespace SharpCompress.Archives.Rar
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
public static RarArchive Open(FileInfo fileInfo, ReaderOptions options = null)
public static RarArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new RarArchive(fileInfo, options ?? new ReaderOptions());
@@ -87,7 +87,7 @@ namespace SharpCompress.Archives.Rar
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
public static RarArchive Open(Stream stream, ReaderOptions options = null)
public static RarArchive Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
return Open(stream.AsEnumerable(), options ?? new ReaderOptions());
@@ -98,7 +98,7 @@ namespace SharpCompress.Archives.Rar
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions options = null)
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
{
streams.CheckNotNull(nameof(streams));
return new RarArchive(streams, options ?? new ReaderOptions());
@@ -121,7 +121,7 @@ namespace SharpCompress.Archives.Rar
}
}
public static bool IsRarFile(Stream stream, ReaderOptions options = null)
public static bool IsRarFile(Stream stream, ReaderOptions? options = null)
{
try
{

View File

@@ -57,11 +57,6 @@ namespace SharpCompress.Archives.Rar
public Stream OpenEntryStream()
{
if (archive.IsSolid)
{
throw new InvalidOperationException("Use ExtractAllEntries to extract SOLID archives.");
}
if (IsRarV3)
{
return new RarStream(archive.UnpackV1.Value, FileHeader, new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive));

View File

@@ -34,20 +34,20 @@ namespace SharpCompress.Archives.Rar
{
yield break; //if file isn't volume then there is no reason to look
}
fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart);
fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart)!;
//we use fileinfo because rar is dumb and looks at file names rather than archive info for another volume
while (fileInfo != null && fileInfo.Exists)
{
part = new FileInfoRarArchiveVolume(fileInfo, options);
fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart);
fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart)!;
yield return part;
}
}
private static FileInfo GetNextFileInfo(ArchiveHeader ah, FileInfoRarFilePart currentFilePart)
private static FileInfo? GetNextFileInfo(ArchiveHeader ah, FileInfoRarFilePart? currentFilePart)
{
if (currentFilePart == null)
if (currentFilePart is null)
{
return null;
}

View File

@@ -7,9 +7,9 @@ namespace SharpCompress.Archives.Rar
internal class SeekableFilePart : RarFilePart
{
private readonly Stream stream;
private readonly string password;
private readonly string? password;
internal SeekableFilePart(MarkHeader mh, FileHeader fh, Stream stream, string password)
internal SeekableFilePart(MarkHeader mh, FileHeader fh, Stream stream, string? password)
: base(mh, fh)
{
this.stream = stream;
@@ -21,7 +21,7 @@ namespace SharpCompress.Archives.Rar
stream.Position = FileHeader.DataStartPosition;
if (FileHeader.R4Salt != null)
{
return new RarCryptoWrapper(stream, password, FileHeader.R4Salt);
return new RarCryptoWrapper(stream, password!, FileHeader.R4Salt);
}
return stream;
}

View File

@@ -1,4 +1,6 @@
using System;
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -108,7 +110,7 @@ namespace SharpCompress.Archives.SevenZip
private void LoadFactory(Stream stream)
{
if (database == null)
if (database is null)
{
stream.Position = 0;
var reader = new ArchiveReader();
@@ -129,12 +131,12 @@ namespace SharpCompress.Archives.SevenZip
}
}
private static readonly byte[] SIGNATURE = {(byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C};
private static ReadOnlySpan<byte> SIGNATURE => new byte[] {(byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C};
private static bool SignatureMatch(Stream stream)
{
BinaryReader reader = new BinaryReader(stream);
byte[] signatureBytes = reader.ReadBytes(6);
ReadOnlySpan<byte> signatureBytes = reader.ReadBytes(6);
return signatureBytes.SequenceEqual(SIGNATURE);
}
@@ -154,7 +156,7 @@ namespace SharpCompress.Archives.SevenZip
}
}
private class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
private sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
{
private readonly SevenZipArchive archive;
private CFolder currentFolder;
@@ -180,7 +182,7 @@ namespace SharpCompress.Archives.SevenZip
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
{
currentFolder = group.Key;
if (group.Key == null)
if (group.Key is null)
{
currentStream = Stream.Null;
}
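The SIGNATURE change in this file is one instance of the "Replace static readonly byte[] fields with static ReadOnlySpan<byte> properties" commit: when the initializer is constant, the compiler emits the bytes into the assembly's data section and the span reads them in place, so no heap array is ever allocated. A standalone sketch of the pattern (the class name is illustrative):

using System;

internal static class SignatureExample
{
    // Old shape: a heap-allocated array held for the lifetime of the type.
    // private static readonly byte[] Signature = { 0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C };

    // New shape: the constant bytes live in the binary's data section and the
    // span wraps them directly, with no allocation.
    private static ReadOnlySpan<byte> Signature => new byte[] { 0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C };

    public static bool Matches(ReadOnlySpan<byte> candidate) => candidate.SequenceEqual(Signature);
}

The compiler only applies this treatment to spans of single-byte constants (byte, sbyte, bool), which is why the pattern shows up for signature arrays such as the 7z magic bytes above.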

View File

@@ -20,7 +20,7 @@ namespace SharpCompress.Archives.Tar
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(string filePath, ReaderOptions readerOptions = null)
public static TarArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
@@ -31,7 +31,7 @@ namespace SharpCompress.Archives.Tar
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
public static TarArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new TarArchive(fileInfo, readerOptions ?? new ReaderOptions());
@@ -42,7 +42,7 @@ namespace SharpCompress.Archives.Tar
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(Stream stream, ReaderOptions readerOptions = null)
public static TarArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
return new TarArchive(stream, readerOptions ?? new ReaderOptions());
@@ -118,8 +118,8 @@ namespace SharpCompress.Archives.Tar
protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
{
Stream stream = volumes.Single().Stream;
TarHeader previousHeader = null;
foreach (TarHeader header in TarHeaderFactory.ReadHeader(StreamingMode.Seekable, stream, ReaderOptions.ArchiveEncoding))
TarHeader? previousHeader = null;
foreach (TarHeader? header in TarHeaderFactory.ReadHeader(StreamingMode.Seekable, stream, ReaderOptions.ArchiveEncoding))
{
if (header != null)
{

View File

@@ -1,4 +1,6 @@
using System;
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
@@ -6,7 +8,7 @@ using SharpCompress.IO;
namespace SharpCompress.Archives.Tar
{
internal class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiveEntry
internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiveEntry
{
private readonly bool closeStream;
private readonly Stream stream;

View File

@@ -15,7 +15,9 @@ namespace SharpCompress.Archives.Zip
{
public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
{
#nullable disable
private readonly SeekableZipHeaderFactory headerFactory;
#nullable enable
/// <summary>
/// Gets or sets the compression level applied to files added to the archive,
@@ -28,7 +30,7 @@ namespace SharpCompress.Archives.Zip
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(string filePath, ReaderOptions readerOptions = null)
public static ZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
@@ -39,7 +41,7 @@ namespace SharpCompress.Archives.Zip
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new ZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
@@ -50,18 +52,18 @@ namespace SharpCompress.Archives.Zip
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
public static ZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
return new ZipArchive(stream, readerOptions ?? new ReaderOptions());
}
public static bool IsZipFile(string filePath, string password = null)
public static bool IsZipFile(string filePath, string? password = null)
{
return IsZipFile(new FileInfo(filePath), password);
}
public static bool IsZipFile(FileInfo fileInfo, string password = null)
public static bool IsZipFile(FileInfo fileInfo, string? password = null)
{
if (!fileInfo.Exists)
{
@@ -73,14 +75,14 @@ namespace SharpCompress.Archives.Zip
}
}
public static bool IsZipFile(Stream stream, string password = null)
public static bool IsZipFile(Stream stream, string? password = null)
{
StreamingZipHeaderFactory headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding());
try
{
ZipHeader header =
headerFactory.ReadStreamHeader(stream).FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header == null)
if (header is null)
{
return false;
}
@@ -147,13 +149,13 @@ namespace SharpCompress.Archives.Zip
{
yield return new ZipArchiveEntry(this,
new SeekableZipFilePart(headerFactory,
h as DirectoryEntryHeader,
(DirectoryEntryHeader)h,
stream));
}
break;
case ZipHeaderType.DirectoryEnd:
{
byte[] bytes = (h as DirectoryEndHeader).Comment;
byte[] bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
volume.Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
yield break;
}

View File

@@ -6,7 +6,7 @@ namespace SharpCompress.Archives.Zip
{
public class ZipArchiveEntry : ZipEntry, IArchiveEntry
{
internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart part)
internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart? part)
: base(part)
{
Archive = archive;
@@ -25,6 +25,6 @@ namespace SharpCompress.Archives.Zip
#endregion
public string Comment => (Parts.Single() as SeekableZipFilePart).Comment;
public string? Comment => ((SeekableZipFilePart)Parts.Single()).Comment;
}
}

View File

@@ -18,24 +18,30 @@ namespace SharpCompress.Common
/// <summary>
/// Set this encoding when you want to force it for all encoding operations.
/// </summary>
public Encoding Forced { get; set; }
public Encoding? Forced { get; set; }
/// <summary>
/// Set this when you want to use a custom method for all decoding operations.
/// </summary>
/// <returns>string Func(bytes, index, length)</returns>
public Func<byte[], int, int, string> CustomDecoder { get; set; }
public Func<byte[], int, int, string>? CustomDecoder { get; set; }
public ArchiveEncoding()
: this(Encoding.Default, Encoding.Default)
{
Default = Encoding.GetEncoding(437);
Password = Encoding.GetEncoding(437);
}
public ArchiveEncoding(Encoding def, Encoding password)
{
Default = def;
Password = password;
}
#if NETSTANDARD1_3 || NETSTANDARD2_0 || NETSTANDARD2_1
static ArchiveEncoding()
{
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
}
#endif
public string Decode(byte[] bytes)
{
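Callers that depended on the old IBM437 default can opt back in through the two-argument constructor added above; a sketch that assumes the ReaderOptions.ArchiveEncoding property (not shown in this hunk) and the System.Text.Encoding.CodePages package:

using System.Text;
using SharpCompress.Common;
using SharpCompress.Readers;

internal static class EncodingExample
{
    public static ReaderOptions LegacyDosEncoding()
    {
        // Explicit registration keeps the sketch self-contained; the library's
        // static constructor shown above also registers this provider.
        Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
        var cp437 = Encoding.GetEncoding(437);
        return new ReaderOptions
        {
            ArchiveEncoding = new ArchiveEncoding(cp437, cp437)
        };
    }
}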

View File

@@ -2,16 +2,22 @@
namespace SharpCompress.Common
{
public class CompressedBytesReadEventArgs : EventArgs
public sealed class CompressedBytesReadEventArgs : EventArgs
{
public CompressedBytesReadEventArgs(long compressedBytesRead, long currentFilePartCompressedBytesRead)
{
CompressedBytesRead = compressedBytesRead;
CurrentFilePartCompressedBytesRead = currentFilePartCompressedBytesRead;
}
/// <summary>
/// Compressed bytes read for the current entry
/// </summary>
public long CompressedBytesRead { get; internal set; }
public long CompressedBytesRead { get; }
/// <summary>
/// Current file part read for Multipart files (e.g. Rar)
/// </summary>
public long CurrentFilePartCompressedBytesRead { get; internal set; }
public long CurrentFilePartCompressedBytesRead { get; }
}
}

View File

@@ -18,7 +18,7 @@ namespace SharpCompress.Common
/// <summary>
/// The target of a symlink entry internal to the Archive. Will be null if not a symlink.
/// </summary>
public abstract string LinkTarget { get; }
public abstract string? LinkTarget { get; }
/// <summary>
/// The compressed file size
@@ -71,12 +71,10 @@ namespace SharpCompress.Common
public abstract bool IsSplitAfter { get; }
/// <inheritdoc/>
public override string ToString()
{
return Key;
}
public override string ToString() => Key;
internal abstract IEnumerable<FilePart> Parts { get; }
internal bool IsSolid { get; set; }
internal virtual void Close()

View File

@@ -8,17 +8,19 @@ namespace SharpCompress.Common
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteEntryToDirectory(IEntry entry, string destinationDirectory,
ExtractionOptions options, Action<string, ExtractionOptions> write)
public static void WriteEntryToDirectory(IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Action<string, ExtractionOptions?> write)
{
string destinationFileName;
string file = Path.GetFileName(entry.Key);
string fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
options = options ?? new ExtractionOptions()
{
Overwrite = true
};
options ??= new ExtractionOptions()
{
Overwrite = true
};
if (options.ExtractFullPath)
{
@@ -61,12 +63,12 @@ namespace SharpCompress.Common
}
public static void WriteEntryToFile(IEntry entry, string destinationFileName,
ExtractionOptions options,
ExtractionOptions? options,
Action<string, FileMode> openAndWrite)
{
if (entry.LinkTarget != null)
{
if (null == options.WriteSymbolicLink)
if (options?.WriteSymbolicLink is null)
{
throw new ExtractionException("Entry is a symbolic link but ExtractionOptions.WriteSymbolicLink delegate is null");
}
@@ -75,10 +77,10 @@ namespace SharpCompress.Common
else
{
FileMode fm = FileMode.Create;
options = options ?? new ExtractionOptions()
{
Overwrite = true
};
options ??= new ExtractionOptions()
{
Overwrite = true
};
if (!options.Overwrite)
{

View File

@@ -1,4 +1,6 @@
namespace SharpCompress.Common
using System;
namespace SharpCompress.Common
{
public class ExtractionOptions
{
@@ -29,6 +31,10 @@
/// </summary>
public delegate void SymbolicLinkWriterDelegate(string sourcePath, string targetPath);
public SymbolicLinkWriterDelegate WriteSymbolicLink;
public SymbolicLinkWriterDelegate WriteSymbolicLink =
(sourcePath, targetPath) =>
{
Console.WriteLine($"Could not write symlink {sourcePath} -> {targetPath}, for more information please see https://github.com/dotnet/runtime/issues/24271");
};
}
}
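Consumers that want real symbolic links instead of the default warning can swap in their own delegate; a sketch for Unix-like systems that shells out to ln(1) (a Windows or P/Invoke approach would differ):

using System.Diagnostics;
using SharpCompress.Common;

internal static class SymlinkExample
{
    public static ExtractionOptions WithRealSymlinks()
    {
        return new ExtractionOptions
        {
            Overwrite = true,
            // Illustrative, Unix-only: create the link by shelling out to ln.
            WriteSymbolicLink = (sourcePath, targetPath) =>
            {
                using (var process = Process.Start("ln", $"-s \"{targetPath}\" \"{sourcePath}\""))
                {
                    process.WaitForExit();
                }
            }
        };
    }
}

The delegate receives the link path first and its target second, matching the SymbolicLinkWriterDelegate(string sourcePath, string targetPath) signature in the hunk above.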

View File

@@ -14,7 +14,7 @@ namespace SharpCompress.Common
internal abstract string FilePartName { get; }
internal abstract Stream GetCompressedStream();
internal abstract Stream GetRawStream();
internal abstract Stream? GetRawStream();
internal bool Skipped { get; set; }
}
}

View File

@@ -2,21 +2,28 @@
namespace SharpCompress.Common
{
public class FilePartExtractionBeginEventArgs : EventArgs
public sealed class FilePartExtractionBeginEventArgs : EventArgs
{
public FilePartExtractionBeginEventArgs(string name, long size, long compressedSize)
{
Name = name;
Size = size;
CompressedSize = compressedSize;
}
/// <summary>
/// File name for the part for the current entry
/// </summary>
public string Name { get; internal set; }
public string Name { get; }
/// <summary>
/// Uncompressed size of the current entry in the part
/// </summary>
public long Size { get; internal set; }
public long Size { get; }
/// <summary>
/// Compressed size of the current entry in the part
/// </summary>
public long CompressedSize { get; internal set; }
public long CompressedSize { get; }
}
}

View File

@@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace SharpCompress.Common.GZip
{
@@ -20,7 +19,7 @@ namespace SharpCompress.Common.GZip
public override string Key => _filePart.FilePartName;
public override string LinkTarget => null;
public override string? LinkTarget => null;
public override long CompressedSize => 0;

View File

@@ -10,11 +10,11 @@ namespace SharpCompress.Common.GZip
{
internal class GZipFilePart : FilePart
{
private string _name;
private string? _name;
private readonly Stream _stream;
internal GZipFilePart(Stream stream, ArchiveEncoding archiveEncoding)
: base(archiveEncoding)
: base(archiveEncoding)
{
ReadAndValidateGzipHeader(stream);
EntryStartPosition = stream.Position;
@@ -25,7 +25,7 @@ namespace SharpCompress.Common.GZip
internal DateTime? DateModified { get; private set; }
internal override string FilePartName => _name;
internal override string FilePartName => _name!;
internal override Stream GetCompressedStream()
{

View File

@@ -1,5 +1,4 @@
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common
{

View File

@@ -10,7 +10,7 @@ namespace SharpCompress.Common
long Crc { get; }
DateTime? CreatedTime { get; }
string Key { get; }
string LinkTarget { get; }
string? LinkTarget { get; }
bool IsDirectory { get; }
bool IsEncrypted { get; }
bool IsSplitAfter { get; }

View File

@@ -7,8 +7,10 @@ namespace SharpCompress.Common.Rar.Headers
public AvHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Av)
{
if (IsRar5)
if (IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");
}
}
protected override void ReadFinish(MarkingBinaryReader reader)

View File

@@ -1,4 +1,6 @@
using SharpCompress.IO;
#nullable disable
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{

View File

@@ -2,7 +2,7 @@ using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
internal class ArchiveHeader : RarHeader
internal sealed class ArchiveHeader : RarHeader
{
public ArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Archive)
@@ -38,7 +38,11 @@ namespace SharpCompress.Common.Rar.Headers
private void ReadLocator(MarkingBinaryReader reader) {
var size = reader.ReadRarVIntUInt16();
var type = reader.ReadRarVIntUInt16();
if (type != 1) throw new InvalidFormatException("expected locator record");
if (type != 1)
{
throw new InvalidFormatException("expected locator record");
}
var flags = reader.ReadRarVIntUInt16();
const ushort hasQuickOpenOffset = 0x01;
const ushort hasRecoveryOffset = 0x02;
@@ -74,7 +78,7 @@ namespace SharpCompress.Common.Rar.Headers
public bool IsVolume => HasFlag(IsRar5 ? ArchiveFlagsV5.VOLUME : ArchiveFlagsV4.VOLUME);
// RAR5: Volume number field is present. True for all volumes except first.
public bool IsFirstVolume => IsRar5 ? VolumeNumber == null : HasFlag(ArchiveFlagsV4.FIRST_VOLUME);
public bool IsFirstVolume => IsRar5 ? VolumeNumber is null : HasFlag(ArchiveFlagsV4.FIRST_VOLUME);
public bool IsSolid => HasFlag(IsRar5 ? ArchiveFlagsV5.SOLID : ArchiveFlagsV4.SOLID);
}

View File

@@ -7,7 +7,10 @@ namespace SharpCompress.Common.Rar.Headers
protected CommentHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Comment)
{
if (IsRar5) throw new InvalidFormatException("unexpected rar5 record");
if (IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");
}
}
protected override void ReadFinish(MarkingBinaryReader reader)

View File

@@ -1,3 +1,5 @@
#nullable disable
#if !Rar2017_64bit
using nint = System.Int32;
using nuint = System.UInt32;

View File

@@ -46,19 +46,38 @@ namespace SharpCompress.Common.Rar.Headers
if (b == 0x61)
{
b = GetByte(stream); start++;
if (b != 0x72) continue;
if (b != 0x72)
{
continue;
}
b = GetByte(stream); start++;
if (b != 0x21) continue;
if (b != 0x21)
{
continue;
}
b = GetByte(stream); start++;
if (b != 0x1a) continue;
if (b != 0x1a)
{
continue;
}
b = GetByte(stream); start++;
if (b != 0x07) continue;
if (b != 0x07)
{
continue;
}
b = GetByte(stream); start++;
if (b == 1)
{
b = GetByte(stream); start++;
if (b != 0) continue;
if (b != 0)
{
continue;
}
return new MarkHeader(true); // Rar5
}
else if (b == 0)
@@ -69,9 +88,17 @@ namespace SharpCompress.Common.Rar.Headers
else if (b == 0x45)
{
b = GetByte(stream); start++;
if (b != 0x7e) continue;
if (b != 0x7e)
{
continue;
}
b = GetByte(stream); start++;
if (b != 0x5e) continue;
if (b != 0x5e)
{
continue;
}
throw new InvalidFormatException("Rar format version pre-4 is unsupported.");
}
}

View File

@@ -3,12 +3,15 @@
namespace SharpCompress.Common.Rar.Headers
{
// ProtectHeader is part of the Recovery Record feature
internal class ProtectHeader : RarHeader
internal sealed class ProtectHeader : RarHeader
{
public ProtectHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Protect)
{
if (IsRar5) throw new InvalidFormatException("unexpected rar5 record");
if (IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");
}
}
protected override void ReadFinish(MarkingBinaryReader reader)
@@ -23,6 +26,6 @@ namespace SharpCompress.Common.Rar.Headers
internal byte Version { get; private set; }
internal ushort RecSectors { get; private set; }
internal uint TotalBlocks { get; private set; }
internal byte[] Mark { get; private set; }
internal byte[]? Mark { get; private set; }
}
}

View File

@@ -11,7 +11,7 @@ namespace SharpCompress.Common.Rar.Headers
private readonly HeaderType _headerType;
private readonly bool _isRar5;
internal static RarHeader TryReadBase(RarCrcBinaryReader reader, bool isRar5, ArchiveEncoding archiveEncoding)
internal static RarHeader? TryReadBase(RarCrcBinaryReader reader, bool isRar5, ArchiveEncoding archiveEncoding)
{
try
{

View File

@@ -1,4 +1,3 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.IO;
@@ -26,7 +25,7 @@ namespace SharpCompress.Common.Rar.Headers
_isRar5 = markHeader.IsRar5;
yield return markHeader;
RarHeader header;
RarHeader? header;
while ((header = TryReadNextHeader(stream)) != null)
{
yield return header;
@@ -39,7 +38,7 @@ namespace SharpCompress.Common.Rar.Headers
}
}
private RarHeader TryReadNextHeader(Stream stream)
private RarHeader? TryReadNextHeader(Stream stream)
{
RarCrcBinaryReader reader;
if (!IsEncrypted)
@@ -48,7 +47,7 @@ namespace SharpCompress.Common.Rar.Headers
}
else
{
if (Options.Password == null)
if (Options.Password is null)
{
throw new CryptographicException("Encrypted Rar archive has no password specified.");
}
@@ -56,7 +55,7 @@ namespace SharpCompress.Common.Rar.Headers
}
var header = RarHeader.TryReadBase(reader, _isRar5, Options.ArchiveEncoding);
if (header == null)
if (header is null)
{
return null;
}
@@ -128,13 +127,13 @@ namespace SharpCompress.Common.Rar.Headers
case StreamingMode.Streaming:
{
var ms = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
if (fh.R4Salt == null)
if (fh.R4Salt is null)
{
fh.PackedStream = ms;
}
else
{
fh.PackedStream = new RarCryptoWrapper(ms, Options.Password, fh.R4Salt);
fh.PackedStream = new RarCryptoWrapper(ms, Options.Password!, fh.R4Salt);
}
}
break;

View File

@@ -7,7 +7,10 @@ namespace SharpCompress.Common.Rar.Headers
protected SignHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Sign)
{
if (IsRar5) throw new InvalidFormatException("unexpected rar5 record");
if (IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");
}
}
protected override void ReadFinish(MarkingBinaryReader reader)

View File

@@ -3,7 +3,7 @@ using System.IO;
namespace SharpCompress.Common.Rar
{
internal class RarCryptoBinaryReader : RarCrcBinaryReader
internal sealed class RarCryptoBinaryReader : RarCrcBinaryReader
{
private RarRijndael _rijndael;
private byte[] _salt;
@@ -19,7 +19,9 @@ namespace SharpCompress.Common.Rar
// coderb: not sure why this was being done at this logical point
//SkipQueue();
byte[] salt = ReadBytes(8);
InitializeAes(salt);
_salt = salt;
_rijndael = RarRijndael.InitializeFrom(_password, salt);
}
// track read count ourselves rather than using the underlying stream since we buffer
@@ -39,12 +41,6 @@ namespace SharpCompress.Common.Rar
private bool UseEncryption => _salt != null;
internal void InitializeAes(byte[] salt)
{
_salt = salt;
_rijndael = RarRijndael.InitializeFrom(_password, salt);
}
public override byte ReadByte()
{
if (UseEncryption)
@@ -81,7 +77,9 @@ namespace SharpCompress.Common.Rar
byte[] cipherText = ReadBytesNoCrc(16);
var readBytes = _rijndael.ProcessBlock(cipherText);
foreach (var readByte in readBytes)
{
_data.Enqueue(readByte);
}
}
}

View File

@@ -4,7 +4,7 @@ using System.IO;
namespace SharpCompress.Common.Rar
{
internal class RarCryptoWrapper : Stream
internal sealed class RarCryptoWrapper : Stream
{
private readonly Stream _actualStream;
private readonly byte[] _salt;
@@ -35,7 +35,7 @@ namespace SharpCompress.Common.Rar
public override int Read(byte[] buffer, int offset, int count)
{
if (_salt == null)
if (_salt is null)
{
return _actualStream.Read(buffer, offset, count);
}
@@ -58,12 +58,15 @@ namespace SharpCompress.Common.Rar
var readBytes = _rijndael.ProcessBlock(cipherText);
foreach (var readByte in readBytes)
{
_data.Enqueue(readByte);
}
}
for (int i = 0; i < count; i++)
{
buffer[offset + i] = _data.Dequeue();
}
}
return count;
}
@@ -88,7 +91,7 @@ namespace SharpCompress.Common.Rar
if (_rijndael != null)
{
_rijndael.Dispose();
_rijndael = null;
_rijndael = null!;
}
base.Dispose(disposing);
}

View File

@@ -22,7 +22,7 @@ namespace SharpCompress.Common.Rar
/// </summary>
public override string Key => FileHeader.FileName;
public override string LinkTarget => null;
public override string? LinkTarget => null;
/// <summary>
/// The entry last modified time in the archive, if recorded


@@ -19,7 +19,7 @@ namespace SharpCompress.Common.Rar
internal FileHeader FileHeader { get; }
internal override Stream GetRawStream()
internal override Stream? GetRawStream()
{
return null;
}


@@ -1,4 +1,6 @@
using System;
#nullable disable
using System;
using System.Security.Cryptography;
using System.Text;
using SharpCompress.Crypto;
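
The `#nullable disable` lines added at the top of files like this one switch the nullable annotation context off for code that has not been annotated yet; other spots in this diff wrap a single member instead, as with ArchiveHeader further down. A small standalone sketch of how the directive scopes (not repository code):

using System;

#nullable disable
string legacy = null;            // no nullable warning: annotations are off in this region
#nullable enable
string? modern = null;           // annotations and flow analysis apply again from here on
Console.WriteLine(legacy ?? modern ?? "both were null");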


@@ -21,7 +21,9 @@ namespace SharpCompress.Common.Rar
_headerFactory = new RarHeaderFactory(mode, options);
}
#nullable disable
internal ArchiveHeader ArchiveHeader { get; private set; }
#nullable enable
internal StreamingMode Mode => _headerFactory.StreamingMode;
@@ -31,25 +33,25 @@ namespace SharpCompress.Common.Rar
internal IEnumerable<RarFilePart> GetVolumeFileParts()
{
MarkHeader lastMarkHeader = null;
MarkHeader? lastMarkHeader = null;
foreach (var header in _headerFactory.ReadHeaders(Stream))
{
switch (header.HeaderType)
{
case HeaderType.Mark:
{
lastMarkHeader = header as MarkHeader;
lastMarkHeader = (MarkHeader)header;
}
break;
case HeaderType.Archive:
{
ArchiveHeader = header as ArchiveHeader;
ArchiveHeader = (ArchiveHeader)header;
}
break;
case HeaderType.File:
{
var fh = header as FileHeader;
yield return CreateFilePart(lastMarkHeader, fh);
var fh = (FileHeader)header;
yield return CreateFilePart(lastMarkHeader!, fh);
}
break;
}
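
Two other tightenings appear in the hunk above: `header as MarkHeader` becomes a direct cast, which fails fast with an InvalidCastException instead of silently yielding null, and `lastMarkHeader!` (like `Options.Password!` earlier) uses the null-forgiving operator. A minimal sketch of what `!` does and does not do — the names and the environment variable are made up for illustration:

using System;

string? password = Environment.GetEnvironmentVariable("ARCHIVE_PASSWORD"); // may legitimately be null
// UsePassword(password);   // CS8604: possible null reference argument
UsePassword(password!);     // '!' only silences the compiler; no runtime check is inserted,
                            // so this still fails inside UsePassword if the value really is null

static void UsePassword(string value) => Console.WriteLine(value.Length);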
@@ -58,7 +60,7 @@ namespace SharpCompress.Common.Rar
private void EnsureArchiveHeaderLoaded()
{
if (ArchiveHeader == null)
if (ArchiveHeader is null)
{
if (Mode == StreamingMode.Streaming)
{


@@ -3,15 +3,16 @@ using SharpCompress.Readers;
namespace SharpCompress.Common
{
public class ReaderExtractionEventArgs<T> : EventArgs
public sealed class ReaderExtractionEventArgs<T> : EventArgs
{
internal ReaderExtractionEventArgs(T entry, ReaderProgress readerProgress = null)
internal ReaderExtractionEventArgs(T entry, ReaderProgress? readerProgress = null)
{
Item = entry;
ReaderProgress = readerProgress;
}
public T Item { get; }
public ReaderProgress ReaderProgress { get; }
public ReaderProgress? ReaderProgress { get; }
}
}


@@ -1,4 +1,6 @@
using System;
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Compressors.LZMA;
@@ -35,7 +37,7 @@ namespace SharpCompress.Common.SevenZip
_packSizes.Clear();
_packCrCs.Clear();
_folders.Clear();
_numUnpackStreamsVector = null;
_numUnpackStreamsVector = null!;
_files.Clear();
_packStreamStartPositions.Clear();
@@ -96,7 +98,7 @@ namespace SharpCompress.Common.SevenZip
_folderStartFileIndex.Add(i); // check it
if (_numUnpackStreamsVector[folderIndex] != 0)
if (_numUnpackStreamsVector![folderIndex] != 0)
{
break;
}
@@ -114,7 +116,7 @@ namespace SharpCompress.Common.SevenZip
indexInFolder++;
if (indexInFolder >= _numUnpackStreamsVector[folderIndex])
if (indexInFolder >= _numUnpackStreamsVector![folderIndex])
{
folderIndex++;
indexInFolder = 0;


@@ -1,4 +1,6 @@
using System;
#nullable disable
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
@@ -465,7 +467,7 @@ namespace SharpCompress.Common.SevenZip
SkipData();
}
if (packCrCs == null)
if (packCrCs is null)
{
packCrCs = new List<uint?>(numPackStreams);
for (int i = 0; i < numPackStreams; i++)
@@ -611,7 +613,7 @@ namespace SharpCompress.Common.SevenZip
SkipData();
}
if (numUnpackStreamsInFolders == null)
if (numUnpackStreamsInFolders is null)
{
numUnpackStreamsInFolders = new List<int>(folders.Count);
for (int i = 0; i < folders.Count; i++)
@@ -703,7 +705,7 @@ namespace SharpCompress.Common.SevenZip
}
else if (type == BlockType.End)
{
if (digests == null)
if (digests is null)
{
digests = new List<uint?>(numDigestsTotal);
for (int i = 0; i < numDigestsTotal; i++)
@@ -1487,17 +1489,12 @@ namespace SharpCompress.Common.SevenZip
public void Extract(ArchiveDatabase db, int[] indices)
{
int numItems;
bool allFilesMode = (indices == null);
if (allFilesMode)
{
numItems = db._files.Count;
}
else
{
numItems = indices.Length;
}
bool allFilesMode = (indices is null);
int numItems = allFilesMode
? db._files.Count
: indices.Length;
if (numItems == 0)
{
return;


@@ -1,4 +1,6 @@
namespace SharpCompress.Common.SevenZip
#nullable disable
namespace SharpCompress.Common.SevenZip
{
internal class CCoderInfo
{


@@ -1,4 +1,6 @@
using System;
#nullable disable
using System;
namespace SharpCompress.Common.SevenZip
{


@@ -18,7 +18,7 @@ namespace SharpCompress.Common.SevenZip
public override string Key => FilePart.Header.Name;
public override string LinkTarget => null;
public override string? LinkTarget => null;
public override long CompressedSize => 0;
@@ -38,7 +38,7 @@ namespace SharpCompress.Common.SevenZip
public override bool IsSplitAfter => false;
public override int? Attrib => (int)FilePart.Header.Attrib;
public override int? Attrib => FilePart.Header.Attrib.HasValue ? (int?)FilePart.Header.Attrib.Value : null;
internal override IEnumerable<FilePart> Parts => FilePart.AsEnumerable<FilePart>();
}


@@ -25,12 +25,12 @@ namespace SharpCompress.Common.SevenZip
}
internal CFileItem Header { get; }
internal CFolder Folder { get; }
internal CFolder? Folder { get; }
internal int Index { get; }
internal override string FilePartName => Header.Name;
internal override Stream GetRawStream()
internal override Stream? GetRawStream()
{
return null;
}
@@ -39,11 +39,11 @@ namespace SharpCompress.Common.SevenZip
{
if (!Header.HasStream)
{
return null;
return null!;
}
var folderStream = _database.GetFolderStream(_stream, Folder, _database.PasswordProvider);
var folderStream = _database.GetFolderStream(_stream, Folder!, _database.PasswordProvider);
int firstFileIndex = _database._folderStartFileIndex[_database._folders.IndexOf(Folder)];
int firstFileIndex = _database._folderStartFileIndex[_database._folders.IndexOf(Folder!)];
int skipCount = Index - firstFileIndex;
long skipSize = 0;
for (int i = 0; i < skipCount; i++)
@@ -61,7 +61,7 @@ namespace SharpCompress.Common.SevenZip
{
get
{
if (_type == null)
if (_type is null)
{
_type = GetCompression();
}
@@ -82,7 +82,7 @@ namespace SharpCompress.Common.SevenZip
internal CompressionType GetCompression()
{
var coder = Folder._coders.First();
var coder = Folder!._coders.First();
switch (coder._methodId._id)
{
case K_LZMA:


@@ -1,5 +1,4 @@
using System.IO;
using SharpCompress.Archives;
using SharpCompress.Readers;
namespace SharpCompress.Common.SevenZip


@@ -1,4 +1,6 @@
using System;
#nullable disable
using System;
using System.Buffers.Binary;
using System.IO;
using System.Text;
@@ -196,7 +198,7 @@ namespace SharpCompress.Common.Tar.Headers
{
name.CopyTo(buffer);
int i = Math.Min(length, name.Length);
buffer.Slice(i, length - i).Fill(0);
buffer.Slice(i, length - i).Clear();
}
private static void WriteStringBytes(string name, byte[] buffer, int offset, int length)


@@ -1,9 +1,10 @@
using System;
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Tar
{


@@ -4,7 +4,7 @@ using SharpCompress.IO;
namespace SharpCompress.Common.Tar
{
internal class TarFilePart : FilePart
internal sealed class TarFilePart : FilePart
{
private readonly Stream _seekableStream;
@@ -23,13 +23,13 @@ namespace SharpCompress.Common.Tar
{
if (_seekableStream != null)
{
_seekableStream.Position = Header.DataStartPosition.Value;
return new ReadOnlySubStream(_seekableStream, Header.Size);
_seekableStream.Position = Header.DataStartPosition!.Value;
return new TarReadOnlySubStream(_seekableStream, Header.Size);
}
return Header.PackedStream;
}
internal override Stream GetRawStream()
internal override Stream? GetRawStream()
{
return null;
}


@@ -2,17 +2,16 @@
using System.IO;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Tar
{
internal static class TarHeaderFactory
{
internal static IEnumerable<TarHeader> ReadHeader(StreamingMode mode, Stream stream, ArchiveEncoding archiveEncoding)
internal static IEnumerable<TarHeader?> ReadHeader(StreamingMode mode, Stream stream, ArchiveEncoding archiveEncoding)
{
while (true)
{
TarHeader header = null;
TarHeader? header = null;
try
{
BinaryReader reader = new BinaryReader(stream);


@@ -20,22 +20,24 @@ namespace SharpCompress.Common.Tar
{
return;
}
_isDisposed = true;
if (disposing)
{
long skipBytes = _amountRead % 512;
if (skipBytes == 0)
// Ensure we read all remaining blocks for this entry.
Stream.Skip(BytesLeftToRead);
_amountRead += BytesLeftToRead;
// If the last block wasn't a full 512 bytes, skip the remaining padding bytes.
var bytesInLastBlock = _amountRead % 512;
if (bytesInLastBlock != 0)
{
return;
Stream.Skip(512 - bytesInLastBlock);
}
skipBytes = 512 - skipBytes;
if (skipBytes == 0)
{
return;
}
var buffer = new byte[skipBytes];
Stream.ReadFully(buffer);
}
base.Dispose(disposing);
}


@@ -33,7 +33,7 @@ namespace SharpCompress.Common.Zip.Headers
public ushort CommentLength { get; private set; }
public byte[] Comment { get; private set; }
public byte[]? Comment { get; private set; }
public ushort TotalNumberOfEntries { get; private set; }


@@ -1,6 +1,5 @@
using System.IO;
using System.Linq;
using System.Text;
namespace SharpCompress.Common.Zip.Headers
{
@@ -93,6 +92,6 @@ namespace SharpCompress.Common.Zip.Headers
public ushort DiskNumberStart { get; set; }
public string Comment { get; private set; }
public string? Comment { get; private set; }
}
}


@@ -1,5 +1,4 @@
using System;
using System.IO;
using System.IO;
namespace SharpCompress.Common.Zip.Headers
{


@@ -1,6 +1,5 @@
using System.IO;
using System.Linq;
using System.Text;
namespace SharpCompress.Common.Zip.Headers
{


@@ -1,4 +1,6 @@
using System;
#nullable disable
using System;
using System.Buffers.Binary;
using System.Text;


@@ -1,5 +1,4 @@
using System;
using System.IO;
using System.IO;
namespace SharpCompress.Common.Zip.Headers
{
@@ -44,6 +43,6 @@ namespace SharpCompress.Common.Zip.Headers
public long DirectoryStartOffsetRelativeToDisk { get; private set; }
public byte[] DataSector { get; private set; }
public byte[]? DataSector { get; private set; }
}
}


@@ -1,4 +1,6 @@
using System;
#nullable disable
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
@@ -52,7 +54,7 @@ namespace SharpCompress.Common.Zip.Headers
internal PkwareTraditionalEncryptionData ComposeEncryptionData(Stream archiveStream)
{
if (archiveStream == null)
if (archiveStream is null)
{
throw new ArgumentNullException(nameof(archiveStream));
}
@@ -60,7 +62,7 @@ namespace SharpCompress.Common.Zip.Headers
var buffer = new byte[12];
archiveStream.ReadFully(buffer);
PkwareTraditionalEncryptionData encryptionData = PkwareTraditionalEncryptionData.ForRead(Password, this, buffer);
PkwareTraditionalEncryptionData encryptionData = PkwareTraditionalEncryptionData.ForRead(Password!, this, buffer);
return encryptionData;
}


@@ -40,7 +40,7 @@ namespace SharpCompress.Common.Zip
throw new NotSupportedException("This stream does not encrypt via Read()");
}
if (buffer == null)
if (buffer is null)
{
throw new ArgumentNullException(nameof(buffer));
}
@@ -64,7 +64,7 @@ namespace SharpCompress.Common.Zip
return;
}
byte[] plaintext = null;
byte[] plaintext;
if (offset != 0)
{
plaintext = new byte[count];


@@ -1,5 +1,4 @@
using System;
using System.Text;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Deflate;
@@ -65,9 +64,9 @@ namespace SharpCompress.Common.Zip
public byte[] Encrypt(byte[] plainText, int length)
{
if (plainText == null)
if (plainText is null)
{
throw new ArgumentNullException("plaintext");
throw new ArgumentNullException(nameof(plainText));
}
if (length > plainText.Length)


@@ -27,25 +27,25 @@ namespace SharpCompress.Common.Zip
return base.GetCompressedStream();
}
internal string Comment => (Header as DirectoryEntryHeader).Comment;
internal string? Comment => ((DirectoryEntryHeader)Header).Comment;
private void LoadLocalHeader()
{
bool hasData = Header.HasData;
Header = _headerFactory.GetLocalHeader(BaseStream, Header as DirectoryEntryHeader);
Header = _headerFactory.GetLocalHeader(BaseStream, ((DirectoryEntryHeader)Header));
Header.HasData = hasData;
}
protected override Stream CreateBaseStream()
{
BaseStream.Position = Header.DataStartPosition.Value;
BaseStream.Position = Header.DataStartPosition!.Value;
if ((Header.CompressedSize == 0)
&& FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor)
&& (_directoryEntryHeader?.HasData == true)
&& (_directoryEntryHeader?.CompressedSize != 0))
{
return new ReadOnlySubStream(BaseStream, _directoryEntryHeader.CompressedSize);
return new ReadOnlySubStream(BaseStream, _directoryEntryHeader!.CompressedSize);
}
return BaseStream;


@@ -3,16 +3,15 @@ using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Zip
{
internal class SeekableZipHeaderFactory : ZipHeaderFactory
internal sealed class SeekableZipHeaderFactory : ZipHeaderFactory
{
private const int MAX_ITERATIONS_FOR_DIRECTORY_HEADER = 4096;
private bool _zip64;
internal SeekableZipHeaderFactory(string password, ArchiveEncoding archiveEncoding)
internal SeekableZipHeaderFactory(string? password, ArchiveEncoding archiveEncoding)
: base(StreamingMode.Seekable, password, archiveEncoding)
{
}
@@ -35,7 +34,9 @@ namespace SharpCompress.Common.Zip
stream.Seek(zip64Locator.RelativeOffsetOfTheEndOfDirectoryRecord, SeekOrigin.Begin);
uint zip64Signature = reader.ReadUInt32();
if (zip64Signature != ZIP64_END_OF_CENTRAL_DIRECTORY)
{
throw new ArchiveException("Failed to locate the Zip64 Header");
}
var zip64Entry = new Zip64DirectoryEndHeader();
zip64Entry.Read(reader);
@@ -54,8 +55,10 @@ namespace SharpCompress.Common.Zip
var nextHeader = ReadHeader(signature, reader, _zip64);
position = stream.Position;
if (nextHeader == null)
if (nextHeader is null)
{
yield break;
}
if (nextHeader is DirectoryEntryHeader entryHeader)
{
@@ -99,7 +102,7 @@ namespace SharpCompress.Common.Zip
BinaryReader reader = new BinaryReader(stream);
uint signature = reader.ReadUInt32();
var localEntryHeader = ReadHeader(signature, reader, _zip64) as LocalEntryHeader;
if (localEntryHeader == null)
if (localEntryHeader is null)
{
throw new InvalidOperationException();
}


@@ -5,9 +5,9 @@ using SharpCompress.IO;
namespace SharpCompress.Common.Zip
{
internal class StreamingZipFilePart : ZipFilePart
internal sealed class StreamingZipFilePart : ZipFilePart
{
private Stream _decompressionStream;
private Stream? _decompressionStream;
internal StreamingZipFilePart(ZipFileEntry header, Stream stream)
: base(header, stream)
@@ -41,14 +41,11 @@ namespace SharpCompress.Common.Zip
}
if (Header.HasData && !Skipped)
{
if (_decompressionStream == null)
{
_decompressionStream = GetCompressedStream();
}
_decompressionStream ??= GetCompressedStream();
_decompressionStream.Skip();
DeflateStream deflateStream = _decompressionStream as DeflateStream;
if (deflateStream != null)
if (_decompressionStream is DeflateStream deflateStream)
{
rewindableStream.Rewind(deflateStream.InputBuffer);
}


@@ -2,13 +2,12 @@
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Zip
{
internal class StreamingZipHeaderFactory : ZipHeaderFactory
{
internal StreamingZipHeaderFactory(string password, ArchiveEncoding archiveEncoding)
internal StreamingZipHeaderFactory(string? password, ArchiveEncoding archiveEncoding)
: base(StreamingMode.Streaming, password, archiveEncoding)
{
}
@@ -16,9 +15,10 @@ namespace SharpCompress.Common.Zip
internal IEnumerable<ZipHeader> ReadStreamHeader(Stream stream)
{
RewindableStream rewindableStream;
if (stream is RewindableStream)
if (stream is RewindableStream rs)
{
rewindableStream = stream as RewindableStream;
rewindableStream = rs;
}
else
{
@@ -26,12 +26,12 @@ namespace SharpCompress.Common.Zip
}
while (true)
{
ZipHeader header = null;
ZipHeader? header;
BinaryReader reader = new BinaryReader(rewindableStream);
if (_lastEntryHeader != null &&
(FlagUtility.HasFlag(_lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor) || _lastEntryHeader.IsZip64))
{
reader = (_lastEntryHeader.Part as StreamingZipFilePart).FixStreamedFileLocation(ref rewindableStream);
reader = ((StreamingZipFilePart)_lastEntryHeader.Part).FixStreamedFileLocation(ref rewindableStream);
long? pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;
uint crc = reader.ReadUInt32();
if (crc == POST_DATA_DESCRIPTOR)
@@ -49,7 +49,7 @@ namespace SharpCompress.Common.Zip
_lastEntryHeader = null;
uint headerBytes = reader.ReadUInt32();
header = ReadHeader(headerBytes, reader);
if (header == null) { yield break; }
if (header is null) { yield break; }
//entry could be zero bytes so we need to know that.
if (header.ZipHeaderType == ZipHeaderType.LocalEntry)


@@ -1,3 +1,5 @@
#nullable disable
using System;
using System.Buffers.Binary;
using System.Security.Cryptography;
@@ -25,14 +27,9 @@ namespace SharpCompress.Common.Zip
Initialize();
}
internal byte[] IvBytes
{
get; set;
}
internal byte[] KeyBytes
{
get; set;
}
internal byte[] IvBytes { get; set; }
internal byte[] KeyBytes { get; set; }
private int KeySizeInBytes
{
@@ -42,16 +39,13 @@ namespace SharpCompress.Common.Zip
internal static int KeyLengthInBytes(WinzipAesKeySize keySize)
{
switch (keySize)
return keySize switch
{
case WinzipAesKeySize.KeySize128:
return 16;
case WinzipAesKeySize.KeySize192:
return 24;
case WinzipAesKeySize.KeySize256:
return 32;
}
throw new InvalidOperationException();
WinzipAesKeySize.KeySize128 => 16,
WinzipAesKeySize.KeySize192 => 24,
WinzipAesKeySize.KeySize256 => 32,
_ => throw new InvalidOperationException(),
};
}
private void Initialize()


@@ -1,4 +1,6 @@
using System;
#nullable disable
using System;
using System.Collections.Generic;
using SharpCompress.Common.Zip.Headers;


@@ -100,7 +100,7 @@ namespace SharpCompress.Common.Zip
case ZipCompressionMethod.WinzipAes:
{
ExtraData data = Header.Extra.Where(x => x.Type == ExtraDataType.WinZipAes).SingleOrDefault();
if (data == null)
if (data is null)
{
throw new InvalidFormatException("No Winzip AES extra data found.");
}


@@ -3,7 +3,6 @@ using System.IO;
using System.Linq;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Zip
{
@@ -19,19 +18,19 @@ namespace SharpCompress.Common.Zip
internal const uint ZIP64_END_OF_CENTRAL_DIRECTORY = 0x06064b50;
internal const uint ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR = 0x07064b50;
protected LocalEntryHeader _lastEntryHeader;
private readonly string _password;
protected LocalEntryHeader? _lastEntryHeader;
private readonly string? _password;
private readonly StreamingMode _mode;
private readonly ArchiveEncoding _archiveEncoding;
protected ZipHeaderFactory(StreamingMode mode, string password, ArchiveEncoding archiveEncoding)
protected ZipHeaderFactory(StreamingMode mode, string? password, ArchiveEncoding archiveEncoding)
{
this._mode = mode;
this._password = password;
this._archiveEncoding = archiveEncoding;
}
protected ZipHeader ReadHeader(uint headerBytes, BinaryReader reader, bool zip64 = false)
protected ZipHeader? ReadHeader(uint headerBytes, BinaryReader reader, bool zip64 = false)
{
switch (headerBytes)
{
@@ -52,7 +51,7 @@ namespace SharpCompress.Common.Zip
}
case POST_DATA_DESCRIPTOR:
{
if (FlagUtility.HasFlag(_lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor))
if (FlagUtility.HasFlag(_lastEntryHeader!.Flags, HeaderFlags.UsePostDataDescriptor))
{
_lastEntryHeader.Crc = reader.ReadUInt32();
_lastEntryHeader.CompressedSize = zip64 ? (long)reader.ReadUInt64() : reader.ReadUInt32();
@@ -121,7 +120,7 @@ namespace SharpCompress.Common.Zip
throw new NotSupportedException("SharpCompress cannot currently read non-seekable Zip Streams with encrypted data that has been written in a non-seekable manner.");
}
if (_password == null)
if (_password is null)
{
throw new CryptographicException("No password supplied for encrypted zip.");
}


@@ -10,6 +10,6 @@ namespace SharpCompress.Common.Zip
{
}
public string Comment { get; internal set; }
public string? Comment { get; internal set; }
}
}


@@ -87,7 +87,7 @@ namespace SharpCompress.Compressors.ADC
/// <param name="output">Buffer to hold decompressed data</param>
/// <param name="bufferSize">Max size for decompressed data</param>
/// <returns>How many bytes are stored on <paramref name="output"/></returns>
public static int Decompress(byte[] input, out byte[] output, int bufferSize = 262144)
public static int Decompress(byte[] input, out byte[]? output, int bufferSize = 262144)
{
return Decompress(new MemoryStream(input), out output, bufferSize);
}
@@ -99,11 +99,11 @@ namespace SharpCompress.Compressors.ADC
/// <param name="output">Buffer to hold decompressed data</param>
/// <param name="bufferSize">Max size for decompressed data</param>
/// <returns>How many bytes are stored on <paramref name="output"/></returns>
public static int Decompress(Stream input, out byte[] output, int bufferSize = 262144)
public static int Decompress(Stream input, out byte[]? output, int bufferSize = 262144)
{
output = null;
if (input == null || input.Length == 0)
if (input is null || input.Length == 0)
{
return 0;
}
@@ -211,7 +211,7 @@ namespace SharpCompress.Compressors.ADC
}
output = new byte[outPosition];
Array.Copy(buffer, 0, output, 0, outPosition);
Array.Copy(buffer, output, outPosition);
return position - start;
}
}
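
For reference, a hedged usage sketch of the Decompress overloads documented above. `ADCBase` is the type name used elsewhere in this diff, the input path is purely hypothetical, and the type's accessibility is not shown here, so treat this as an outline rather than a verified call:

using System;
using System.IO;
using SharpCompress.Compressors.ADC;

// Hypothetical input file; a real caller would supply genuine ADC-compressed bytes.
byte[] compressed = File.ReadAllBytes("sample.adc");

int consumed = ADCBase.Decompress(compressed, out byte[]? output);
Console.WriteLine($"consumed {consumed} input bytes, produced {output?.Length ?? 0} output bytes");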


@@ -23,6 +23,9 @@
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
#nullable disable
using System;
using System.IO;
@@ -31,7 +34,7 @@ namespace SharpCompress.Compressors.ADC
/// <summary>
/// Provides a forward readable only stream that decompresses ADC data
/// </summary>
public class ADCStream : Stream
public sealed class ADCStream : Stream
{
/// <summary>
/// This stream holds the compressed data
@@ -103,7 +106,7 @@ namespace SharpCompress.Compressors.ADC
{
return 0;
}
if (buffer == null)
if (buffer is null)
{
throw new ArgumentNullException(nameof(buffer));
}
@@ -122,7 +125,7 @@ namespace SharpCompress.Compressors.ADC
int size = -1;
if (_outBuffer == null)
if (_outBuffer is null)
{
size = ADCBase.Decompress(_stream, out _outBuffer);
_outPosition = 0;
@@ -142,7 +145,7 @@ namespace SharpCompress.Compressors.ADC
toCopy -= piece;
size = ADCBase.Decompress(_stream, out _outBuffer);
_outPosition = 0;
if (size == 0 || _outBuffer == null || _outBuffer.Length == 0)
if (size == 0 || _outBuffer is null || _outBuffer.Length == 0)
{
return copied;
}


@@ -1,4 +1,6 @@
using System.IO;
#nullable disable
using System.IO;
/*
* Copyright 2001,2004-2005 The Apache Software Foundation


@@ -22,6 +22,8 @@
* great code.
*/
#nullable disable
namespace SharpCompress.Compressors.BZip2
{
/**
@@ -35,14 +37,14 @@ namespace SharpCompress.Compressors.BZip2
* start of the BZIP2 stream to make it compatible with other PGP programs.
*/
internal class CBZip2OutputStream : Stream
internal sealed class CBZip2OutputStream : Stream
{
protected const int SETMASK = (1 << 21);
protected const int CLEARMASK = (~SETMASK);
protected const int GREATER_ICOST = 15;
protected const int LESSER_ICOST = 0;
protected const int SMALL_THRESH = 20;
protected const int DEPTH_THRESH = 10;
private const int SETMASK = (1 << 21);
private const int CLEARMASK = (~SETMASK);
private const int GREATER_ICOST = 15;
private const int LESSER_ICOST = 0;
private const int SMALL_THRESH = 20;
private const int DEPTH_THRESH = 10;
/*
If you are ever unlucky/improbable enough
@@ -52,7 +54,7 @@ namespace SharpCompress.Compressors.BZip2
stack go above 27 elems, so the following
limit seems very generous.
*/
protected const int QSORT_STACK_SIZE = 1000;
private const int QSORT_STACK_SIZE = 1000;
private bool finished;
private static void Panic()
@@ -76,7 +78,7 @@ namespace SharpCompress.Compressors.BZip2
}
}
protected static void HbMakeCodeLengths(char[] len, int[] freq,
private static void HbMakeCodeLengths(char[] len, int[] freq,
int alphaSize, int maxLen)
{
/*
@@ -1786,8 +1788,7 @@ namespace SharpCompress.Compressors.BZip2
zptr = new int[n];
ftab = new int[65537];
if (block == null || quadrant == null || zptr == null
|| ftab == null)
if (block is null || quadrant is null || zptr is null || ftab is null)
{
//int totalDraw = (n + 1 + NUM_OVERSHOOT_BYTES) + (n + NUM_OVERSHOOT_BYTES) + n + 65537;
//compressOutOfMemory ( totalDraw, n );


@@ -111,9 +111,9 @@ namespace SharpCompress.Compressors.Deflate
/// <param name="input">The stream over which to calculate the CRC32</param>
/// <param name="output">The stream into which to deflate the input</param>
/// <returns>the CRC32 calculation</returns>
public UInt32 GetCrc32AndCopy(Stream input, Stream output)
public UInt32 GetCrc32AndCopy(Stream input, Stream? output)
{
if (input == null)
if (input is null)
{
throw new ZlibException("The input stream must not be null.");
}
@@ -159,7 +159,7 @@ namespace SharpCompress.Compressors.Deflate
return _InternalComputeCrc32((UInt32)W, B);
}
internal Int32 _InternalComputeCrc32(UInt32 W, byte B)
internal int _InternalComputeCrc32(UInt32 W, byte B)
{
return (Int32)(crc32Table[(W ^ B) & 0xFF] ^ (W >> 8));
}
@@ -173,7 +173,7 @@ namespace SharpCompress.Compressors.Deflate
/// <param name="count">how many bytes within the block to slurp</param>
public void SlurpBlock(byte[] block, int offset, int count)
{
if (block == null)
if (block is null)
{
throw new ZlibException("The data buffer must not be null.");
}

View File

@@ -1,3 +1,5 @@
#nullable disable
// Deflate.cs
// ------------------------------------------------------------------
//
@@ -1756,7 +1758,7 @@ namespace SharpCompress.Compressors.Deflate
int length = dictionary.Length;
int index = 0;
if (dictionary == null || status != INIT_STATE)
if (dictionary is null || status != INIT_STATE)
{
throw new ZlibException("Stream error.");
}
@@ -1796,8 +1798,8 @@ namespace SharpCompress.Compressors.Deflate
{
int old_flush;
if (_codec.OutputBuffer == null ||
(_codec.InputBuffer == null && _codec.AvailableBytesIn != 0) ||
if (_codec.OutputBuffer is null ||
(_codec.InputBuffer is null && _codec.AvailableBytesIn != 0) ||
(status == FINISH_STATE && flush != FlushType.Finish))
{
_codec.Message = _ErrorMessage[ZlibConstants.Z_NEED_DICT - (ZlibConstants.Z_STREAM_ERROR)];


@@ -37,7 +37,7 @@ namespace SharpCompress.Compressors.Deflate
public DeflateStream(Stream stream, CompressionMode mode,
CompressionLevel level = CompressionLevel.Default,
Encoding forceEncoding = null)
Encoding? forceEncoding = null)
{
_baseStream = new ZlibBaseStream(stream, mode, level, ZlibStreamFlavor.DEFLATE, forceEncoding);
}


@@ -39,8 +39,8 @@ namespace SharpCompress.Compressors.Deflate
public DateTime? LastModified { get; set; }
private string _comment;
private string _fileName;
private string? _comment;
private string? _fileName;
internal ZlibBaseStream BaseStream;
private bool _disposed;
@@ -345,44 +345,43 @@ namespace SharpCompress.Compressors.Deflate
#endregion Stream methods
public String Comment
public string? Comment
{
get => _comment;
set
{
if (_disposed)
{
throw new ObjectDisposedException("GZipStream");
throw new ObjectDisposedException(nameof(GZipStream));
}
_comment = value;
}
}
public string FileName
public string? FileName
{
get => _fileName;
set
{
if (_disposed)
{
throw new ObjectDisposedException("GZipStream");
throw new ObjectDisposedException(nameof(GZipStream));
}
_fileName = value;
if (_fileName == null)
if (_fileName is null)
{
return;
}
if (_fileName.IndexOf("/") != -1)
if (_fileName.Contains("/"))
{
_fileName = _fileName.Replace("/", "\\");
_fileName = _fileName.Replace('/', '\\');
}
if (_fileName.EndsWith("\\"))
{
throw new InvalidOperationException("Illegal filename");
}
var index = _fileName.IndexOf("\\");
if (index != -1)
if (_fileName.Contains("\\"))
{
// trim any leading path
int length = _fileName.Length;
@@ -403,13 +402,13 @@ namespace SharpCompress.Compressors.Deflate
private int EmitHeader()
{
byte[] commentBytes = (Comment == null) ? null
byte[]? commentBytes = (Comment is null) ? null
: _encoding.GetBytes(Comment);
byte[] filenameBytes = (FileName == null) ? null
byte[]? filenameBytes = (FileName is null) ? null
: _encoding.GetBytes(FileName);
int cbLength = (Comment == null) ? 0 : commentBytes.Length + 1;
int fnLength = (FileName == null) ? 0 : filenameBytes.Length + 1;
int cbLength = (commentBytes is null) ? 0 : commentBytes.Length + 1;
int fnLength = (filenameBytes is null) ? 0 : filenameBytes.Length + 1;
int bufferLength = 10 + cbLength + fnLength;
var header = new byte[bufferLength];
@@ -440,7 +439,7 @@ namespace SharpCompress.Compressors.Deflate
LastModified = DateTime.Now;
}
TimeSpan delta = LastModified.Value - UNIX_EPOCH;
var timet = (Int32)delta.TotalSeconds;
var timet = (int)delta.TotalSeconds;
BinaryPrimitives.WriteInt32LittleEndian(header.AsSpan(i), timet);
i += 4;
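
EmitHeader sizes its buffer as a fixed 10-byte gzip header plus the optional FNAME and FCOMMENT fields, each written as a zero-terminated string (hence the `+ 1` in cbLength and fnLength). A standalone sketch of that sizing, assuming Latin-1 for the optional fields rather than the stream's configurable `_encoding`:

using System;
using System.Text;

Console.WriteLine(HeaderLength(null, null));          // 10 – just the fixed header
Console.WriteLine(HeaderLength("data.txt", null));    // 19 – 10 + 8 name bytes + 1 terminator

static int HeaderLength(string? fileName, string? comment) =>
    10
    + (fileName is null ? 0 : Encoding.Latin1.GetByteCount(fileName) + 1)
    + (comment is null ? 0 : Encoding.Latin1.GetByteCount(comment) + 1);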


@@ -59,6 +59,8 @@
//
// -----------------------------------------------------------------------
#nullable disable
using System;
namespace SharpCompress.Compressors.Deflate
@@ -542,7 +544,7 @@ namespace SharpCompress.Compressors.Deflate
private void initWorkArea(int vsize)
{
if (hn == null)
if (hn is null)
{
hn = new int[1];
v = new int[vsize];


@@ -1,3 +1,5 @@
#nullable disable
// Inflate.cs
// ------------------------------------------------------------------
//
@@ -367,7 +369,7 @@ namespace SharpCompress.Compressors.Deflate
return Flush(r);
}
t = 258 + (t & 0x1f) + ((t >> 5) & 0x1f);
if (blens == null || blens.Length < t)
if (blens is null || blens.Length < t)
{
blens = new int[t];
}
@@ -1651,7 +1653,7 @@ namespace SharpCompress.Compressors.Deflate
{
int b;
if (_codec.InputBuffer == null)
if (_codec.InputBuffer is null)
{
throw new ZlibException("InputBuffer is null. ");
}


@@ -1,3 +1,5 @@
#nullable disable
// Tree.cs
// ------------------------------------------------------------------
//


@@ -380,13 +380,13 @@ namespace SharpCompress.Compressors.Deflate
internal static readonly StaticTree Distances;
internal static readonly StaticTree BitLengths;
internal short[] treeCodes; // static tree or null
internal int[] extraBits; // extra bits for each code or null
internal short[]? treeCodes; // static tree or null
internal int[]? extraBits; // extra bits for each code or null
internal int extraBase; // base index for extra_bits
internal int elems; // max number of elements in the tree
internal int maxLength; // max bit length for the codes
private StaticTree(short[] treeCodes, int[] extraBits, int extraBase, int elems, int maxLength)
private StaticTree(short[]? treeCodes, int[]? extraBits, int extraBase, int elems, int maxLength)
{
this.treeCodes = treeCodes;
this.extraBits = extraBits;
@@ -423,9 +423,9 @@ namespace SharpCompress.Compressors.Deflate
// NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1
private static readonly int NMAX = 5552;
internal static uint Adler32(uint adler, byte[] buf, int index, int len)
internal static uint Adler32(uint adler, byte[]? buf, int index, int len)
{
if (buf == null)
if (buf is null)
{
return 1;
}
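
The NMAX comment above can be verified directly: with BASE = 65521 (the Adler-32 modulus), 5552 is indeed the largest n for which 255*n*(n+1)/2 + (n+1)*(BASE-1) still fits in an unsigned 32-bit value. A small standalone check (not repository code):

using System;

Console.WriteLine(Fits(5552)); // True
Console.WriteLine(Fits(5553)); // False

// 65520 = BASE - 1, where BASE = 65521 is the Adler-32 modulus.
static bool Fits(long n) => 255 * n * (n + 1) / 2 + (n + 1) * 65520L <= uint.MaxValue;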


@@ -1,3 +1,5 @@
#nullable disable
// ZlibBaseStream.cs
// ------------------------------------------------------------------
//
@@ -65,17 +67,7 @@ namespace SharpCompress.Compressors.Deflate
private readonly Encoding _encoding;
internal int Crc32
{
get
{
if (crc == null)
{
return 0;
}
return crc.Crc32Result;
}
}
internal int Crc32 => crc?.Crc32Result ?? 0;
public ZlibBaseStream(Stream stream,
CompressionMode compressionMode,
@@ -106,7 +98,7 @@ namespace SharpCompress.Compressors.Deflate
{
get
{
if (_z == null)
if (_z is null)
{
bool wantRfc1950Header = (_flavor == ZlibStreamFlavor.ZLIB);
_z = new ZlibCodec();
@@ -126,17 +118,10 @@ namespace SharpCompress.Compressors.Deflate
private byte[] workingBuffer
{
get
{
if (_workingBuffer == null)
{
_workingBuffer = new byte[_bufferSize];
}
return _workingBuffer;
}
get => _workingBuffer ??= new byte[_bufferSize];
}
public override void Write(Byte[] buffer, int offset, int count)
public override void Write(byte[] buffer, int offset, int count)
{
// workitem 7159
// calculate the CRC on the uncompressed data (before writing)
@@ -193,7 +178,7 @@ namespace SharpCompress.Compressors.Deflate
private void finish()
{
if (_z == null)
if (_z is null)
{
return;
}
@@ -213,7 +198,7 @@ namespace SharpCompress.Compressors.Deflate
if (rc != ZlibConstants.Z_STREAM_END && rc != ZlibConstants.Z_OK)
{
string verb = (_wantCompress ? "de" : "in") + "flating";
if (_z.Message == null)
if (_z.Message is null)
{
throw new ZlibException(String.Format("{0}: (rc = {1})", verb, rc));
}
@@ -323,7 +308,7 @@ namespace SharpCompress.Compressors.Deflate
private void end()
{
if (z == null)
if (z is null)
{
return;
}
@@ -348,7 +333,7 @@ namespace SharpCompress.Compressors.Deflate
base.Dispose(disposing);
if (disposing)
{
if (_stream == null)
if (_stream is null)
{
return;
}
@@ -527,7 +512,7 @@ namespace SharpCompress.Compressors.Deflate
{
return 0; // workitem 8557
}
if (buffer == null)
if (buffer is null)
{
throw new ArgumentNullException(nameof(buffer));
}

Some files were not shown because too many files have changed in this diff.