Mirror of https://github.com/adamhathcock/sharpcompress.git (synced 2026-02-04 13:34:59 +00:00)
Compare commits: 169 commits
.config/dotnet-tools.json (new file, 12 lines)
@@ -0,0 +1,12 @@
{
  "version": 1,
  "isRoot": true,
  "tools": {
    "dotnet-format": {
      "version": "4.1.131201",
      "commands": [
        "dotnet-format"
      ]
    }
  }
}
.github/workflows/dotnetcore.yml (vendored, 15 changed lines)
@@ -1,5 +1,5 @@
name: SharpCompress
on: [push]
on: [push, pull_request]

jobs:
build:
@@ -12,6 +12,13 @@ jobs:
- uses: actions/checkout@v1
- uses: actions/setup-dotnet@v1
with:
dotnet-version: 3.1.100
- name: Run the Cake script
uses: ecampidoglio/cake-action@master
dotnet-version: 5.0.101
- run: dotnet run -p build/build.csproj
- uses: actions/upload-artifact@v2
with:
name: ${{ matrix.os }}-sharpcompress.nupkg
path: artifacts/*
- uses: actions/upload-artifact@v2
with:
name: ${{ matrix.os }}-sharpcompress.snupkg
path: artifacts/*
.gitignore (vendored, 1 changed line)
@@ -17,3 +17,4 @@ tools
.idea/

.DS_Store
*.snupkg
README.md
@@ -1,12 +1,9 @@
# SharpCompress

SharpCompress is a compression library in pure C# for .NET Standard 1.3 and 2.0 that can unrar, un7zip, unzip, untar unbzip2 and ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip are implemented.
SharpCompress is a compression library in pure C# for .NET Standard 2.0, 2.1, .NET Core 3.1 and .NET 5.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.

The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).

AppVeyor Build -
[](https://ci.appveyor.com/project/adamhathcock/sharpcompress/branch/master)

GitHub Actions Build -
[](https://circleci.com/gh/adamhathcock/sharpcompress)
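For readers of the README paragraph above, a minimal sketch of the documented forward-only reader API on the synchronous releases looks like this; the file paths are placeholders, and the async rework elsewhere in this compare may change these shapes.

using System.IO;
using SharpCompress.Common;
using SharpCompress.Readers;

// Stream a .tar.gz forward-only and extract file entries as they are reached.
using (Stream stream = File.OpenRead("example.tar.gz"))
using (var reader = ReaderFactory.Open(stream))
{
    while (reader.MoveToNextEntry())
    {
        if (!reader.Entry.IsDirectory)
        {
            reader.WriteEntryToDirectory("output",
                new ExtractionOptions { ExtractFullPath = true, Overwrite = true });
        }
    }
}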
sharpcompress.sln
@@ -13,6 +13,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SharpCompress", "src\SharpC
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SharpCompress.Test", "tests\SharpCompress.Test\SharpCompress.Test.csproj", "{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "build", "build\build.csproj", "{D4D613CB-5E94-47FB-85BE-B8423D20C545}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -27,6 +29,10 @@ Global
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Debug|Any CPU.Build.0 = Debug|Any CPU
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Release|Any CPU.ActiveCfg = Release|Any CPU
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Release|Any CPU.Build.0 = Release|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Debug|Any CPU.Build.0 = Debug|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Release|Any CPU.ActiveCfg = Release|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@@ -126,4 +126,7 @@
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EAddAccessorOwnerDeclarationBracesMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002ECSharpPlaceAttributeOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateBlankLinesAroundFieldToBlankLinesAroundProperty/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateThisQualifierSettings/@EntryIndexedValue">True</s:Boolean></wpf:ResourceDictionary>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateThisQualifierSettings/@EntryIndexedValue">True</s:Boolean>
<s:String x:Key="/Default/Environment/UnitTesting/UnitTestSessionStore/Sessions/=6af8f80e_002D9fdd_002D4223_002D8e02_002D473db916f9b2/@EntryIndexedValue"><SessionState ContinuousTestingIsOn="False" ContinuousTestingMode="0" FrameworkVersion="{x:Null}" IsLocked="False" Name="All tests from Solution" PlatformMonoPreference="{x:Null}" PlatformType="{x:Null}" xmlns="urn:schemas-jetbrains-com:jetbrains-ut-session" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml">
<Solution />
</SessionState></s:String></wpf:ResourceDictionary>
appveyor.yml (deleted, 20 lines)
@@ -1,20 +0,0 @@
version: '{build}'
image: Visual Studio 2019

pull_requests:
  do_not_increment_build_number: true

branches:
  only:
    - master

nuget:
  disable_publish_on_pr: true

build_script:
  - ps: .\build.ps1

test: off

artifacts:
  - path: src\SharpCompress\bin\Release\*.nupkg
build.cake (deleted, 89 lines)
@@ -1,89 +0,0 @@
var target = Argument("target", "Default");
var tag = Argument("tag", "cake");

Task("Restore")
.Does(() =>
{
DotNetCoreRestore(".");
});

Task("Build")
.IsDependentOn("Restore")
.Does(() =>
{
if (IsRunningOnWindows())
{
MSBuild("./sharpcompress.sln", c =>
{
c.SetConfiguration("Release")
.SetVerbosity(Verbosity.Minimal)
.UseToolVersion(MSBuildToolVersion.VS2019);
});
}
else
{
var settings = new DotNetCoreBuildSettings
{
Framework = "netstandard1.3",
Configuration = "Release",
NoRestore = true
};

DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);

settings.Framework = "netstandard2.0";
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);

settings.Framework = "netstandard2.1";
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
}
});

Task("Test")
.IsDependentOn("Build")
.Does(() =>
{
var files = GetFiles("tests/**/*.csproj");
foreach(var file in files)
{
var settings = new DotNetCoreTestSettings
{
Configuration = "Release",
Framework = "netcoreapp3.1"
};
DotNetCoreTest(file.ToString(), settings);
}
});

Task("Pack")
.IsDependentOn("Build")
.Does(() =>
{
if (IsRunningOnWindows())
{
MSBuild("src/SharpCompress/SharpCompress.csproj", c => c
.SetConfiguration("Release")
.SetVerbosity(Verbosity.Minimal)
.UseToolVersion(MSBuildToolVersion.VS2019)
.WithProperty("NoBuild", "true")
.WithTarget("Pack"));
}
else
{
Information("Skipping Pack as this is not Windows");
}
});

Task("Default")
.IsDependentOn("Restore")
.IsDependentOn("Build")
.IsDependentOn("Test")
.IsDependentOn("Pack");

Task("RunTests")
.IsDependentOn("Restore")
.IsDependentOn("Build")
.IsDependentOn("Test");

RunTarget(target);
build.ps1 (deleted, 228 lines)
@@ -1,228 +0,0 @@
##########################################################################
# This is the Cake bootstrapper script for PowerShell.
# This file was downloaded from https://github.com/cake-build/resources
# Feel free to change this file to fit your needs.
##########################################################################

<#

.SYNOPSIS
This is a Powershell script to bootstrap a Cake build.

.DESCRIPTION
This Powershell script will download NuGet if missing, restore NuGet tools (including Cake)
and execute your Cake build script with the parameters you provide.

.PARAMETER Script
The build script to execute.
.PARAMETER Target
The build script target to run.
.PARAMETER Configuration
The build configuration to use.
.PARAMETER Verbosity
Specifies the amount of information to be displayed.
.PARAMETER Experimental
Tells Cake to use the latest Roslyn release.
.PARAMETER WhatIf
Performs a dry run of the build script.
No tasks will be executed.
.PARAMETER Mono
Tells Cake to use the Mono scripting engine.
.PARAMETER SkipToolPackageRestore
Skips restoring of packages.
.PARAMETER ScriptArgs
Remaining arguments are added here.

.LINK
http://cakebuild.net

#>

[CmdletBinding()]
Param(
[string]$Script = "build.cake",
[string]$Target = "Default",
[ValidateSet("Release", "Debug")]
[string]$Configuration = "Release",
[ValidateSet("Quiet", "Minimal", "Normal", "Verbose", "Diagnostic")]
[string]$Verbosity = "Verbose",
[switch]$Experimental,
[Alias("DryRun","Noop")]
[switch]$WhatIf,
[switch]$Mono,
[switch]$SkipToolPackageRestore,
[Parameter(Position=0,Mandatory=$false,ValueFromRemainingArguments=$true)]
[string[]]$ScriptArgs
)

[Reflection.Assembly]::LoadWithPartialName("System.Security") | Out-Null
function MD5HashFile([string] $filePath)
{
if ([string]::IsNullOrEmpty($filePath) -or !(Test-Path $filePath -PathType Leaf))
{
return $null
}

[System.IO.Stream] $file = $null;
[System.Security.Cryptography.MD5] $md5 = $null;
try
{
$md5 = [System.Security.Cryptography.MD5]::Create()
$file = [System.IO.File]::OpenRead($filePath)
return [System.BitConverter]::ToString($md5.ComputeHash($file))
}
finally
{
if ($file -ne $null)
{
$file.Dispose()
}
}
}

Write-Host "Preparing to run build script..."

if(!$PSScriptRoot){
$PSScriptRoot = Split-Path $MyInvocation.MyCommand.Path -Parent
}

$TOOLS_DIR = Join-Path $PSScriptRoot "tools"
$ADDINS_DIR = Join-Path $TOOLS_DIR "addins"
$MODULES_DIR = Join-Path $TOOLS_DIR "modules"
$NUGET_EXE = Join-Path $TOOLS_DIR "nuget.exe"
$CAKE_EXE = Join-Path $TOOLS_DIR "Cake/Cake.exe"
$NUGET_URL = "https://dist.nuget.org/win-x86-commandline/latest/nuget.exe"
$PACKAGES_CONFIG = Join-Path $TOOLS_DIR "packages.config"
$PACKAGES_CONFIG_MD5 = Join-Path $TOOLS_DIR "packages.config.md5sum"
$ADDINS_PACKAGES_CONFIG = Join-Path $ADDINS_DIR "packages.config"
$MODULES_PACKAGES_CONFIG = Join-Path $MODULES_DIR "packages.config"

# Should we use mono?
$UseMono = "";
if($Mono.IsPresent) {
Write-Verbose -Message "Using the Mono based scripting engine."
$UseMono = "-mono"
}

# Should we use the new Roslyn?
$UseExperimental = "";
if($Experimental.IsPresent -and !($Mono.IsPresent)) {
Write-Verbose -Message "Using experimental version of Roslyn."
$UseExperimental = "-experimental"
}

# Is this a dry run?
$UseDryRun = "";
if($WhatIf.IsPresent) {
$UseDryRun = "-dryrun"
}

# Make sure tools folder exists
if ((Test-Path $PSScriptRoot) -and !(Test-Path $TOOLS_DIR)) {
Write-Verbose -Message "Creating tools directory..."
New-Item -Path $TOOLS_DIR -Type directory | out-null
}

# Make sure that packages.config exist.
if (!(Test-Path $PACKAGES_CONFIG)) {
Write-Verbose -Message "Downloading packages.config..."
try { (New-Object System.Net.WebClient).DownloadFile("http://cakebuild.net/download/bootstrapper/packages", $PACKAGES_CONFIG) } catch {
Throw "Could not download packages.config."
}
}

# Try find NuGet.exe in path if not exists
if (!(Test-Path $NUGET_EXE)) {
Write-Verbose -Message "Trying to find nuget.exe in PATH..."
$existingPaths = $Env:Path -Split ';' | Where-Object { (![string]::IsNullOrEmpty($_)) -and (Test-Path $_ -PathType Container) }
$NUGET_EXE_IN_PATH = Get-ChildItem -Path $existingPaths -Filter "nuget.exe" | Select -First 1
if ($NUGET_EXE_IN_PATH -ne $null -and (Test-Path $NUGET_EXE_IN_PATH.FullName)) {
Write-Verbose -Message "Found in PATH at $($NUGET_EXE_IN_PATH.FullName)."
$NUGET_EXE = $NUGET_EXE_IN_PATH.FullName
}
}

# Try download NuGet.exe if not exists
if (!(Test-Path $NUGET_EXE)) {
Write-Verbose -Message "Downloading NuGet.exe..."
try {
(New-Object System.Net.WebClient).DownloadFile($NUGET_URL, $NUGET_EXE)
} catch {
Throw "Could not download NuGet.exe."
}
}

# Save nuget.exe path to environment to be available to child processed
$ENV:NUGET_EXE = $NUGET_EXE

# Restore tools from NuGet?
if(-Not $SkipToolPackageRestore.IsPresent) {
Push-Location
Set-Location $TOOLS_DIR

# Check for changes in packages.config and remove installed tools if true.
[string] $md5Hash = MD5HashFile($PACKAGES_CONFIG)
if((!(Test-Path $PACKAGES_CONFIG_MD5)) -Or
($md5Hash -ne (Get-Content $PACKAGES_CONFIG_MD5 ))) {
Write-Verbose -Message "Missing or changed package.config hash..."
Remove-Item * -Recurse -Exclude packages.config,nuget.exe
}

Write-Verbose -Message "Restoring tools from NuGet..."
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$TOOLS_DIR`""

if ($LASTEXITCODE -ne 0) {
Throw "An error occured while restoring NuGet tools."
}
else
{
$md5Hash | Out-File $PACKAGES_CONFIG_MD5 -Encoding "ASCII"
}
Write-Verbose -Message ($NuGetOutput | out-string)

Pop-Location
}

# Restore addins from NuGet
if (Test-Path $ADDINS_PACKAGES_CONFIG) {
Push-Location
Set-Location $ADDINS_DIR

Write-Verbose -Message "Restoring addins from NuGet..."
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$ADDINS_DIR`""

if ($LASTEXITCODE -ne 0) {
Throw "An error occured while restoring NuGet addins."
}

Write-Verbose -Message ($NuGetOutput | out-string)

Pop-Location
}

# Restore modules from NuGet
if (Test-Path $MODULES_PACKAGES_CONFIG) {
Push-Location
Set-Location $MODULES_DIR

Write-Verbose -Message "Restoring modules from NuGet..."
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$MODULES_DIR`""

if ($LASTEXITCODE -ne 0) {
Throw "An error occured while restoring NuGet modules."
}

Write-Verbose -Message ($NuGetOutput | out-string)

Pop-Location
}

# Make sure that Cake has been installed.
if (!(Test-Path $CAKE_EXE)) {
Throw "Could not find Cake.exe at $CAKE_EXE"
}

# Start Cake
Write-Host "Running build script..."
Invoke-Expression "& `"$CAKE_EXE`" `"$Script`" -target=`"$Target`" -configuration=`"$Configuration`" -verbosity=`"$Verbosity`" $UseMono $UseDryRun $UseExperimental $ScriptArgs"
exit $LASTEXITCODE
build.sh (deleted, 42 lines)
@@ -1,42 +0,0 @@
#!/usr/bin/env bash
##########################################################################
# This is the Cake bootstrapper script for Linux and OS X.
# This file was downloaded from https://github.com/cake-build/resources
# Feel free to change this file to fit your needs.
##########################################################################

# Define directories.
SCRIPT_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
TOOLS_DIR=$SCRIPT_DIR/tools
CAKE_VERSION=0.27.1
CAKE_DLL=$TOOLS_DIR/Cake.CoreCLR.$CAKE_VERSION/Cake.dll

# Make sure the tools folder exist.
if [ ! -d "$TOOLS_DIR" ]; then
mkdir "$TOOLS_DIR"
fi

###########################################################################
# INSTALL CAKE
###########################################################################

if [ ! -f "$CAKE_DLL" ]; then
curl -Lsfo Cake.CoreCLR.zip "https://www.nuget.org/api/v2/package/Cake.CoreCLR/$CAKE_VERSION" && unzip -q Cake.CoreCLR.zip -d "$TOOLS_DIR/Cake.CoreCLR.$CAKE_VERSION" && rm -f Cake.CoreCLR.zip
if [ $? -ne 0 ]; then
echo "An error occured while installing Cake."
exit 1
fi
fi

# Make sure that Cake has been installed.
if [ ! -f "$CAKE_DLL" ]; then
echo "Could not find Cake.exe at '$CAKE_DLL'."
exit 1
fi

###########################################################################
# RUN BUILD SCRIPT
###########################################################################

# Start Cake
exec dotnet "$CAKE_DLL" "$@"
build/Program.cs (new file, 83 lines)
@@ -0,0 +1,83 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.InteropServices;
using GlobExpressions;
using static Bullseye.Targets;
using static SimpleExec.Command;

class Program
{
private const string Clean = "clean";
private const string Format = "format";
private const string Build = "build";
private const string Test = "test";
private const string Publish = "publish";

static void Main(string[] args)
{
Target(Clean,
ForEach("**/bin", "**/obj"),
dir =>
{
IEnumerable<string> GetDirectories(string d)
{
return Glob.Directories(".", d);
}

void RemoveDirectory(string d)
{
if (Directory.Exists(d))
{
Console.WriteLine(d);
Directory.Delete(d, true);
}
}

foreach (var d in GetDirectories(dir))
{
RemoveDirectory(d);
}
});

Target(Format, () =>
{
Run("dotnet", "tool restore");
Run("dotnet", "format --check");
});

Target(Build, DependsOn(Format),
framework =>
{
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows) && framework == "net46")
{
return;
}
Run("dotnet", "build src/SharpCompress/SharpCompress.csproj -c Release");
});

Target(Test, DependsOn(Build), ForEach("net5.0"),
framework =>
{
IEnumerable<string> GetFiles(string d)
{
return Glob.Files(".", d);
}

foreach (var file in GetFiles("**/*.Test.csproj"))
{
Run("dotnet", $"test {file} -c Release -f {framework}");
}
});

Target(Publish, DependsOn(Test),
() =>
{
Run("dotnet", "pack src/SharpCompress/SharpCompress.csproj -c Release -o artifacts/");
});

Target("default", DependsOn(Publish), () => Console.WriteLine("Done!"));

RunTargetsAndExit(args);
}
}
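A usage note on the Bullseye program above, assuming Bullseye's standard argument handling: because Main ends with RunTargetsAndExit(args), target names come straight from the command line. The workflow's dotnet run -p build/build.csproj runs the default chain (format, build, test, publish), while dotnet run -p build/build.csproj -- clean or -- test would run only the named target and its dependencies.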
build/build.csproj (new file, 14 lines)
@@ -0,0 +1,14 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net5.0</TargetFramework>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="Bullseye" Version="3.6.0" />
    <PackageReference Include="Glob" Version="1.1.8" />
    <PackageReference Include="SimpleExec" Version="6.4.0" />
  </ItemGroup>

</Project>
global.json (new file, 5 lines)
@@ -0,0 +1,5 @@
{
  "sdk": {
    "version": "5.0.101"
  }
}
src/SharpCompress/Algorithms/Alder32.cs (new file, 285 lines)
@@ -0,0 +1,285 @@
// Copyright (c) Six Labors and contributors.
// Licensed under the GNU Affero General Public License, Version 3.

using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
#if !NETSTANDARD2_0 && !NETSTANDARD2_1
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
#endif

namespace SharpCompress.Algorithms
{
/// <summary>
/// Calculates the 32 bit Adler checksum of a given buffer according to
/// RFC 1950. ZLIB Compressed Data Format Specification version 3.3)
/// </summary>
internal static class Adler32
{
/// <summary>
/// The default initial seed value of a Adler32 checksum calculation.
/// </summary>
public const uint SeedValue = 1U;

#if !NETSTANDARD2_0 && !NETSTANDARD2_1
private const int MinBufferSize = 64;
#endif

// Largest prime smaller than 65536
private const uint BASE = 65521;

// NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1
private const uint NMAX = 5552;

/// <summary>
/// Calculates the Adler32 checksum with the bytes taken from the span.
/// </summary>
/// <param name="buffer">The readonly span of bytes.</param>
/// <returns>The <see cref="uint"/>.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static uint Calculate(ReadOnlySpan<byte> buffer)
{
return Calculate(SeedValue, buffer);
}

/// <summary>
/// Calculates the Adler32 checksum with the bytes taken from the span and seed.
/// </summary>
/// <param name="adler">The input Adler32 value.</param>
/// <param name="buffer">The readonly span of bytes.</param>
/// <returns>The <see cref="uint"/>.</returns>
public static uint Calculate(uint adler, ReadOnlySpan<byte> buffer)
{
if (buffer.IsEmpty)
{
return SeedValue;
}

#if !NETSTANDARD2_0 && !NETSTANDARD2_1
if (Sse3.IsSupported && buffer.Length >= MinBufferSize)
{
return CalculateSse(adler, buffer);
}

return CalculateScalar(adler, buffer);
#else
return CalculateScalar(adler, buffer);
#endif
}

// Based on https://github.com/chromium/chromium/blob/master/third_party/zlib/adler32_simd.c
#if !NETSTANDARD2_0 && !NETSTANDARD2_1
private static unsafe uint CalculateSse(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;

// Process the data in blocks.
const int BLOCK_SIZE = 1 << 5;

uint length = (uint)buffer.Length;
uint blocks = length / BLOCK_SIZE;
length -= blocks * BLOCK_SIZE;

int index = 0;
fixed (byte* bufferPtr = &buffer[0])
{
index += (int)blocks * BLOCK_SIZE;
var localBufferPtr = bufferPtr;

// _mm_setr_epi8 on x86
var tap1 = Vector128.Create(32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17);
var tap2 = Vector128.Create(16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1);
Vector128<byte> zero = Vector128<byte>.Zero;
var ones = Vector128.Create((short)1);

while (blocks > 0)
{
uint n = NMAX / BLOCK_SIZE; /* The NMAX constraint. */
if (n > blocks)
{
n = blocks;
}

blocks -= n;

// Process n blocks of data. At most NMAX data bytes can be
// processed before s2 must be reduced modulo BASE.
Vector128<int> v_ps = Vector128.CreateScalar(s1 * n).AsInt32();
Vector128<int> v_s2 = Vector128.CreateScalar(s2).AsInt32();
Vector128<int> v_s1 = Vector128<int>.Zero;

do
{
// Load 32 input bytes.
Vector128<byte> bytes1 = Sse3.LoadDquVector128(localBufferPtr);
Vector128<byte> bytes2 = Sse3.LoadDquVector128(localBufferPtr + 16);

// Add previous block byte sum to v_ps.
v_ps = Sse2.Add(v_ps, v_s1);

// Horizontally add the bytes for s1, multiply-adds the
// bytes by [ 32, 31, 30, ... ] for s2.
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes1, zero).AsInt32());
Vector128<short> mad1 = Ssse3.MultiplyAddAdjacent(bytes1, tap1);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad1, ones));

v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes2, zero).AsInt32());
Vector128<short> mad2 = Ssse3.MultiplyAddAdjacent(bytes2, tap2);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad2, ones));

localBufferPtr += BLOCK_SIZE;
}
while (--n > 0);

v_s2 = Sse2.Add(v_s2, Sse2.ShiftLeftLogical(v_ps, 5));

// Sum epi32 ints v_s1(s2) and accumulate in s1(s2).
const byte S2301 = 0b1011_0001; // A B C D -> B A D C
const byte S1032 = 0b0100_1110; // A B C D -> C D A B

v_s1 = Sse2.Add(v_s1, Sse2.Shuffle(v_s1, S2301));
v_s1 = Sse2.Add(v_s1, Sse2.Shuffle(v_s1, S1032));

s1 += (uint)v_s1.ToScalar();

v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S2301));
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S1032));

s2 = (uint)v_s2.ToScalar();

// Reduce.
s1 %= BASE;
s2 %= BASE;
}
}

ref byte bufferRef = ref MemoryMarshal.GetReference(buffer);

if (length > 0)
{
if (length >= 16)
{
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
length -= 16;
}

while (length-- > 0)
{
s2 += s1 += Unsafe.Add(ref bufferRef, index++);
}

if (s1 >= BASE)
{
s1 -= BASE;
}

s2 %= BASE;
}

return s1 | (s2 << 16);
}
#endif

private static uint CalculateScalar(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
uint k;

ref byte bufferRef = ref MemoryMarshal.GetReference<byte>(buffer);
uint length = (uint)buffer.Length;
int index = 0;

while (length > 0)
{
k = length < NMAX ? length : NMAX;
length -= k;

while (k >= 16)
{
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
k -= 16;
}

if (k != 0)
{
do
{
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
}
while (--k != 0);
}

s1 %= BASE;
s2 %= BASE;
}

return (s2 << 16) | s1;
}
}
}
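For comparison with the vectorised and unrolled code above, the RFC 1950 recurrence that Adler32 implements can be written in a few lines of scalar C#. This is a reference sketch added for clarity, not part of the diff.

// Reference Adler-32: s1 is the running byte sum, s2 the running sum of s1,
// both reduced modulo 65521; the checksum packs s2 into the high 16 bits.
static uint Adler32Reference(ReadOnlySpan<byte> data)
{
    const uint Base = 65521;
    uint s1 = 1, s2 = 0;
    foreach (byte b in data)
    {
        s1 = (s1 + b) % Base;
        s2 = (s2 + s1) % Base;
    }
    return (s2 << 16) | s1;
}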
src/SharpCompress/Archives/AbstractArchive.cs
@@ -2,29 +2,25 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;

namespace SharpCompress.Archives
{
public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtractionListener
public abstract class AbstractArchive<TEntry, TVolume> : IArchive
where TEntry : IArchiveEntry
where TVolume : IVolume
{
private readonly LazyReadOnlyCollection<TVolume> lazyVolumes;
private readonly LazyReadOnlyCollection<TEntry> lazyEntries;

public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionBegin;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionEnd;

public event EventHandler<CompressedBytesReadEventArgs> CompressedBytesRead;
public event EventHandler<FilePartExtractionBeginEventArgs> FilePartExtractionBegin;

protected ReaderOptions ReaderOptions { get; }
protected ReaderOptions ReaderOptions { get; } = new ();

private bool disposed;

internal AbstractArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerOptions)
internal AbstractArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerOptions, CancellationToken cancellationToken)
{
Type = type;
if (!fileInfo.Exists)
@@ -33,40 +29,30 @@ namespace SharpCompress.Archives
}
ReaderOptions = readerOptions;
readerOptions.LeaveStreamOpen = false;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(fileInfo));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(fileInfo, cancellationToken));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes, cancellationToken));
}

protected abstract IEnumerable<TVolume> LoadVolumes(FileInfo file);
protected abstract IAsyncEnumerable<TVolume> LoadVolumes(FileInfo file, CancellationToken cancellationToken);

internal AbstractArchive(ArchiveType type, IEnumerable<Stream> streams, ReaderOptions readerOptions)
internal AbstractArchive(ArchiveType type, IAsyncEnumerable<Stream> streams, ReaderOptions readerOptions, CancellationToken cancellationToken)
{
Type = type;
ReaderOptions = readerOptions;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(streams.Select(CheckStreams)));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(streams.Select(CheckStreams), cancellationToken));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes, cancellationToken));
}

internal AbstractArchive(ArchiveType type)
{
Type = type;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
lazyVolumes = new LazyReadOnlyCollection<TVolume>( AsyncEnumerable.Empty<TVolume>());
lazyEntries = new LazyReadOnlyCollection<TEntry>(AsyncEnumerable.Empty<TEntry>());
}

public ArchiveType Type { get; }

void IArchiveExtractionListener.FireEntryExtractionBegin(IArchiveEntry entry)
{
EntryExtractionBegin?.Invoke(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));
}

void IArchiveExtractionListener.FireEntryExtractionEnd(IArchiveEntry entry)
{
EntryExtractionEnd?.Invoke(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));
}

private static Stream CheckStreams(Stream stream)
{
if (!stream.CanSeek || !stream.CanRead)
@@ -79,65 +65,48 @@ namespace SharpCompress.Archives
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
/// </summary>
public virtual ICollection<TEntry> Entries { get { return lazyEntries; } }
public virtual IAsyncEnumerable<TEntry> Entries => lazyEntries;

/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveVolumes across the one or many parts of the RarArchive.
/// </summary>
public ICollection<TVolume> Volumes { get { return lazyVolumes; } }
public IAsyncEnumerable<TVolume> Volumes => lazyVolumes;

/// <summary>
/// The total size of the files compressed in the archive.
/// </summary>
public virtual long TotalSize { get { return Entries.Aggregate(0L, (total, cf) => total + cf.CompressedSize); } }
public virtual async ValueTask<long> TotalSizeAsync()
{
await EnsureEntriesLoaded();
return await Entries.AggregateAsync(0L, (total, cf) => total + cf.CompressedSize);
}

/// <summary>
/// The total size of the files as uncompressed in the archive.
/// </summary>
public virtual long TotalUncompressSize { get { return Entries.Aggregate(0L, (total, cf) => total + cf.Size); } }
public virtual async ValueTask<long> TotalUncompressedSizeAsync()
{
await EnsureEntriesLoaded();
return await Entries.AggregateAsync(0L, (total, cf) => total + cf.Size);
}

protected abstract IEnumerable<TVolume> LoadVolumes(IEnumerable<Stream> streams);
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);
protected abstract IAsyncEnumerable<TVolume> LoadVolumes(IAsyncEnumerable<Stream> streams, CancellationToken cancellationToken);
protected abstract IAsyncEnumerable<TEntry> LoadEntries(IAsyncEnumerable<TVolume> volumes, CancellationToken cancellationToken);

IEnumerable<IArchiveEntry> IArchive.Entries { get { return Entries.Cast<IArchiveEntry>(); } }
IAsyncEnumerable<IArchiveEntry> IArchive.Entries => Entries.Select(x => (IArchiveEntry)x);

IEnumerable<IVolume> IArchive.Volumes { get { return lazyVolumes.Cast<IVolume>(); } }
IAsyncEnumerable<IVolume> IArchive.Volumes => lazyVolumes.Select(x => (IVolume)x);

public virtual void Dispose()
public virtual async ValueTask DisposeAsync()
{
if (!disposed)
{
lazyVolumes.ForEach(v => v.Dispose());
lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
await lazyVolumes.ForEachAsync(async v => await v.DisposeAsync());
await lazyEntries.GetLoaded().Cast<Entry>().ForEachAsync(async x => await x.CloseAsync());
disposed = true;
}
}

void IArchiveExtractionListener.EnsureEntriesLoaded()
{
lazyEntries.EnsureFullyLoaded();
lazyVolumes.EnsureFullyLoaded();
}

void IExtractionListener.FireCompressedBytesRead(long currentPartCompressedBytes, long compressedReadBytes)
{
CompressedBytesRead?.Invoke(this, new CompressedBytesReadEventArgs
{
CurrentFilePartCompressedBytesRead = currentPartCompressedBytes,
CompressedBytesRead = compressedReadBytes
});
}

void IExtractionListener.FireFilePartExtractionBegin(string name, long size, long compressedSize)
{
FilePartExtractionBegin?.Invoke(this, new FilePartExtractionBeginEventArgs
{
CompressedSize = compressedSize,
Size = size,
Name = name
});
}

/// <summary>
/// Use this method to extract all entries in an archive in order.
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
@@ -149,29 +118,32 @@ namespace SharpCompress.Archives
/// occur if this is used at the same time as other extraction methods on this instance.
/// </summary>
/// <returns></returns>
public IReader ExtractAllEntries()
public async ValueTask<IReader> ExtractAllEntries()
{
((IArchiveExtractionListener)this).EnsureEntriesLoaded();
return CreateReaderForSolidExtraction();
await EnsureEntriesLoaded();
return await CreateReaderForSolidExtraction();
}

public async ValueTask EnsureEntriesLoaded()
{
await lazyEntries.EnsureFullyLoaded();
await lazyVolumes.EnsureFullyLoaded();
}

protected abstract IReader CreateReaderForSolidExtraction();
protected abstract ValueTask<IReader> CreateReaderForSolidExtraction();

/// <summary>
/// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
/// </summary>
public virtual bool IsSolid { get { return false; } }
public virtual ValueTask<bool> IsSolidAsync() => new(false);

/// <summary>
/// The archive can find all the parts of the archive needed to fully extract the archive. This forces the parsing of the entire archive.
/// </summary>
public bool IsComplete
public async ValueTask<bool> IsCompleteAsync()
{
get
{
((IArchiveExtractionListener)this).EnsureEntriesLoaded();
return Entries.All(x => x.IsComplete);
}
await EnsureEntriesLoaded();
return await Entries.AllAsync(x => x.IsComplete);
}
}
}

src/SharpCompress/Archives/AbstractWritableArchive.cs
@@ -2,6 +2,8 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Writers;
@@ -12,98 +14,126 @@ namespace SharpCompress.Archives
where TEntry : IArchiveEntry
where TVolume : IVolume
{
private readonly List<TEntry> newEntries = new List<TEntry>();
private readonly List<TEntry> removedEntries = new List<TEntry>();
private class RebuildPauseDisposable : IAsyncDisposable
{
private readonly AbstractWritableArchive<TEntry, TVolume> archive;

private readonly List<TEntry> modifiedEntries = new List<TEntry>();
public RebuildPauseDisposable(AbstractWritableArchive<TEntry, TVolume> archive)
{
this.archive = archive;
archive.pauseRebuilding = true;
}

public async ValueTask DisposeAsync()
{
archive.pauseRebuilding = false;
await archive.RebuildModifiedCollection();
}
}
private readonly List<TEntry> newEntries = new();
private readonly List<TEntry> removedEntries = new();

private readonly List<TEntry> modifiedEntries = new();
private bool hasModifications;
private bool pauseRebuilding;

internal AbstractWritableArchive(ArchiveType type)
: base(type)
{
}

internal AbstractWritableArchive(ArchiveType type, Stream stream, ReaderOptions readerFactoryOptions)
: base(type, stream.AsEnumerable(), readerFactoryOptions)
internal AbstractWritableArchive(ArchiveType type, Stream stream, ReaderOptions readerFactoryOptions,
CancellationToken cancellationToken)
: base(type, stream.AsAsyncEnumerable(), readerFactoryOptions, cancellationToken)
{
}

internal AbstractWritableArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerFactoryOptions)
: base(type, fileInfo, readerFactoryOptions)
internal AbstractWritableArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerFactoryOptions,
CancellationToken cancellationToken)
: base(type, fileInfo, readerFactoryOptions, cancellationToken)
{
}

public override ICollection<TEntry> Entries
public override IAsyncEnumerable<TEntry> Entries
{
get
{
if (hasModifications)
{
return modifiedEntries;
return modifiedEntries.ToAsyncEnumerable();
}
return base.Entries;
}
}

private void RebuildModifiedCollection()
public IAsyncDisposable PauseEntryRebuilding()
{
return new RebuildPauseDisposable(this);
}

private async ValueTask RebuildModifiedCollection()
{
if (pauseRebuilding)
{
return;
}
hasModifications = true;
newEntries.RemoveAll(v => removedEntries.Contains(v));
modifiedEntries.Clear();
modifiedEntries.AddRange(OldEntries.Concat(newEntries));
modifiedEntries.AddRange(await OldEntries.Concat(newEntries.ToAsyncEnumerable()).ToListAsync());
}

private IEnumerable<TEntry> OldEntries { get { return base.Entries.Where(x => !removedEntries.Contains(x)); } }
private IAsyncEnumerable<TEntry> OldEntries { get { return base.Entries.Where(x => !removedEntries.Contains(x)); } }

public void RemoveEntry(TEntry entry)
public async ValueTask RemoveEntryAsync(TEntry entry)
{
if (!removedEntries.Contains(entry))
{
removedEntries.Add(entry);
RebuildModifiedCollection();
await RebuildModifiedCollection();
}
}

void IWritableArchive.RemoveEntry(IArchiveEntry entry)
ValueTask IWritableArchive.RemoveEntryAsync(IArchiveEntry entry, CancellationToken cancellationToken)
{
RemoveEntry((TEntry)entry);
return RemoveEntryAsync((TEntry)entry);
}

public TEntry AddEntry(string key, Stream source,
long size = 0, DateTime? modified = null)
public ValueTask<TEntry> AddEntryAsync(string key, Stream source,
long size = 0, DateTime? modified = null,
CancellationToken cancellationToken = default)
{
return AddEntry(key, source, false, size, modified);
return AddEntryAsync(key, source, false, size, modified, cancellationToken);
}

IArchiveEntry IWritableArchive.AddEntry(string key, Stream source, bool closeStream, long size, DateTime? modified)
async ValueTask<IArchiveEntry> IWritableArchive.AddEntryAsync(string key, Stream source, bool closeStream, long size, DateTime? modified, CancellationToken cancellationToken)
{
return AddEntry(key, source, closeStream, size, modified);
return await AddEntryAsync(key, source, closeStream, size, modified, cancellationToken);
}

public TEntry AddEntry(string key, Stream source, bool closeStream,
long size = 0, DateTime? modified = null)
public async ValueTask<TEntry> AddEntryAsync(string key, Stream source, bool closeStream,
long size = 0, DateTime? modified = null, CancellationToken cancellationToken = default)
{
if (key.StartsWith("/")
|| key.StartsWith("\\"))
if (key.Length > 0 && key[0] is '/' or '\\')
{
key = key.Substring(1);
}
if (DoesKeyMatchExisting(key))
if (await DoesKeyMatchExisting(key))
{
throw new ArchiveException("Cannot add entry with duplicate key: " + key);
}
var entry = CreateEntry(key, source, size, modified, closeStream);
var entry = await CreateEntry(key, source, size, modified, closeStream, cancellationToken);
newEntries.Add(entry);
RebuildModifiedCollection();
await RebuildModifiedCollection();
return entry;
}

private bool DoesKeyMatchExisting(string key)
private async ValueTask<bool> DoesKeyMatchExisting(string key)
{
foreach (var path in Entries.Select(x => x.Key))
await foreach (var path in Entries.Select(x => x.Key))
{
var p = path.Replace('/', '\\');
if (p.StartsWith("\\"))
if (p.Length > 0 && p[0] == '\\')
{
p = p.Substring(1);
}
@@ -112,34 +142,35 @@ namespace SharpCompress.Archives
return false;
}

public void SaveTo(Stream stream, WriterOptions options)
public async ValueTask SaveToAsync(Stream stream, WriterOptions options, CancellationToken cancellationToken = default)
{
//reset streams of new entries
newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
SaveTo(stream, options, OldEntries, newEntries);
await SaveToAsync(stream, options, OldEntries, newEntries.ToAsyncEnumerable(), cancellationToken);
}

protected TEntry CreateEntry(string key, Stream source, long size, DateTime? modified,
bool closeStream)
protected ValueTask<TEntry> CreateEntry(string key, Stream source, long size, DateTime? modified,
bool closeStream, CancellationToken cancellationToken)
{
if (!source.CanRead || !source.CanSeek)
{
throw new ArgumentException("Streams must be readable and seekable to use the Writing Archive API");
}
return CreateEntryInternal(key, source, size, modified, closeStream);
return CreateEntryInternal(key, source, size, modified, closeStream, cancellationToken);
}

protected abstract TEntry CreateEntryInternal(string key, Stream source, long size, DateTime? modified,
bool closeStream);
protected abstract ValueTask<TEntry> CreateEntryInternal(string key, Stream source, long size, DateTime? modified,
bool closeStream, CancellationToken cancellationToken);

protected abstract void SaveTo(Stream stream, WriterOptions options, IEnumerable<TEntry> oldEntries, IEnumerable<TEntry> newEntries);
protected abstract ValueTask SaveToAsync(Stream stream, WriterOptions options, IAsyncEnumerable<TEntry> oldEntries, IAsyncEnumerable<TEntry> newEntries,
CancellationToken cancellationToken = default);

public override void Dispose()
public override async ValueTask DisposeAsync()
{
base.Dispose();
newEntries.Cast<Entry>().ForEach(x => x.Close());
removedEntries.Cast<Entry>().ForEach(x => x.Close());
modifiedEntries.Cast<Entry>().ForEach(x => x.Close());
await base.DisposeAsync();
await newEntries.Cast<Entry>().ForEachAsync(async x => await x.CloseAsync());
await removedEntries.Cast<Entry>().ForEachAsync(async x => await x.CloseAsync());
await modifiedEntries.Cast<Entry>().ForEachAsync(async x => await x.CloseAsync());
}
}
}

src/SharpCompress/Archives/ArchiveFactory.cs
@@ -1,8 +1,10 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Rar;
using SharpCompress.Archives.SevenZip;
//using SharpCompress.Archives.Rar;
//using SharpCompress.Archives.SevenZip;
using SharpCompress.Archives.Tar;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
@@ -10,7 +12,7 @@ using SharpCompress.Readers;

namespace SharpCompress.Archives
{
public class ArchiveFactory
public static class ArchiveFactory
{
/// <summary>
/// Opens an Archive for random access
@@ -18,67 +20,55 @@
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <returns></returns>
public static IArchive Open(Stream stream, ReaderOptions readerOptions = null)
public static async ValueTask<IArchive> OpenAsync(Stream stream, ReaderOptions? readerOptions = null, CancellationToken cancellationToken = default)
{
stream.CheckNotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException("Stream should be readable and seekable");
}
readerOptions = readerOptions ?? new ReaderOptions();
if (ZipArchive.IsZipFile(stream, null))
readerOptions ??= new ReaderOptions();
if (await ZipArchive.IsZipFileAsync(stream, null, cancellationToken))
{
stream.Seek(0, SeekOrigin.Begin);
return ZipArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (SevenZipArchive.IsSevenZipFile(stream))
/*if (SevenZipArchive.IsSevenZipFile(stream))
{
stream.Seek(0, SeekOrigin.Begin);
return SevenZipArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (GZipArchive.IsGZipFile(stream))
stream.Seek(0, SeekOrigin.Begin); */
if (await GZipArchive.IsGZipFileAsync(stream, cancellationToken))
{
stream.Seek(0, SeekOrigin.Begin);
return GZipArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (RarArchive.IsRarFile(stream, readerOptions))
/* if (RarArchive.IsRarFile(stream, readerOptions))
{
stream.Seek(0, SeekOrigin.Begin);
return RarArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (TarArchive.IsTarFile(stream))
stream.Seek(0, SeekOrigin.Begin); */
if (await TarArchive.IsTarFileAsync(stream, cancellationToken))
{
stream.Seek(0, SeekOrigin.Begin);
return TarArchive.Open(stream, readerOptions);
}
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip, LZip");
}

public static IWritableArchive Create(ArchiveType type)
{
switch (type)
return type switch
{
case ArchiveType.Zip:
{
return ZipArchive.Create();
}
case ArchiveType.Tar:
{
return TarArchive.Create();
}
case ArchiveType.GZip:
{
return GZipArchive.Create();
}
default:
{
throw new NotSupportedException("Cannot create Archives of type: " + type);
}
}
ArchiveType.Zip => ZipArchive.Create(),
//ArchiveType.Tar => TarArchive.Create(),
ArchiveType.GZip => GZipArchive.Create(),
_ => throw new NotSupportedException("Cannot create Archives of type: " + type)
};
}

/// <summary>
@@ -86,10 +76,10 @@
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
public static IArchive Open(string filePath, ReaderOptions options = null)
public static ValueTask<IArchive> OpenAsync(string filePath, ReaderOptions? options = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), options);
return OpenAsync(new FileInfo(filePath), options);
}

/// <summary>
@@ -97,52 +87,51 @@
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
public static IArchive Open(FileInfo fileInfo, ReaderOptions options = null)
public static async ValueTask<IArchive> OpenAsync(FileInfo fileInfo, ReaderOptions? options = null, CancellationToken cancellationToken = default)
{
fileInfo.CheckNotNull(nameof(fileInfo));
options = options ?? new ReaderOptions { LeaveStreamOpen = false };
using (var stream = fileInfo.OpenRead())
options ??= new ReaderOptions { LeaveStreamOpen = false };

await using var stream = fileInfo.OpenRead();
if (await ZipArchive.IsZipFileAsync(stream, null, cancellationToken))
{
if (ZipArchive.IsZipFile(stream, null))
{
return ZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (SevenZipArchive.IsSevenZipFile(stream))
{
return SevenZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (GZipArchive.IsGZipFile(stream))
{
return GZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (RarArchive.IsRarFile(stream, options))
{
return RarArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (TarArchive.IsTarFile(stream))
{
return TarArchive.Open(fileInfo, options);
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
return ZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
/*if (SevenZipArchive.IsSevenZipFile(stream))
{
return SevenZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin); */
if (await GZipArchive.IsGZipFileAsync(stream, cancellationToken))
{
return GZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
/*if (RarArchive.IsRarFile(stream, options))
{
return RarArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (TarArchive.IsTarFile(stream))
{
return TarArchive.Open(fileInfo, options);
} */
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
}

/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(string sourceArchive, string destinationDirectory,
ExtractionOptions options = null)
public static async ValueTask WriteToDirectory(string sourceArchive,
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default)
{
using (IArchive archive = Open(sourceArchive))
await using IArchive archive = await OpenAsync(sourceArchive);
await foreach (IArchiveEntry entry in archive.Entries.WithCancellation(cancellationToken))
{
foreach (IArchiveEntry entry in archive.Entries)
{
entry.WriteToDirectory(destinationDirectory, options);
}
await entry.WriteEntryToDirectoryAsync(destinationDirectory, options, cancellationToken);
}
}
}
||||
@@ -1,7 +1,11 @@
|
||||
using System;
|
||||
using System.Buffers;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.GZip;
|
||||
using SharpCompress.Readers;
|
||||
@@ -18,7 +22,7 @@ namespace SharpCompress.Archives.GZip
|
||||
/// </summary>
|
||||
/// <param name="filePath"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static GZipArchive Open(string filePath, ReaderOptions readerOptions = null)
|
||||
public static GZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
filePath.CheckNotNullOrEmpty(nameof(filePath));
|
||||
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
|
||||
@@ -29,10 +33,11 @@ namespace SharpCompress.Archives.GZip
|
||||
/// </summary>
|
||||
/// <param name="fileInfo"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
|
||||
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
fileInfo.CheckNotNull(nameof(fileInfo));
|
||||
return new GZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
|
||||
return new GZipArchive(fileInfo, readerOptions ?? new ReaderOptions(), cancellationToken);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -40,10 +45,11 @@ namespace SharpCompress.Archives.GZip
|
||||
/// </summary>
|
||||
/// <param name="stream"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static GZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
|
||||
public static GZipArchive Open(Stream stream, ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
stream.CheckNotNull(nameof(stream));
|
||||
return new GZipArchive(stream, readerOptions ?? new ReaderOptions());
|
||||
return new GZipArchive(stream, readerOptions ?? new ReaderOptions(), cancellationToken);
|
||||
}
|
||||
|
||||
public static GZipArchive Create()
|
||||
@@ -56,58 +62,58 @@ namespace SharpCompress.Archives.GZip
|
||||
/// </summary>
|
||||
/// <param name="fileInfo"></param>
|
||||
/// <param name="options"></param>
|
||||
internal GZipArchive(FileInfo fileInfo, ReaderOptions options)
|
||||
: base(ArchiveType.GZip, fileInfo, options)
|
||||
internal GZipArchive(FileInfo fileInfo, ReaderOptions options,
|
||||
CancellationToken cancellationToken)
|
||||
: base(ArchiveType.GZip, fileInfo, options, cancellationToken)
|
||||
{
|
||||
}
|
||||
|
||||
protected override IEnumerable<GZipVolume> LoadVolumes(FileInfo file)
|
||||
protected override IAsyncEnumerable<GZipVolume> LoadVolumes(FileInfo file,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
return new GZipVolume(file, ReaderOptions).AsEnumerable();
|
||||
return new GZipVolume(file, ReaderOptions).AsAsyncEnumerable();
|
||||
}
|
||||
|
||||
public static bool IsGZipFile(string filePath)
|
||||
public static ValueTask<bool> IsGZipFileAsync(string filePath, CancellationToken cancellationToken = default)
|
||||
{
|
||||
return IsGZipFile(new FileInfo(filePath));
|
||||
return IsGZipFileAsync(new FileInfo(filePath), cancellationToken);
|
||||
}
|
||||
|
||||
public static bool IsGZipFile(FileInfo fileInfo)
|
||||
public static async ValueTask<bool> IsGZipFileAsync(FileInfo fileInfo, CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
using (Stream stream = fileInfo.OpenRead())
|
||||
{
|
||||
return IsGZipFile(stream);
|
||||
}
|
||||
|
||||
await using Stream stream = fileInfo.OpenRead();
|
||||
return await IsGZipFileAsync(stream, cancellationToken);
|
||||
}
|
||||
|
||||
public void SaveTo(string filePath)
|
||||
public Task SaveToAsync(string filePath, CancellationToken cancellationToken = default)
|
||||
{
|
||||
SaveTo(new FileInfo(filePath));
|
||||
return SaveToAsync(new FileInfo(filePath), cancellationToken);
|
||||
}
|
||||
|
||||
public void SaveTo(FileInfo fileInfo)
|
||||
public async Task SaveToAsync(FileInfo fileInfo, CancellationToken cancellationToken = default)
|
||||
{
|
||||
using (var stream = fileInfo.Open(FileMode.Create, FileAccess.Write))
|
||||
{
|
||||
SaveTo(stream, new WriterOptions(CompressionType.GZip));
|
||||
}
|
||||
await using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
|
||||
await SaveToAsync(stream, new WriterOptions(CompressionType.GZip), cancellationToken);
|
||||
}
|
||||
|
||||
public static bool IsGZipFile(Stream stream)
|
||||
public static async ValueTask<bool> IsGZipFileAsync(Stream stream, CancellationToken cancellationToken = default)
|
||||
{
|
||||
// read the header on the first read
|
||||
byte[] header = new byte[10];
|
||||
using var header = MemoryPool<byte>.Shared.Rent(10);
|
||||
var slice = header.Memory.Slice(0, 10);
|
||||
|
||||
// workitem 8501: handle edge case (decompress empty stream)
|
||||
if (!stream.ReadFully(header))
|
||||
if (await stream.ReadAsync(slice, cancellationToken) != 10)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
|
||||
if (slice.Span[0] != 0x1F || slice.Span[1] != 0x8B || slice.Span[2] != 8)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
@@ -120,8 +126,9 @@ namespace SharpCompress.Archives.GZip
|
||||
/// </summary>
|
||||
/// <param name="stream"></param>
|
||||
/// <param name="options"></param>
|
||||
internal GZipArchive(Stream stream, ReaderOptions options)
|
||||
: base(ArchiveType.GZip, stream, options)
|
||||
internal GZipArchive(Stream stream, ReaderOptions options,
|
||||
CancellationToken cancellationToken)
|
||||
: base(ArchiveType.GZip, stream, options, cancellationToken)
|
||||
{
|
||||
}
|
||||
|
||||
@@ -130,51 +137,54 @@ namespace SharpCompress.Archives.GZip
|
||||
{
|
||||
}
|
||||
|
||||
protected override GZipArchiveEntry CreateEntryInternal(string filePath, Stream source, long size, DateTime? modified,
|
||||
bool closeStream)
|
||||
protected override async ValueTask<GZipArchiveEntry> CreateEntryInternal(string filePath, Stream source, long size, DateTime? modified,
|
||||
bool closeStream, CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (Entries.Any())
|
||||
if (await Entries.AnyAsync(cancellationToken: cancellationToken))
|
||||
{
|
||||
throw new InvalidOperationException("Only one entry is allowed in a GZip Archive");
|
||||
}
|
||||
return new GZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
|
||||
}
|
||||
|
||||
protected override void SaveTo(Stream stream, WriterOptions options,
|
||||
IEnumerable<GZipArchiveEntry> oldEntries,
|
||||
IEnumerable<GZipArchiveEntry> newEntries)
|
||||
protected override async ValueTask SaveToAsync(Stream stream, WriterOptions options,
|
||||
IAsyncEnumerable<GZipArchiveEntry> oldEntries,
|
||||
IAsyncEnumerable<GZipArchiveEntry> newEntries,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (Entries.Count > 1)
|
||||
if (await Entries.CountAsync(cancellationToken: cancellationToken) > 1)
|
||||
{
|
||||
throw new InvalidOperationException("Only one entry is allowed in a GZip Archive");
|
||||
}
|
||||
using (var writer = new GZipWriter(stream, new GZipWriterOptions(options)))
|
||||
|
||||
await using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
|
||||
await foreach (var entry in oldEntries.Concat(newEntries)
|
||||
.Where(x => !x.IsDirectory)
|
||||
.WithCancellation(cancellationToken))
|
||||
{
|
||||
foreach (var entry in oldEntries.Concat(newEntries)
|
||||
.Where(x => !x.IsDirectory))
|
||||
{
|
||||
using (var entryStream = entry.OpenEntryStream())
|
||||
{
|
||||
writer.Write(entry.Key, entryStream, entry.LastModifiedTime);
|
||||
}
|
||||
}
|
||||
await using var entryStream = await entry.OpenEntryStreamAsync(cancellationToken);
|
||||
await writer.WriteAsync(entry.Key, entryStream, entry.LastModifiedTime, cancellationToken);
|
||||
}
|
||||
}
|
||||
|
||||
protected override IEnumerable<GZipVolume> LoadVolumes(IEnumerable<Stream> streams)
|
||||
protected override async IAsyncEnumerable<GZipVolume> LoadVolumes(IAsyncEnumerable<Stream> streams,
|
||||
[EnumeratorCancellation]CancellationToken cancellationToken)
|
||||
{
|
||||
return new GZipVolume(streams.First(), ReaderOptions).AsEnumerable();
|
||||
yield return new GZipVolume(await streams.FirstAsync(cancellationToken: cancellationToken), ReaderOptions);
|
||||
}
|
||||
|
||||
protected override IEnumerable<GZipArchiveEntry> LoadEntries(IEnumerable<GZipVolume> volumes)
|
||||
protected override async IAsyncEnumerable<GZipArchiveEntry> LoadEntries(IAsyncEnumerable<GZipVolume> volumes,
|
||||
[EnumeratorCancellation]CancellationToken cancellationToken)
|
||||
{
|
||||
Stream stream = volumes.Single().Stream;
|
||||
yield return new GZipArchiveEntry(this, new GZipFilePart(stream, ReaderOptions.ArchiveEncoding));
|
||||
Stream stream = (await volumes.SingleAsync(cancellationToken: cancellationToken)).Stream;
|
||||
var part = new GZipFilePart(ReaderOptions.ArchiveEncoding);
|
||||
await part.Initialize(stream, cancellationToken);
|
||||
yield return new GZipArchiveEntry(this, part);
|
||||
}
|
||||
|
||||
protected override IReader CreateReaderForSolidExtraction()
|
||||
protected override async ValueTask<IReader> CreateReaderForSolidExtraction()
|
||||
{
|
||||
var stream = Volumes.Single().Stream;
|
||||
var stream = (await Volumes.SingleAsync()).Stream;
|
||||
stream.Position = 0;
|
||||
return GZipReader.Open(stream);
|
||||
}
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.GZip;
|
||||
|
||||
namespace SharpCompress.Archives.GZip
|
||||
@@ -12,15 +14,15 @@ namespace SharpCompress.Archives.GZip
|
||||
Archive = archive;
|
||||
}
|
||||
|
||||
public virtual Stream OpenEntryStream()
|
||||
public virtual async ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
//this is to reset the stream to be read multiple times
|
||||
var part = Parts.Single() as GZipFilePart;
|
||||
var part = (GZipFilePart)Parts.Single();
|
||||
if (part.GetRawStream().Position != part.EntryStartPosition)
|
||||
{
|
||||
part.GetRawStream().Position = part.EntryStartPosition;
|
||||
}
|
||||
return Parts.Single().GetCompressedStream();
|
||||
return await Parts.Single().GetCompressedStreamAsync(cancellationToken);
|
||||
}
|
||||
|
||||
#region IArchiveEntry Members
|
||||
|
||||
@@ -1,12 +1,16 @@
|
||||
using System;
|
||||
#nullable disable
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.IO;
|
||||
|
||||
namespace SharpCompress.Archives.GZip
|
||||
{
|
||||
internal class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArchiveEntry
|
||||
internal sealed class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArchiveEntry
|
||||
{
|
||||
private readonly bool closeStream;
|
||||
private readonly Stream stream;
|
||||
@@ -48,18 +52,18 @@ namespace SharpCompress.Archives.GZip
|
||||
|
||||
Stream IWritableArchiveEntry.Stream => stream;
|
||||
|
||||
public override Stream OpenEntryStream()
|
||||
public override ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
//ensure new stream is at the start, this could be reset
|
||||
stream.Seek(0, SeekOrigin.Begin);
|
||||
return new NonDisposingStream(stream);
|
||||
return new(new NonDisposingStream(stream));
|
||||
}
|
||||
|
||||
internal override void Close()
|
||||
internal override async ValueTask CloseAsync()
|
||||
{
|
||||
if (closeStream)
|
||||
{
|
||||
stream.Dispose();
|
||||
await stream.DisposeAsync();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,49 +1,44 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives
|
||||
{
|
||||
public interface IArchive : IDisposable
|
||||
public interface IArchive : IAsyncDisposable
|
||||
{
|
||||
event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionBegin;
|
||||
event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionEnd;
|
||||
|
||||
event EventHandler<CompressedBytesReadEventArgs> CompressedBytesRead;
|
||||
event EventHandler<FilePartExtractionBeginEventArgs> FilePartExtractionBegin;
|
||||
|
||||
IEnumerable<IArchiveEntry> Entries { get; }
|
||||
IEnumerable<IVolume> Volumes { get; }
|
||||
IAsyncEnumerable<IArchiveEntry> Entries { get; }
|
||||
IAsyncEnumerable<IVolume> Volumes { get; }
|
||||
|
||||
ArchiveType Type { get; }
|
||||
|
||||
ValueTask EnsureEntriesLoaded();
|
||||
/// <summary>
|
||||
/// Use this method to extract all entries in an archive in order.
|
||||
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
|
||||
/// extracted sequentially for the best performance.
|
||||
/// </summary>
|
||||
IReader ExtractAllEntries();
|
||||
ValueTask<IReader> ExtractAllEntries();
|
||||
|
||||
/// <summary>
|
||||
/// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
|
||||
/// Rar Archives can be SOLID while all 7Zip archives are considered SOLID.
|
||||
/// </summary>
|
||||
bool IsSolid { get; }
|
||||
ValueTask<bool> IsSolidAsync();
|
||||
|
||||
/// <summary>
|
||||
/// This checks to see if all the known entries have IsComplete = true
|
||||
/// </summary>
|
||||
bool IsComplete { get; }
|
||||
ValueTask<bool> IsCompleteAsync();
|
||||
|
||||
/// <summary>
|
||||
/// The total size of the files compressed in the archive.
|
||||
/// </summary>
|
||||
long TotalSize { get; }
|
||||
ValueTask<long> TotalSizeAsync();
|
||||
|
||||
/// <summary>
|
||||
/// The total size of the files as uncompressed in the archive.
|
||||
/// </summary>
|
||||
long TotalUncompressSize { get; }
|
||||
ValueTask<long> TotalUncompressedSizeAsync();
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,6 @@
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
|
||||
namespace SharpCompress.Archives
|
||||
@@ -9,7 +11,7 @@ namespace SharpCompress.Archives
|
||||
/// Opens the current entry as a stream that will decompress as it is read.
|
||||
/// Read the entire stream or use SkipEntry on EntryStream.
|
||||
/// </summary>
|
||||
Stream OpenEntryStream();
|
||||
ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// The archive can find all the parts of the archive needed to extract this entry.
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.IO;
|
||||
|
||||
@@ -6,62 +8,53 @@ namespace SharpCompress.Archives
|
||||
{
|
||||
public static class IArchiveEntryExtensions
|
||||
{
|
||||
public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWriteTo)
|
||||
public static async ValueTask WriteToAsync(this IArchiveEntry archiveEntry, Stream streamToWriteTo, CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (archiveEntry.Archive.Type == ArchiveType.Rar && archiveEntry.Archive.IsSolid)
|
||||
{
|
||||
throw new InvalidFormatException("Cannot use Archive random access on SOLID Rar files.");
|
||||
}
|
||||
|
||||
if (archiveEntry.IsDirectory)
|
||||
{
|
||||
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
|
||||
}
|
||||
|
||||
var streamListener = archiveEntry.Archive as IArchiveExtractionListener;
|
||||
streamListener.EnsureEntriesLoaded();
|
||||
streamListener.FireEntryExtractionBegin(archiveEntry);
|
||||
streamListener.FireFilePartExtractionBegin(archiveEntry.Key, archiveEntry.Size, archiveEntry.CompressedSize);
|
||||
var entryStream = archiveEntry.OpenEntryStream();
|
||||
if (entryStream == null)
|
||||
var archive = archiveEntry.Archive;
|
||||
await archive.EnsureEntriesLoaded();
|
||||
var entryStream = await archiveEntry.OpenEntryStreamAsync(cancellationToken);
|
||||
if (entryStream is null)
|
||||
{
|
||||
return;
|
||||
}
|
||||
using (entryStream)
|
||||
await using (entryStream)
|
||||
{
|
||||
using (Stream s = new ListeningStream(streamListener, entryStream))
|
||||
{
|
||||
s.TransferTo(streamToWriteTo);
|
||||
}
|
||||
await entryStream.TransferToAsync(streamToWriteTo, cancellationToken);
|
||||
}
|
||||
streamListener.FireEntryExtractionEnd(archiveEntry);
|
||||
}
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// Extract to specific directory, retaining filename
|
||||
/// </summary>
|
||||
public static void WriteToDirectory(this IArchiveEntry entry, string destinationDirectory,
|
||||
ExtractionOptions options = null)
|
||||
public static ValueTask WriteEntryToDirectoryAsync(this IArchiveEntry entry,
|
||||
string destinationDirectory,
|
||||
ExtractionOptions? options = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ExtractionMethods.WriteEntryToDirectory(entry, destinationDirectory, options,
|
||||
entry.WriteToFile);
|
||||
return ExtractionMethods.WriteEntryToDirectoryAsync(entry, destinationDirectory, options,
|
||||
entry.WriteToFileAsync, cancellationToken);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Extract to specific file
|
||||
/// </summary>
|
||||
public static void WriteToFile(this IArchiveEntry entry, string destinationFileName,
|
||||
ExtractionOptions options = null)
|
||||
public static ValueTask WriteToFileAsync(this IArchiveEntry entry,
|
||||
string destinationFileName,
|
||||
ExtractionOptions? options = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
|
||||
ExtractionMethods.WriteEntryToFile(entry, destinationFileName, options,
|
||||
(x, fm) =>
|
||||
|
||||
return ExtractionMethods.WriteEntryToFileAsync(entry, destinationFileName, options,
|
||||
async (x, fm, ct) =>
|
||||
{
|
||||
using (FileStream fs = File.Open(destinationFileName, fm))
|
||||
{
|
||||
entry.WriteTo(fs);
|
||||
}
|
||||
});
|
||||
await using FileStream fs = File.Open(x, fm);
|
||||
await entry.WriteToAsync(fs, ct);
|
||||
}, cancellationToken);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,6 @@
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
|
||||
namespace SharpCompress.Archives
|
||||
@@ -8,12 +10,14 @@ namespace SharpCompress.Archives
|
||||
/// <summary>
|
||||
/// Extract to specific directory, retaining filename
|
||||
/// </summary>
|
||||
public static void WriteToDirectory(this IArchive archive, string destinationDirectory,
|
||||
ExtractionOptions options = null)
|
||||
public static async ValueTask WriteToDirectoryAsync(this IArchive archive,
|
||||
string destinationDirectory,
|
||||
ExtractionOptions? options = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
foreach (IArchiveEntry entry in archive.Entries.Where(x => !x.IsDirectory))
|
||||
await foreach (IArchiveEntry entry in archive.Entries.Where(x => !x.IsDirectory).WithCancellation(cancellationToken))
|
||||
{
|
||||
entry.WriteToDirectory(destinationDirectory, options);
|
||||
await entry.WriteEntryToDirectoryAsync(destinationDirectory, options, cancellationToken);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +0,0 @@
|
||||
using SharpCompress.Common;
|
||||
|
||||
namespace SharpCompress.Archives
|
||||
{
|
||||
internal interface IArchiveExtractionListener : IExtractionListener
|
||||
{
|
||||
void EnsureEntriesLoaded();
|
||||
void FireEntryExtractionBegin(IArchiveEntry entry);
|
||||
void FireEntryExtractionEnd(IArchiveEntry entry);
|
||||
}
|
||||
}
|
||||
@@ -1,15 +1,23 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Writers;
|
||||
|
||||
namespace SharpCompress.Archives
|
||||
{
|
||||
public interface IWritableArchive : IArchive
|
||||
{
|
||||
void RemoveEntry(IArchiveEntry entry);
|
||||
ValueTask RemoveEntryAsync(IArchiveEntry entry, CancellationToken cancellationToken = default);
|
||||
|
||||
IArchiveEntry AddEntry(string key, Stream source, bool closeStream, long size = 0, DateTime? modified = null);
|
||||
ValueTask<IArchiveEntry> AddEntryAsync(string key, Stream source, bool closeStream, long size = 0, DateTime? modified = null, CancellationToken cancellationToken = default);
|
||||
|
||||
void SaveTo(Stream stream, WriterOptions options);
|
||||
ValueTask SaveToAsync(Stream stream, WriterOptions options, CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Use this to pause entry rebuilding when adding large collections of entries. Dispose when complete. A using statement is recommended.
|
||||
/// </summary>
|
||||
/// <returns>IDisposeable to resume entry rebuilding</returns>
|
||||
IAsyncDisposable PauseEntryRebuilding();
|
||||
}
|
||||
}
|
||||
@@ -1,54 +1,62 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Writers;
|
||||
|
||||
namespace SharpCompress.Archives
|
||||
{
|
||||
public static class IWritableArchiveExtensions
|
||||
{
|
||||
public static void AddEntry(this IWritableArchive writableArchive,
|
||||
string entryPath, string filePath)
|
||||
public static async ValueTask AddEntryAsync(this IWritableArchive writableArchive,
|
||||
string entryPath, string filePath,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var fileInfo = new FileInfo(filePath);
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
throw new FileNotFoundException("Could not AddEntry: " + filePath);
|
||||
}
|
||||
writableArchive.AddEntry(entryPath, new FileInfo(filePath).OpenRead(), true, fileInfo.Length,
|
||||
fileInfo.LastWriteTime);
|
||||
await writableArchive.AddEntryAsync(entryPath, new FileInfo(filePath).OpenRead(), true, fileInfo.Length,
|
||||
fileInfo.LastWriteTime, cancellationToken);
|
||||
}
|
||||
|
||||
public static void SaveTo(this IWritableArchive writableArchive, string filePath, WriterOptions options)
|
||||
public static Task SaveToAsync(this IWritableArchive writableArchive, string filePath, WriterOptions options, CancellationToken cancellationToken = default)
|
||||
{
|
||||
writableArchive.SaveTo(new FileInfo(filePath), options);
|
||||
return writableArchive.SaveToAsync(new FileInfo(filePath), options, cancellationToken);
|
||||
}
|
||||
|
||||
public static void SaveTo(this IWritableArchive writableArchive, FileInfo fileInfo, WriterOptions options)
|
||||
public static async Task SaveToAsync(this IWritableArchive writableArchive, FileInfo fileInfo, WriterOptions options, CancellationToken cancellationToken = default)
|
||||
{
|
||||
using (var stream = fileInfo.Open(FileMode.Create, FileAccess.Write))
|
||||
{
|
||||
writableArchive.SaveTo(stream, options);
|
||||
}
|
||||
await using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
|
||||
await writableArchive.SaveToAsync(stream, options, cancellationToken);
|
||||
}
|
||||
|
||||
public static void AddAllFromDirectory(
|
||||
public static async ValueTask AddAllFromDirectoryAsync(
|
||||
this IWritableArchive writableArchive,
|
||||
string filePath, string searchPattern = "*.*", SearchOption searchOption = SearchOption.AllDirectories)
|
||||
string filePath, string searchPattern = "*.*",
|
||||
SearchOption searchOption = SearchOption.AllDirectories,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
foreach (var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption))
|
||||
await using (writableArchive.PauseEntryRebuilding())
|
||||
{
|
||||
var fileInfo = new FileInfo(path);
|
||||
writableArchive.AddEntry(path.Substring(filePath.Length), fileInfo.OpenRead(), true, fileInfo.Length,
|
||||
fileInfo.LastWriteTime);
|
||||
foreach (var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption))
|
||||
{
|
||||
var fileInfo = new FileInfo(path);
|
||||
await writableArchive.AddEntryAsync(path.Substring(filePath.Length), fileInfo.OpenRead(), true, fileInfo.Length,
|
||||
fileInfo.LastWriteTime,
|
||||
cancellationToken);
|
||||
}
|
||||
}
|
||||
}
|
||||
public static IArchiveEntry AddEntry(this IWritableArchive writableArchive, string key, FileInfo fileInfo)
|
||||
public static ValueTask<IArchiveEntry> AddEntryAsync(this IWritableArchive writableArchive, string key, FileInfo fileInfo,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
throw new ArgumentException("FileInfo does not exist.");
|
||||
}
|
||||
return writableArchive.AddEntry(key, fileInfo.OpenRead(), true, fileInfo.Length, fileInfo.LastWriteTime);
|
||||
return writableArchive.AddEntryAsync(key, fileInfo.OpenRead(), true, fileInfo.Length, fileInfo.LastWriteTime, cancellationToken);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -3,9 +3,9 @@ using SharpCompress.Common.Rar.Headers;
|
||||
|
||||
namespace SharpCompress.Archives.Rar
|
||||
{
|
||||
internal class FileInfoRarFilePart : SeekableFilePart
|
||||
internal sealed class FileInfoRarFilePart : SeekableFilePart
|
||||
{
|
||||
internal FileInfoRarFilePart(FileInfoRarArchiveVolume volume, string password, MarkHeader mh, FileHeader fh, FileInfo fi)
|
||||
internal FileInfoRarFilePart(FileInfoRarArchiveVolume volume, string? password, MarkHeader mh, FileHeader fh, FileInfo fi)
|
||||
: base(mh, fh, volume.Stream, password)
|
||||
{
|
||||
FileInfo = fi;
|
||||
|
||||
@@ -10,7 +10,8 @@ using SharpCompress.Readers.Rar;
|
||||
|
||||
namespace SharpCompress.Archives.Rar
|
||||
{
|
||||
public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
public class
|
||||
RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
{
|
||||
internal Lazy<IRarUnpack> UnpackV2017 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV2017.Unpack());
|
||||
internal Lazy<IRarUnpack> UnpackV1 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV1.Unpack());
|
||||
@@ -42,7 +43,7 @@ namespace SharpCompress.Archives.Rar
|
||||
|
||||
protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes)
|
||||
{
|
||||
return RarArchiveEntryFactory.GetEntries(this, volumes);
|
||||
return RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions);
|
||||
}
|
||||
|
||||
protected override IEnumerable<RarVolume> LoadVolumes(IEnumerable<Stream> streams)
|
||||
@@ -65,7 +66,7 @@ namespace SharpCompress.Archives.Rar
|
||||
/// </summary>
|
||||
/// <param name="filePath"></param>
|
||||
/// <param name="options"></param>
|
||||
public static RarArchive Open(string filePath, ReaderOptions options = null)
|
||||
public static RarArchive Open(string filePath, ReaderOptions? options = null)
|
||||
{
|
||||
filePath.CheckNotNullOrEmpty(nameof(filePath));
|
||||
return new RarArchive(new FileInfo(filePath), options ?? new ReaderOptions());
|
||||
@@ -76,7 +77,7 @@ namespace SharpCompress.Archives.Rar
|
||||
/// </summary>
|
||||
/// <param name="fileInfo"></param>
|
||||
/// <param name="options"></param>
|
||||
public static RarArchive Open(FileInfo fileInfo, ReaderOptions options = null)
|
||||
public static RarArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
|
||||
{
|
||||
fileInfo.CheckNotNull(nameof(fileInfo));
|
||||
return new RarArchive(fileInfo, options ?? new ReaderOptions());
|
||||
@@ -87,7 +88,7 @@ namespace SharpCompress.Archives.Rar
|
||||
/// </summary>
|
||||
/// <param name="stream"></param>
|
||||
/// <param name="options"></param>
|
||||
public static RarArchive Open(Stream stream, ReaderOptions options = null)
|
||||
public static RarArchive Open(Stream stream, ReaderOptions? options = null)
|
||||
{
|
||||
stream.CheckNotNull(nameof(stream));
|
||||
return Open(stream.AsEnumerable(), options ?? new ReaderOptions());
|
||||
@@ -98,7 +99,7 @@ namespace SharpCompress.Archives.Rar
|
||||
/// </summary>
|
||||
/// <param name="streams"></param>
|
||||
/// <param name="options"></param>
|
||||
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions options = null)
|
||||
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
|
||||
{
|
||||
streams.CheckNotNull(nameof(streams));
|
||||
return new RarArchive(streams, options ?? new ReaderOptions());
|
||||
@@ -120,8 +121,8 @@ namespace SharpCompress.Archives.Rar
|
||||
return IsRarFile(stream);
|
||||
}
|
||||
}
|
||||
|
||||
public static bool IsRarFile(Stream stream, ReaderOptions options = null)
|
||||
|
||||
public static bool IsRarFile(Stream stream, ReaderOptions? options = null)
|
||||
{
|
||||
try
|
||||
{
|
||||
|
||||
@@ -6,6 +6,7 @@ using SharpCompress.Common;
|
||||
using SharpCompress.Common.Rar;
|
||||
using SharpCompress.Common.Rar.Headers;
|
||||
using SharpCompress.Compressors.Rar;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives.Rar
|
||||
{
|
||||
@@ -13,11 +14,13 @@ namespace SharpCompress.Archives.Rar
|
||||
{
|
||||
private readonly ICollection<RarFilePart> parts;
|
||||
private readonly RarArchive archive;
|
||||
private readonly ReaderOptions readerOptions;
|
||||
|
||||
internal RarArchiveEntry(RarArchive archive, IEnumerable<RarFilePart> parts)
|
||||
internal RarArchiveEntry(RarArchive archive, IEnumerable<RarFilePart> parts, ReaderOptions readerOptions)
|
||||
{
|
||||
this.parts = parts.ToList();
|
||||
this.archive = archive;
|
||||
this.readerOptions = readerOptions;
|
||||
}
|
||||
|
||||
public override CompressionType CompressionType => CompressionType.Rar;
|
||||
@@ -57,30 +60,26 @@ namespace SharpCompress.Archives.Rar
|
||||
|
||||
public Stream OpenEntryStream()
|
||||
{
|
||||
if (archive.IsSolid)
|
||||
{
|
||||
throw new InvalidOperationException("Use ExtractAllEntries to extract SOLID archives.");
|
||||
}
|
||||
|
||||
if (IsRarV3)
|
||||
{
|
||||
return new RarStream(archive.UnpackV1.Value, FileHeader, new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive));
|
||||
}
|
||||
|
||||
|
||||
return new RarStream(archive.UnpackV2017.Value, FileHeader, new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive));
|
||||
}
|
||||
|
||||
public bool IsComplete
|
||||
{
|
||||
get
|
||||
{
|
||||
return parts.Select(fp => fp.FileHeader).Any(fh => !fh.IsSplitAfter);
|
||||
}
|
||||
public bool IsComplete
|
||||
{
|
||||
get
|
||||
{
|
||||
var headers = parts.Select(x => x.FileHeader);
|
||||
return !headers.First().IsSplitBefore && !headers.Last().IsSplitAfter;
|
||||
}
|
||||
}
|
||||
|
||||
private void CheckIncomplete()
|
||||
{
|
||||
if (!IsComplete)
|
||||
if (!readerOptions.DisableCheckIncomplete && !IsComplete)
|
||||
{
|
||||
throw new IncompleteArchiveException("ArchiveEntry is incomplete and cannot perform this operation.");
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
using System.Collections.Generic;
|
||||
using SharpCompress.Common.Rar;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives.Rar
|
||||
{
|
||||
@@ -36,11 +37,12 @@ namespace SharpCompress.Archives.Rar
|
||||
}
|
||||
|
||||
internal static IEnumerable<RarArchiveEntry> GetEntries(RarArchive archive,
|
||||
IEnumerable<RarVolume> rarParts)
|
||||
IEnumerable<RarVolume> rarParts,
|
||||
ReaderOptions readerOptions)
|
||||
{
|
||||
foreach (var groupedParts in GetMatchedFileParts(rarParts))
|
||||
{
|
||||
yield return new RarArchiveEntry(archive, groupedParts);
|
||||
yield return new RarArchiveEntry(archive, groupedParts, readerOptions);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -23,7 +23,7 @@ namespace SharpCompress.Archives.Rar
|
||||
yield return part;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
internal static IEnumerable<RarVolume> GetParts(FileInfo fileInfo, ReaderOptions options)
|
||||
{
|
||||
FileInfoRarArchiveVolume part = new FileInfoRarArchiveVolume(fileInfo, options);
|
||||
@@ -34,20 +34,20 @@ namespace SharpCompress.Archives.Rar
|
||||
{
|
||||
yield break; //if file isn't volume then there is no reason to look
|
||||
}
|
||||
fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart);
|
||||
fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart)!;
|
||||
//we use fileinfo because rar is dumb and looks at file names rather than archive info for another volume
|
||||
while (fileInfo != null && fileInfo.Exists)
|
||||
{
|
||||
part = new FileInfoRarArchiveVolume(fileInfo, options);
|
||||
|
||||
fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart);
|
||||
fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart)!;
|
||||
yield return part;
|
||||
}
|
||||
}
|
||||
|
||||
private static FileInfo GetNextFileInfo(ArchiveHeader ah, FileInfoRarFilePart currentFilePart)
|
||||
private static FileInfo? GetNextFileInfo(ArchiveHeader ah, FileInfoRarFilePart? currentFilePart)
|
||||
{
|
||||
if (currentFilePart == null)
|
||||
if (currentFilePart is null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
@@ -68,7 +68,7 @@ namespace SharpCompress.Archives.Rar
|
||||
// .rar, .r00, .r01, ...
|
||||
string extension = currentFileInfo.Extension;
|
||||
|
||||
StringBuilder buffer = new StringBuilder(currentFileInfo.FullName.Length);
|
||||
var buffer = new StringBuilder(currentFileInfo.FullName.Length);
|
||||
buffer.Append(currentFileInfo.FullName.Substring(0,
|
||||
currentFileInfo.FullName.Length - extension.Length));
|
||||
if (string.Compare(extension, ".rar", StringComparison.OrdinalIgnoreCase) == 0)
|
||||
@@ -77,8 +77,7 @@ namespace SharpCompress.Archives.Rar
|
||||
}
|
||||
else
|
||||
{
|
||||
int num = 0;
|
||||
if (int.TryParse(extension.Substring(2, 2), out num))
|
||||
if (int.TryParse(extension.Substring(2, 2), out int num))
|
||||
{
|
||||
num++;
|
||||
buffer.Append(".r");
|
||||
@@ -111,12 +110,11 @@ namespace SharpCompress.Archives.Rar
|
||||
}
|
||||
StringBuilder buffer = new StringBuilder(currentFileInfo.FullName.Length);
|
||||
buffer.Append(currentFileInfo.FullName, 0, startIndex);
|
||||
int num = 0;
|
||||
string numString = currentFileInfo.FullName.Substring(startIndex + 5,
|
||||
currentFileInfo.FullName.IndexOf('.', startIndex + 5) -
|
||||
startIndex - 5);
|
||||
buffer.Append(".part");
|
||||
if (int.TryParse(numString, out num))
|
||||
if (int.TryParse(numString, out int num))
|
||||
{
|
||||
num++;
|
||||
for (int i = 0; i < numString.Length - num.ToString().Length; i++)
|
||||
|
||||
@@ -7,9 +7,9 @@ namespace SharpCompress.Archives.Rar
|
||||
internal class SeekableFilePart : RarFilePart
|
||||
{
|
||||
private readonly Stream stream;
|
||||
private readonly string password;
|
||||
private readonly string? password;
|
||||
|
||||
internal SeekableFilePart(MarkHeader mh, FileHeader fh, Stream stream, string password)
|
||||
internal SeekableFilePart(MarkHeader mh, FileHeader fh, Stream stream, string? password)
|
||||
: base(mh, fh)
|
||||
{
|
||||
this.stream = stream;
|
||||
@@ -21,7 +21,7 @@ namespace SharpCompress.Archives.Rar
|
||||
stream.Position = FileHeader.DataStartPosition;
|
||||
if (FileHeader.R4Salt != null)
|
||||
{
|
||||
return new RarCryptoWrapper(stream, password, FileHeader.R4Salt);
|
||||
return new RarCryptoWrapper(stream, password!, FileHeader.R4Salt);
|
||||
}
|
||||
return stream;
|
||||
}
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
using System;
|
||||
#nullable disable
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
@@ -108,7 +110,7 @@ namespace SharpCompress.Archives.SevenZip
|
||||
|
||||
private void LoadFactory(Stream stream)
|
||||
{
|
||||
if (database == null)
|
||||
if (database is null)
|
||||
{
|
||||
stream.Position = 0;
|
||||
var reader = new ArchiveReader();
|
||||
@@ -129,12 +131,12 @@ namespace SharpCompress.Archives.SevenZip
|
||||
}
|
||||
}
|
||||
|
||||
private static readonly byte[] SIGNATURE = {(byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C};
|
||||
private static ReadOnlySpan<byte> SIGNATURE => new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };
|
||||
|
||||
private static bool SignatureMatch(Stream stream)
|
||||
{
|
||||
BinaryReader reader = new BinaryReader(stream);
|
||||
byte[] signatureBytes = reader.ReadBytes(6);
|
||||
ReadOnlySpan<byte> signatureBytes = reader.ReadBytes(6);
|
||||
return signatureBytes.SequenceEqual(SIGNATURE);
|
||||
}
|
||||
|
||||
@@ -154,7 +156,7 @@ namespace SharpCompress.Archives.SevenZip
|
||||
}
|
||||
}
|
||||
|
||||
private class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
|
||||
private sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
|
||||
{
|
||||
private readonly SevenZipArchive archive;
|
||||
private CFolder currentFolder;
|
||||
@@ -180,7 +182,7 @@ namespace SharpCompress.Archives.SevenZip
|
||||
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
|
||||
{
|
||||
currentFolder = group.Key;
|
||||
if (group.Key == null)
|
||||
if (group.Key is null)
|
||||
{
|
||||
currentStream = Stream.Null;
|
||||
}
|
||||
|
||||
@@ -2,6 +2,9 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Tar;
|
||||
using SharpCompress.Common.Tar.Headers;
|
||||
@@ -20,7 +23,7 @@ namespace SharpCompress.Archives.Tar
|
||||
/// </summary>
|
||||
/// <param name="filePath"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static TarArchive Open(string filePath, ReaderOptions readerOptions = null)
|
||||
public static TarArchive Open(string filePath, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
filePath.CheckNotNullOrEmpty(nameof(filePath));
|
||||
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
|
||||
@@ -31,10 +34,11 @@ namespace SharpCompress.Archives.Tar
|
||||
/// </summary>
|
||||
/// <param name="fileInfo"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static TarArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
|
||||
public static TarArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
fileInfo.CheckNotNull(nameof(fileInfo));
|
||||
return new TarArchive(fileInfo, readerOptions ?? new ReaderOptions());
|
||||
return new TarArchive(fileInfo, readerOptions ?? new ReaderOptions(), cancellationToken);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -42,35 +46,35 @@ namespace SharpCompress.Archives.Tar
|
||||
/// </summary>
|
||||
/// <param name="stream"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static TarArchive Open(Stream stream, ReaderOptions readerOptions = null)
|
||||
public static TarArchive Open(Stream stream, ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
stream.CheckNotNull(nameof(stream));
|
||||
return new TarArchive(stream, readerOptions ?? new ReaderOptions());
|
||||
return new TarArchive(stream, readerOptions ?? new ReaderOptions(), cancellationToken);
|
||||
}
|
||||
|
||||
public static bool IsTarFile(string filePath)
|
||||
public static ValueTask<bool> IsTarFileAsync(string filePath, CancellationToken cancellationToken = default)
|
||||
{
|
||||
return IsTarFile(new FileInfo(filePath));
|
||||
return IsTarFileAsync(new FileInfo(filePath), cancellationToken);
|
||||
}
|
||||
|
||||
public static bool IsTarFile(FileInfo fileInfo)
|
||||
public static async ValueTask<bool> IsTarFileAsync(FileInfo fileInfo, CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
using (Stream stream = fileInfo.OpenRead())
|
||||
{
|
||||
return IsTarFile(stream);
|
||||
}
|
||||
|
||||
await using Stream stream = fileInfo.OpenRead();
|
||||
return await IsTarFileAsync(stream, cancellationToken);
|
||||
}
|
||||
|
||||
public static bool IsTarFile(Stream stream)
|
||||
public static async ValueTask<bool> IsTarFileAsync(Stream stream, CancellationToken cancellationToken = default)
|
||||
{
|
||||
try
|
||||
{
|
||||
TarHeader tarHeader = new TarHeader(new ArchiveEncoding());
|
||||
bool readSucceeded = tarHeader.Read(new BinaryReader(stream));
|
||||
TarHeader tarHeader = new(new ArchiveEncoding());
|
||||
bool readSucceeded = await tarHeader.Read(stream, cancellationToken);
|
||||
bool isEmptyArchive = tarHeader.Name.Length == 0 && tarHeader.Size == 0 && Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
|
||||
return readSucceeded || isEmptyArchive;
|
||||
}
|
||||
@@ -79,20 +83,21 @@ namespace SharpCompress.Archives.Tar
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a FileInfo object to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="fileInfo"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
internal TarArchive(FileInfo fileInfo, ReaderOptions readerOptions)
|
||||
: base(ArchiveType.Tar, fileInfo, readerOptions)
|
||||
internal TarArchive(FileInfo fileInfo, ReaderOptions readerOptions,
|
||||
CancellationToken cancellationToken)
|
||||
: base(ArchiveType.Tar, fileInfo, readerOptions, cancellationToken)
|
||||
{
|
||||
}
|
||||
|
||||
protected override IEnumerable<TarVolume> LoadVolumes(FileInfo file)
|
||||
protected override IAsyncEnumerable<TarVolume> LoadVolumes(FileInfo file, CancellationToken cancellationToken)
|
||||
{
|
||||
return new TarVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
|
||||
return new TarVolume(file.OpenRead(), ReaderOptions).AsAsyncEnumerable();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -100,8 +105,9 @@ namespace SharpCompress.Archives.Tar
|
||||
/// </summary>
|
||||
/// <param name="stream"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
internal TarArchive(Stream stream, ReaderOptions readerOptions)
|
||||
: base(ArchiveType.Tar, stream, readerOptions)
|
||||
internal TarArchive(Stream stream, ReaderOptions readerOptions,
|
||||
CancellationToken cancellationToken)
|
||||
: base(ArchiveType.Tar, stream, readerOptions, cancellationToken)
|
||||
{
|
||||
}
|
||||
|
||||
@@ -110,16 +116,18 @@ namespace SharpCompress.Archives.Tar
|
||||
{
|
||||
}
|
||||
|
||||
protected override IEnumerable<TarVolume> LoadVolumes(IEnumerable<Stream> streams)
|
||||
protected override async IAsyncEnumerable<TarVolume> LoadVolumes(IAsyncEnumerable<Stream> streams,
|
||||
[EnumeratorCancellation]CancellationToken cancellationToken)
|
||||
{
|
||||
return new TarVolume(streams.First(), ReaderOptions).AsEnumerable();
|
||||
yield return new TarVolume(await streams.FirstAsync(cancellationToken: cancellationToken), ReaderOptions);
|
||||
}
|
||||
|
||||
protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
|
||||
protected override async IAsyncEnumerable<TarArchiveEntry> LoadEntries(IAsyncEnumerable<TarVolume> volumes,
|
||||
[EnumeratorCancellation]CancellationToken cancellationToken)
|
||||
{
|
||||
Stream stream = volumes.Single().Stream;
|
||||
TarHeader previousHeader = null;
|
||||
foreach (TarHeader header in TarHeaderFactory.ReadHeader(StreamingMode.Seekable, stream, ReaderOptions.ArchiveEncoding))
|
||||
Stream stream = (await volumes.SingleAsync(cancellationToken: cancellationToken)).Stream;
|
||||
TarHeader? previousHeader = null;
|
||||
await foreach (TarHeader? header in TarHeaderFactory.ReadHeader(StreamingMode.Seekable, stream, ReaderOptions.ArchiveEncoding, cancellationToken))
|
||||
{
|
||||
if (header != null)
|
||||
{
|
||||
@@ -136,11 +144,11 @@ namespace SharpCompress.Archives.Tar
|
||||
|
||||
var oldStreamPos = stream.Position;
|
||||
|
||||
using (var entryStream = entry.OpenEntryStream())
|
||||
await using (var entryStream = await entry.OpenEntryStreamAsync(cancellationToken))
|
||||
{
|
||||
using (var memoryStream = new MemoryStream())
|
||||
await using (var memoryStream = new MemoryStream())
|
||||
{
|
||||
entryStream.TransferTo(memoryStream);
|
||||
await entryStream.TransferToAsync(memoryStream, cancellationToken);
|
||||
memoryStream.Position = 0;
|
||||
var bytes = memoryStream.ToArray();
|
||||
|
||||
@@ -160,38 +168,37 @@ namespace SharpCompress.Archives.Tar
|
||||
|
||||
public static TarArchive Create()
|
||||
{
|
||||
return new TarArchive();
|
||||
return new();
|
||||
}
|
||||
|
||||
protected override TarArchiveEntry CreateEntryInternal(string filePath, Stream source,
|
||||
long size, DateTime? modified, bool closeStream)
|
||||
protected override ValueTask<TarArchiveEntry> CreateEntryInternal(string filePath, Stream source,
|
||||
long size, DateTime? modified, bool closeStream,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
return new TarWritableArchiveEntry(this, source, CompressionType.Unknown, filePath, size, modified,
|
||||
closeStream);
|
||||
return new (new TarWritableArchiveEntry(this, source, CompressionType.Unknown, filePath, size, modified,
|
||||
closeStream));
|
||||
}
|
||||
|
||||
protected override void SaveTo(Stream stream, WriterOptions options,
|
||||
IEnumerable<TarArchiveEntry> oldEntries,
|
||||
IEnumerable<TarArchiveEntry> newEntries)
|
||||
protected override async ValueTask SaveToAsync(Stream stream, WriterOptions options,
|
||||
IAsyncEnumerable<TarArchiveEntry> oldEntries,
|
||||
IAsyncEnumerable<TarArchiveEntry> newEntries,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
using (var writer = new TarWriter(stream, new TarWriterOptions(options)))
|
||||
await using var writer = await TarWriter.CreateAsync(stream, new TarWriterOptions(options), cancellationToken);
|
||||
await foreach (var entry in oldEntries.Concat(newEntries)
|
||||
.Where(x => !x.IsDirectory)
|
||||
.WithCancellation(cancellationToken))
|
||||
{
|
||||
foreach (var entry in oldEntries.Concat(newEntries)
|
||||
.Where(x => !x.IsDirectory))
|
||||
{
|
||||
using (var entryStream = entry.OpenEntryStream())
|
||||
{
|
||||
writer.Write(entry.Key, entryStream, entry.LastModifiedTime, entry.Size);
|
||||
}
|
||||
}
|
||||
await using var entryStream = await entry.OpenEntryStreamAsync(cancellationToken);
|
||||
await writer.WriteAsync(entry.Key, entryStream, entry.LastModifiedTime, entry.Size, cancellationToken);
|
||||
}
|
||||
}
|
||||
|
||||
protected override IReader CreateReaderForSolidExtraction()
|
||||
protected override async ValueTask<IReader> CreateReaderForSolidExtraction()
|
||||
{
|
||||
var stream = Volumes.Single().Stream;
|
||||
var stream = (await Volumes.SingleAsync()).Stream;
|
||||
stream.Position = 0;
|
||||
return TarReader.Open(stream);
|
||||
return await TarReader.OpenAsync(stream);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Tar;
|
||||
|
||||
@@ -13,9 +15,9 @@ namespace SharpCompress.Archives.Tar
|
||||
Archive = archive;
|
||||
}
|
||||
|
||||
public virtual Stream OpenEntryStream()
|
||||
public virtual async ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
return Parts.Single().GetCompressedStream();
|
||||
return await Parts.Single().GetCompressedStreamAsync(cancellationToken);
|
||||
}
|
||||
|
||||
#region IArchiveEntry Members
|
||||
|
||||
@@ -1,12 +1,16 @@
|
||||
using System;
|
||||
#nullable disable
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.IO;
|
||||
|
||||
namespace SharpCompress.Archives.Tar
|
||||
{
|
||||
internal class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiveEntry
|
||||
internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiveEntry
|
||||
{
|
||||
private readonly bool closeStream;
|
||||
private readonly Stream stream;
|
||||
@@ -47,18 +51,18 @@ namespace SharpCompress.Archives.Tar
|
||||
internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();
|
||||
Stream IWritableArchiveEntry.Stream => stream;
|
||||
|
||||
public override Stream OpenEntryStream()
|
||||
public override ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
//ensure new stream is at the start, this could be reset
|
||||
stream.Seek(0, SeekOrigin.Begin);
|
||||
return new NonDisposingStream(stream);
|
||||
return new(new NonDisposingStream(stream));
|
||||
}
|
||||
|
||||
internal override void Close()
|
||||
internal override async ValueTask CloseAsync()
|
||||
{
|
||||
if (closeStream)
|
||||
{
|
||||
stream.Dispose();
|
||||
await stream.DisposeAsync();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,10 +2,14 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Zip;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
using SharpCompress.Compressors.Deflate;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
using SharpCompress.Readers.Zip;
|
||||
using SharpCompress.Writers;
|
||||
@@ -15,20 +19,22 @@ namespace SharpCompress.Archives.Zip
|
||||
{
|
||||
public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
{
|
||||
#nullable disable
|
||||
private readonly SeekableZipHeaderFactory headerFactory;
|
||||
#nullable enable
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets the compression level applied to files added to the archive,
|
||||
/// if the compression method is set to deflate
|
||||
/// </summary>
|
||||
public CompressionLevel DeflateCompressionLevel { get; set; }
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// Constructor expects a filepath to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="filePath"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(string filePath, ReaderOptions readerOptions = null)
|
||||
public static ZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
filePath.CheckNotNullOrEmpty(nameof(filePath));
|
||||
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
|
||||
@@ -39,10 +45,11 @@ namespace SharpCompress.Archives.Zip
|
||||
/// </summary>
|
||||
/// <param name="fileInfo"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
|
||||
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
fileInfo.CheckNotNull(nameof(fileInfo));
|
||||
return new ZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
|
||||
return new ZipArchive(fileInfo, readerOptions ?? new ReaderOptions(), cancellationToken);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -50,37 +57,46 @@ namespace SharpCompress.Archives.Zip
|
||||
/// </summary>
|
||||
/// <param name="stream"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
|
||||
public static ZipArchive Open(Stream stream, ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
stream.CheckNotNull(nameof(stream));
|
||||
return new ZipArchive(stream, readerOptions ?? new ReaderOptions());
|
||||
}
|
||||
|
||||
public static bool IsZipFile(string filePath, string password = null)
|
||||
{
|
||||
return IsZipFile(new FileInfo(filePath), password);
|
||||
return new ZipArchive(stream, readerOptions ?? new ReaderOptions(), cancellationToken);
|
||||
}
|
||||
|
||||
public static bool IsZipFile(FileInfo fileInfo, string password = null)
|
||||
public static ValueTask<bool> IsZipFile(string filePath, string? password = null)
|
||||
{
|
||||
return IsZipFileAsync(new FileInfo(filePath), password);
|
||||
}
|
||||
|
||||
public static async ValueTask<bool> IsZipFileAsync(FileInfo fileInfo, string? password = null)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
using (Stream stream = fileInfo.OpenRead())
|
||||
{
|
||||
return IsZipFile(stream, password);
|
||||
}
|
||||
|
||||
await using Stream stream = fileInfo.OpenRead();
|
||||
return await IsZipFileAsync(stream, password);
|
||||
}
|
||||
|
||||
public static bool IsZipFile(Stream stream, string password = null)
|
||||
public static async ValueTask<bool> IsZipFileAsync(Stream stream, string? password = null, CancellationToken cancellationToken = default)
|
||||
{
|
||||
StreamingZipHeaderFactory headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding());
|
||||
StreamingZipHeaderFactory headerFactory = new(password, new ArchiveEncoding());
|
||||
try
|
||||
{
|
||||
ZipHeader header =
|
||||
headerFactory.ReadStreamHeader(stream).FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
|
||||
if (header == null)
|
||||
RewindableStream rewindableStream;
|
||||
if (stream is RewindableStream rs)
|
||||
{
|
||||
rewindableStream = rs;
|
||||
}
|
||||
else
|
||||
{
|
||||
rewindableStream = new RewindableStream(stream);
|
||||
}
|
||||
ZipHeader? header = await headerFactory.ReadStreamHeader(rewindableStream, cancellationToken)
|
||||
.FirstOrDefaultAsync(x => x.ZipHeaderType != ZipHeaderType.Split, cancellationToken: cancellationToken);
|
||||
if (header is null)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
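For orientation, a minimal sketch of how a caller might use the reworked detection and open APIs after this change. The helper class and surrounding file handling are hypothetical; only the ZipArchive signatures shown in this diff (IsZipFile returning ValueTask<bool>, the Stream Open overload taking a CancellationToken) are assumed.

using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives.Zip;
using SharpCompress.Readers;

internal static class ZipProbe
{
    // Hypothetical caller, not part of the library.
    public static async Task OpenIfZipAsync(string path, CancellationToken token)
    {
        if (!await ZipArchive.IsZipFile(path))          // now returns ValueTask<bool>
        {
            Console.WriteLine($"{path} does not look like a zip archive");
            return;
        }

        await using var stream = File.OpenRead(path);
        // The Stream overload threads the token through to header reading.
        var archive = ZipArchive.Open(stream, new ReaderOptions(), token);
        // work with archive here
    }
}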
@@ -95,21 +111,23 @@ namespace SharpCompress.Archives.Zip
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a FileInfo object to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="fileInfo"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
internal ZipArchive(FileInfo fileInfo, ReaderOptions readerOptions)
|
||||
: base(ArchiveType.Zip, fileInfo, readerOptions)
|
||||
internal ZipArchive(FileInfo fileInfo, ReaderOptions readerOptions,
|
||||
CancellationToken cancellationToken)
|
||||
: base(ArchiveType.Zip, fileInfo, readerOptions, cancellationToken)
|
||||
{
|
||||
headerFactory = new SeekableZipHeaderFactory(readerOptions.Password, readerOptions.ArchiveEncoding);
|
||||
}
|
||||
|
||||
protected override IEnumerable<ZipVolume> LoadVolumes(FileInfo file)
|
||||
protected override IAsyncEnumerable<ZipVolume> LoadVolumes(FileInfo file,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
return new ZipVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
|
||||
return new ZipVolume(file.OpenRead(), ReaderOptions).AsAsyncEnumerable();
|
||||
}
|
||||
|
||||
internal ZipArchive()
|
||||
@@ -122,82 +140,86 @@ namespace SharpCompress.Archives.Zip
|
||||
/// </summary>
|
||||
/// <param name="stream"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
internal ZipArchive(Stream stream, ReaderOptions readerOptions)
|
||||
: base(ArchiveType.Zip, stream, readerOptions)
|
||||
internal ZipArchive(Stream stream, ReaderOptions readerOptions,
|
||||
CancellationToken cancellationToken)
|
||||
: base(ArchiveType.Zip, stream, readerOptions, cancellationToken)
|
||||
{
|
||||
headerFactory = new SeekableZipHeaderFactory(readerOptions.Password, readerOptions.ArchiveEncoding);
|
||||
}
|
||||
|
||||
protected override IEnumerable<ZipVolume> LoadVolumes(IEnumerable<Stream> streams)
|
||||
protected override async IAsyncEnumerable<ZipVolume> LoadVolumes(IAsyncEnumerable<Stream> streams,
|
||||
[EnumeratorCancellation]CancellationToken cancellationToken)
|
||||
{
|
||||
return new ZipVolume(streams.First(), ReaderOptions).AsEnumerable();
|
||||
yield return new ZipVolume(await streams.FirstAsync(cancellationToken: cancellationToken), ReaderOptions);
|
||||
}
|
||||
|
||||
protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
|
||||
protected override async IAsyncEnumerable<ZipArchiveEntry> LoadEntries(IAsyncEnumerable<ZipVolume> volumes,
|
||||
[EnumeratorCancellation]CancellationToken cancellationToken)
|
||||
{
|
||||
var volume = volumes.Single();
|
||||
await Task.CompletedTask;
|
||||
var volume = await volumes.SingleAsync(cancellationToken: cancellationToken);
|
||||
Stream stream = volume.Stream;
|
||||
foreach (ZipHeader h in headerFactory.ReadSeekableHeader(stream))
|
||||
await foreach (ZipHeader h in headerFactory.ReadSeekableHeader(stream, cancellationToken))
|
||||
{
|
||||
if (h != null)
|
||||
{
|
||||
switch (h.ZipHeaderType)
|
||||
{
|
||||
case ZipHeaderType.DirectoryEntry:
|
||||
{
|
||||
yield return new ZipArchiveEntry(this,
|
||||
new SeekableZipFilePart(headerFactory,
|
||||
h as DirectoryEntryHeader,
|
||||
stream));
|
||||
}
|
||||
{
|
||||
yield return new ZipArchiveEntry(this,
|
||||
new SeekableZipFilePart(headerFactory,
|
||||
(DirectoryEntryHeader)h,
|
||||
stream));
|
||||
}
|
||||
break;
|
||||
case ZipHeaderType.DirectoryEnd:
|
||||
{
|
||||
byte[] bytes = (h as DirectoryEndHeader).Comment;
|
||||
volume.Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
|
||||
yield break;
|
||||
}
|
||||
{
|
||||
byte[] bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
|
||||
volume.Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
|
||||
yield break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void SaveTo(Stream stream)
|
||||
public ValueTask SaveToAsync(Stream stream, CancellationToken cancellationToken = default)
|
||||
{
|
||||
SaveTo(stream, new WriterOptions(CompressionType.Deflate));
|
||||
return SaveToAsync(stream, new WriterOptions(CompressionType.Deflate), cancellationToken);
|
||||
}
|
||||
|
||||
protected override void SaveTo(Stream stream, WriterOptions options,
|
||||
IEnumerable<ZipArchiveEntry> oldEntries,
|
||||
IEnumerable<ZipArchiveEntry> newEntries)
|
||||
protected override async ValueTask SaveToAsync(Stream stream, WriterOptions options,
|
||||
IAsyncEnumerable<ZipArchiveEntry> oldEntries,
|
||||
IAsyncEnumerable<ZipArchiveEntry> newEntries,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
using (var writer = new ZipWriter(stream, new ZipWriterOptions(options)))
|
||||
await using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
|
||||
await foreach (var entry in oldEntries.Concat(newEntries)
|
||||
.Where(x => !x.IsDirectory)
|
||||
.WithCancellation(cancellationToken))
|
||||
{
|
||||
foreach (var entry in oldEntries.Concat(newEntries)
|
||||
.Where(x => !x.IsDirectory))
|
||||
await using (var entryStream = await entry.OpenEntryStreamAsync(cancellationToken))
|
||||
{
|
||||
using (var entryStream = entry.OpenEntryStream())
|
||||
{
|
||||
writer.Write(entry.Key, entryStream, entry.LastModifiedTime);
|
||||
}
|
||||
await writer.WriteAsync(entry.Key, entryStream, entry.LastModifiedTime, cancellationToken);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected override ZipArchiveEntry CreateEntryInternal(string filePath, Stream source, long size, DateTime? modified,
|
||||
bool closeStream)
|
||||
protected override ValueTask<ZipArchiveEntry> CreateEntryInternal(string filePath, Stream source, long size, DateTime? modified,
|
||||
bool closeStream, CancellationToken cancellationToken = default)
|
||||
{
|
||||
return new ZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
|
||||
return new(new ZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream));
|
||||
}
|
||||
|
||||
public static ZipArchive Create()
|
||||
{
|
||||
return new ZipArchive();
|
||||
return new();
|
||||
}
|
||||
|
||||
protected override IReader CreateReaderForSolidExtraction()
|
||||
protected override async ValueTask<IReader> CreateReaderForSolidExtraction()
|
||||
{
|
||||
var stream = Volumes.Single().Stream;
|
||||
var stream = (await Volumes.SingleAsync()).Stream;
|
||||
stream.Position = 0;
|
||||
return ZipReader.Open(stream, ReaderOptions);
|
||||
}
|
||||
|
||||
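A small sketch of the new save path under the same assumptions: a hypothetical helper that creates an empty archive and persists it through the SaveToAsync overload introduced above.

using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives.Zip;

internal static class SaveDemo
{
    // Hypothetical: create an empty archive and persist it via the new async overload.
    public static async Task WriteEmptyArchiveAsync(string path, CancellationToken token = default)
    {
        var archive = ZipArchive.Create();
        await using var output = File.Create(path);
        await archive.SaveToAsync(output, token);
    }
}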
@@ -1,20 +1,22 @@
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Zip;
|
||||
|
||||
namespace SharpCompress.Archives.Zip
|
||||
{
|
||||
public class ZipArchiveEntry : ZipEntry, IArchiveEntry
|
||||
{
|
||||
internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart part)
|
||||
internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart? part)
|
||||
: base(part)
|
||||
{
|
||||
Archive = archive;
|
||||
}
|
||||
|
||||
public virtual Stream OpenEntryStream()
|
||||
public virtual ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
return Parts.Single().GetCompressedStream();
|
||||
return Parts.Single().GetCompressedStreamAsync(cancellationToken);
|
||||
}
|
||||
|
||||
#region IArchiveEntry Members
|
||||
@@ -25,6 +27,6 @@ namespace SharpCompress.Archives.Zip
|
||||
|
||||
#endregion
|
||||
|
||||
public string Comment => (Parts.Single() as SeekableZipFilePart).Comment;
|
||||
public string? Comment => ((SeekableZipFilePart)Parts.Single()).Comment;
|
||||
}
|
||||
}
|
||||
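Sketch of consuming the async entry stream introduced here; the helper and destination handling are illustrative, not library code. Only OpenEntryStreamAsync as declared above is assumed.

using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives.Zip;

internal static class EntryCopyDemo
{
    // Hypothetical helper: stream one entry's decompressed contents to a file.
    public static async Task SaveEntryAsync(ZipArchiveEntry entry, string destination,
        CancellationToken token = default)
    {
        await using var source = await entry.OpenEntryStreamAsync(token);
        await using var target = File.Create(destination);
        await source.CopyToAsync(target, token);
    }
}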
@@ -1,6 +1,8 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.IO;
|
||||
|
||||
@@ -49,18 +51,18 @@ namespace SharpCompress.Archives.Zip
|
||||
|
||||
Stream IWritableArchiveEntry.Stream => stream;
|
||||
|
||||
public override Stream OpenEntryStream()
|
||||
public override ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
//ensure new stream is at the start, this could be reset
|
||||
stream.Seek(0, SeekOrigin.Begin);
|
||||
return new NonDisposingStream(stream);
|
||||
return new(new NonDisposingStream(stream));
|
||||
}
|
||||
|
||||
internal override void Close()
|
||||
internal override async ValueTask CloseAsync()
|
||||
{
|
||||
if (closeStream && !isDisposed)
|
||||
{
|
||||
stream.Dispose();
|
||||
await stream.DisposeAsync();
|
||||
isDisposed = true;
|
||||
}
|
||||
}
|
||||
|
||||
25
src/SharpCompress/AsyncEnumerable.cs
Normal file
@@ -0,0 +1,25 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;

namespace SharpCompress
{
    public static class AsyncEnumerable
    {
        public static IAsyncEnumerable<T> Empty<T>() => EmptyAsyncEnumerable<T>.Instance;


        private class EmptyAsyncEnumerable<T> : IAsyncEnumerator<T>, IAsyncEnumerable<T>
        {
            public static readonly EmptyAsyncEnumerable<T> Instance =
                new();
            public T Current => default!;
            public ValueTask DisposeAsync() => default;
            public ValueTask<bool> MoveNextAsync() => new(false);
            public IAsyncEnumerator<T> GetAsyncEnumerator(CancellationToken cancellationToken = new CancellationToken())
            {
                return this;
            }
        }
    }
}
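A minimal sketch of consuming this new helper; the demo method is hypothetical and only uses the Empty<T>() member added above.

using System.Threading.Tasks;
using SharpCompress;

internal static class EmptyDemo
{
    public static async Task<int> CountAsync()
    {
        int count = 0;
        // Enumerating the shared empty sequence completes immediately with no items.
        await foreach (var _ in AsyncEnumerable.Empty<int>())
        {
            count++;
        }
        return count;   // always 0
    }
}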
@@ -18,24 +18,30 @@ namespace SharpCompress.Common
|
||||
/// <summary>
|
||||
/// Set this encoding when you want to force it for all encoding operations.
|
||||
/// </summary>
|
||||
public Encoding Forced { get; set; }
|
||||
public Encoding? Forced { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Set this when you want to use a custom method for all decoding operations.
|
||||
/// </summary>
|
||||
/// <returns>string Func(bytes, index, length)</returns>
|
||||
public Func<byte[], int, int, string> CustomDecoder { get; set; }
|
||||
//public Func<byte[], int, int, string>? CustomDecoder { get; set; }
|
||||
|
||||
public ArchiveEncoding()
|
||||
: this(Encoding.Default, Encoding.Default)
|
||||
{
|
||||
Default = Encoding.GetEncoding(437);
|
||||
Password = Encoding.GetEncoding(437);
|
||||
}
|
||||
public ArchiveEncoding(Encoding def, Encoding password)
|
||||
{
|
||||
Default = def;
|
||||
Password = password;
|
||||
}
|
||||
|
||||
#if !NET461
|
||||
static ArchiveEncoding()
|
||||
{
|
||||
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
|
||||
}
|
||||
#endif
|
||||
|
||||
public string Decode(byte[] bytes)
|
||||
{
|
||||
@@ -44,7 +50,12 @@ namespace SharpCompress.Common
|
||||
|
||||
public string Decode(byte[] bytes, int start, int length)
|
||||
{
|
||||
return GetDecoder().Invoke(bytes, start, length);
|
||||
return GetEncoding().GetString(bytes, start, length);
|
||||
}
|
||||
|
||||
public string Decode(ReadOnlySpan<byte> span)
|
||||
{
|
||||
return GetEncoding().GetString(span);
|
||||
}
|
||||
|
||||
public string DecodeUTF8(byte[] bytes)
|
||||
@@ -61,10 +72,5 @@ namespace SharpCompress.Common
|
||||
{
|
||||
return Forced ?? Default ?? Encoding.UTF8;
|
||||
}
|
||||
|
||||
public Func<byte[], int, int, string> GetDecoder()
|
||||
{
|
||||
return CustomDecoder ?? ((bytes, index, count) => GetEncoding().GetString(bytes, index, count));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
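Sketch of how the now-nullable Forced encoding might be used by a caller; the helper is hypothetical and only exercises members shown in this hunk (Forced, Decode, GetEncoding's fallback chain).

using System.Text;
using SharpCompress.Common;

internal static class EncodingDemo
{
    // Hypothetical: force UTF-8 name decoding regardless of the archive's default code page.
    public static string DecodeName(byte[] rawName)
    {
        var encoding = new ArchiveEncoding { Forced = Encoding.UTF8 };
        // Decode routes through GetEncoding(): Forced ?? Default ?? Encoding.UTF8.
        return encoding.Decode(rawName);
    }
}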
@@ -2,16 +2,22 @@
|
||||
|
||||
namespace SharpCompress.Common
|
||||
{
|
||||
public class CompressedBytesReadEventArgs : EventArgs
|
||||
public sealed class CompressedBytesReadEventArgs : EventArgs
|
||||
{
|
||||
public CompressedBytesReadEventArgs(long compressedBytesRead, long currentFilePartCompressedBytesRead)
|
||||
{
|
||||
CompressedBytesRead = compressedBytesRead;
|
||||
CurrentFilePartCompressedBytesRead = currentFilePartCompressedBytesRead;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Compressed bytes read for the current entry
|
||||
/// </summary>
|
||||
public long CompressedBytesRead { get; internal set; }
|
||||
public long CompressedBytesRead { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Current file part read for Multipart files (e.g. Rar)
|
||||
/// </summary>
|
||||
public long CurrentFilePartCompressedBytesRead { get; internal set; }
|
||||
public long CurrentFilePartCompressedBytesRead { get; }
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,6 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace SharpCompress.Common
|
||||
{
|
||||
@@ -18,7 +19,7 @@ namespace SharpCompress.Common
|
||||
/// <summary>
|
||||
/// The target of a symlink entry internal to the Archive. Will be null if not a symlink.
|
||||
/// </summary>
|
||||
public abstract string LinkTarget { get; }
|
||||
public abstract string? LinkTarget { get; }
|
||||
|
||||
/// <summary>
|
||||
/// The compressed file size
|
||||
@@ -71,16 +72,15 @@ namespace SharpCompress.Common
|
||||
public abstract bool IsSplitAfter { get; }
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override string ToString()
|
||||
{
|
||||
return Key;
|
||||
}
|
||||
public override string ToString() => Key;
|
||||
|
||||
internal abstract IEnumerable<FilePart> Parts { get; }
|
||||
|
||||
internal bool IsSolid { get; set; }
|
||||
|
||||
internal virtual void Close()
|
||||
internal virtual ValueTask CloseAsync()
|
||||
{
|
||||
return new ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
|
||||
@@ -1,10 +1,13 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Common
|
||||
{
|
||||
public class EntryStream : Stream
|
||||
public class EntryStream : AsyncStream
|
||||
{
|
||||
private readonly IReader _reader;
|
||||
private readonly Stream _stream;
|
||||
@@ -20,25 +23,24 @@ namespace SharpCompress.Common
|
||||
/// <summary>
|
||||
/// When reading a stream from OpenEntryStream, the stream must be completed so use this to finish reading the entire entry.
|
||||
/// </summary>
|
||||
public void SkipEntry()
|
||||
public async ValueTask SkipEntryAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
this.Skip();
|
||||
await this.SkipAsync(cancellationToken);
|
||||
_completed = true;
|
||||
}
|
||||
|
||||
protected override void Dispose(bool disposing)
|
||||
public override async ValueTask DisposeAsync()
|
||||
{
|
||||
if (!(_completed || _reader.Cancelled))
|
||||
{
|
||||
SkipEntry();
|
||||
await SkipEntryAsync();
|
||||
}
|
||||
if (_isDisposed)
|
||||
{
|
||||
return;
|
||||
}
|
||||
_isDisposed = true;
|
||||
base.Dispose(disposing);
|
||||
_stream.Dispose();
|
||||
await _stream.DisposeAsync();
|
||||
}
|
||||
|
||||
public override bool CanRead => true;
|
||||
@@ -46,17 +48,13 @@ namespace SharpCompress.Common
|
||||
public override bool CanSeek => false;
|
||||
|
||||
public override bool CanWrite => false;
|
||||
|
||||
public override void Flush() {
|
||||
}
|
||||
|
||||
public override long Length => _stream.Length;
|
||||
|
||||
public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
|
||||
|
||||
public override int Read(byte[] buffer, int offset, int count)
|
||||
public override async ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken = default)
|
||||
{
|
||||
int read = _stream.Read(buffer, offset, count);
|
||||
int read = await _stream.ReadAsync(buffer, cancellationToken);
|
||||
if (read <= 0)
|
||||
{
|
||||
_completed = true;
|
||||
@@ -64,14 +62,14 @@ namespace SharpCompress.Common
|
||||
return read;
|
||||
}
|
||||
|
||||
public override int ReadByte()
|
||||
public override ValueTask WriteAsync(ReadOnlyMemory<byte> buffer, CancellationToken cancellationToken = default)
|
||||
{
|
||||
int value = _stream.ReadByte();
|
||||
if (value == -1)
|
||||
{
|
||||
_completed = true;
|
||||
}
|
||||
return value;
|
||||
throw new NotSupportedException();
|
||||
}
|
||||
|
||||
public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
|
||||
{
|
||||
throw new NotSupportedException();
|
||||
}
|
||||
|
||||
public override long Seek(long offset, SeekOrigin origin)
|
||||
@@ -83,10 +81,5 @@ namespace SharpCompress.Common
|
||||
{
|
||||
throw new NotSupportedException();
|
||||
}
|
||||
|
||||
public override void Write(byte[] buffer, int offset, int count)
|
||||
{
|
||||
throw new NotSupportedException();
|
||||
}
|
||||
}
|
||||
}
|
||||
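Sketch of how a consumer might use the new SkipEntryAsync on an EntryStream it does not intend to read; the helper is hypothetical.

using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;

internal static class EntryStreamDemo
{
    // Hypothetical: a caller that opened an entry but decides not to read it should still
    // drain it so the underlying reader can advance; SkipEntryAsync does exactly that.
    public static async ValueTask DiscardAsync(EntryStream entryStream, CancellationToken token = default)
    {
        await entryStream.SkipEntryAsync(token);
        await entryStream.DisposeAsync();
    }
}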
@@ -1,5 +1,7 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace SharpCompress.Common
|
||||
{
|
||||
@@ -8,28 +10,29 @@ namespace SharpCompress.Common
|
||||
/// <summary>
|
||||
/// Extract to specific directory, retaining filename
|
||||
/// </summary>
|
||||
public static void WriteEntryToDirectory(IEntry entry, string destinationDirectory,
|
||||
ExtractionOptions options, Action<string, ExtractionOptions> write)
|
||||
public static async ValueTask WriteEntryToDirectoryAsync(IEntry entry,
|
||||
string destinationDirectory,
|
||||
ExtractionOptions? options,
|
||||
Func<string, ExtractionOptions?, CancellationToken, ValueTask> write,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
string destinationFileName;
|
||||
string file = Path.GetFileName(entry.Key);
|
||||
string fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
|
||||
|
||||
options = options ?? new ExtractionOptions()
|
||||
{
|
||||
Overwrite = true
|
||||
};
|
||||
options ??= new ExtractionOptions()
|
||||
{
|
||||
Overwrite = true
|
||||
};
|
||||
|
||||
if (options.ExtractFullPath)
|
||||
{
|
||||
string folder = Path.GetDirectoryName(entry.Key);
|
||||
string destdir = Path.GetFullPath(
|
||||
Path.Combine(fullDestinationDirectoryPath, folder)
|
||||
);
|
||||
string folder = Path.GetDirectoryName(entry.Key)!;
|
||||
string destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));
|
||||
|
||||
if (!Directory.Exists(destdir))
|
||||
{
|
||||
if (!destdir.StartsWith(fullDestinationDirectoryPath))
|
||||
if (!destdir.StartsWith(fullDestinationDirectoryPath, StringComparison.Ordinal))
|
||||
{
|
||||
throw new ExtractionException("Entry is trying to create a directory outside of the destination directory.");
|
||||
}
|
||||
@@ -39,7 +42,7 @@ namespace SharpCompress.Common
|
||||
destinationFileName = Path.Combine(destdir, file);
|
||||
}
|
||||
else
|
||||
{
|
||||
{
|
||||
destinationFileName = Path.Combine(fullDestinationDirectoryPath, file);
|
||||
|
||||
}
|
||||
@@ -48,25 +51,26 @@ namespace SharpCompress.Common
|
||||
{
|
||||
destinationFileName = Path.GetFullPath(destinationFileName);
|
||||
|
||||
if (!destinationFileName.StartsWith(fullDestinationDirectoryPath))
|
||||
if (!destinationFileName.StartsWith(fullDestinationDirectoryPath, StringComparison.Ordinal))
|
||||
{
|
||||
throw new ExtractionException("Entry is trying to write a file outside of the destination directory.");
|
||||
}
|
||||
write(destinationFileName, options);
|
||||
await write(destinationFileName, options, cancellationToken);
|
||||
}
|
||||
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
|
||||
{
|
||||
Directory.CreateDirectory(destinationFileName);
|
||||
}
|
||||
}
|
||||
|
||||
public static void WriteEntryToFile(IEntry entry, string destinationFileName,
|
||||
ExtractionOptions options,
|
||||
Action<string, FileMode> openAndWrite)
|
||||
|
||||
public static async ValueTask WriteEntryToFileAsync(IEntry entry, string destinationFileName,
|
||||
ExtractionOptions? options,
|
||||
Func<string, FileMode, CancellationToken, ValueTask> openAndWrite,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (entry.LinkTarget != null)
|
||||
if (entry.LinkTarget is not null)
|
||||
{
|
||||
if (null == options.WriteSymbolicLink)
|
||||
if (options?.WriteSymbolicLink is null)
|
||||
{
|
||||
throw new ExtractionException("Entry is a symbolic link but ExtractionOptions.WriteSymbolicLink delegate is null");
|
||||
}
|
||||
@@ -75,17 +79,17 @@ namespace SharpCompress.Common
|
||||
else
|
||||
{
|
||||
FileMode fm = FileMode.Create;
|
||||
options = options ?? new ExtractionOptions()
|
||||
{
|
||||
Overwrite = true
|
||||
};
|
||||
options ??= new ExtractionOptions()
|
||||
{
|
||||
Overwrite = true
|
||||
};
|
||||
|
||||
if (!options.Overwrite)
|
||||
{
|
||||
fm = FileMode.CreateNew;
|
||||
}
|
||||
|
||||
openAndWrite(destinationFileName, fm);
|
||||
await openAndWrite(destinationFileName, fm, cancellationToken);
|
||||
entry.PreserveExtractionOptions(destinationFileName, options);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
namespace SharpCompress.Common
|
||||
using System;
|
||||
|
||||
namespace SharpCompress.Common
|
||||
{
|
||||
public class ExtractionOptions
|
||||
{
|
||||
/// <summary>
|
||||
/// overwrite target if it exists
|
||||
/// </summary>
|
||||
public bool Overwrite {get; set; }
|
||||
public bool Overwrite { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// extract with internal directory structure
|
||||
@@ -29,6 +31,10 @@
|
||||
/// </summary>
|
||||
public delegate void SymbolicLinkWriterDelegate(string sourcePath, string targetPath);
|
||||
|
||||
public SymbolicLinkWriterDelegate WriteSymbolicLink;
|
||||
public SymbolicLinkWriterDelegate WriteSymbolicLink =
|
||||
(sourcePath, targetPath) =>
|
||||
{
|
||||
Console.WriteLine($"Could not write symlink {sourcePath} -> {targetPath}, for more information please see https://github.com/dotnet/runtime/issues/24271");
|
||||
};
|
||||
}
|
||||
}
|
||||
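A hedged sketch of replacing the new logging default with a real symlink writer. File.CreateSymbolicLink assumes .NET 6 or later and is illustrative only; it is not how the library itself handles links.

using SharpCompress.Common;

internal static class SymlinkDemo
{
    public static ExtractionOptions MakeOptions()
    {
        return new ExtractionOptions
        {
            Overwrite = true,
            // Override the default delegate (which only logs) with a platform symlink writer.
            WriteSymbolicLink = (sourcePath, targetPath) =>
                System.IO.File.CreateSymbolicLink(sourcePath, targetPath)
        };
    }
}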
@@ -1,4 +1,6 @@
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace SharpCompress.Common
|
||||
{
|
||||
@@ -11,10 +13,10 @@ namespace SharpCompress.Common
|
||||
|
||||
internal ArchiveEncoding ArchiveEncoding { get; }
|
||||
|
||||
internal abstract string FilePartName { get; }
|
||||
internal abstract string? FilePartName { get; }
|
||||
|
||||
internal abstract Stream GetCompressedStream();
|
||||
internal abstract Stream GetRawStream();
|
||||
internal abstract ValueTask<Stream> GetCompressedStreamAsync(CancellationToken cancellationToken);
|
||||
internal abstract Stream? GetRawStream();
|
||||
internal bool Skipped { get; set; }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,21 +2,28 @@
|
||||
|
||||
namespace SharpCompress.Common
|
||||
{
|
||||
public class FilePartExtractionBeginEventArgs : EventArgs
|
||||
public sealed class FilePartExtractionBeginEventArgs : EventArgs
|
||||
{
|
||||
public FilePartExtractionBeginEventArgs(string name, long size, long compressedSize)
|
||||
{
|
||||
Name = name;
|
||||
Size = size;
|
||||
CompressedSize = compressedSize;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// File name for the part for the current entry
|
||||
/// </summary>
|
||||
public string Name { get; internal set; }
|
||||
public string Name { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Uncompressed size of the current entry in the part
|
||||
/// </summary>
|
||||
public long Size { get; internal set; }
|
||||
public long Size { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Compressed size of the current entry in the part
|
||||
/// </summary>
|
||||
public long CompressedSize { get; internal set; }
|
||||
public long CompressedSize { get; }
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,8 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Threading;
|
||||
|
||||
namespace SharpCompress.Common.GZip
|
||||
{
|
||||
@@ -16,15 +17,15 @@ namespace SharpCompress.Common.GZip
|
||||
|
||||
public override CompressionType CompressionType => CompressionType.GZip;
|
||||
|
||||
public override long Crc => 0;
|
||||
public override long Crc => _filePart.Crc ?? 0;
|
||||
|
||||
public override string Key => _filePart.FilePartName;
|
||||
public override string Key => _filePart.FilePartName ?? string.Empty;
|
||||
|
||||
public override string LinkTarget => null;
|
||||
public override string? LinkTarget => null;
|
||||
|
||||
public override long CompressedSize => 0;
|
||||
|
||||
public override long Size => 0;
|
||||
public override long Size => _filePart.UncompressedSize ?? 0;
|
||||
|
||||
public override DateTime? LastModifiedTime => _filePart.DateModified;
|
||||
|
||||
@@ -42,9 +43,12 @@ namespace SharpCompress.Common.GZip
|
||||
|
||||
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
|
||||
|
||||
internal static IEnumerable<GZipEntry> GetEntries(Stream stream, OptionsBase options)
|
||||
internal static async IAsyncEnumerable<GZipEntry> GetEntries(Stream stream, OptionsBase options,
|
||||
[EnumeratorCancellation] CancellationToken cancellationToken)
|
||||
{
|
||||
yield return new GZipEntry(new GZipFilePart(stream, options.ArchiveEncoding));
|
||||
var part = new GZipFilePart(options.ArchiveEncoding);
|
||||
await part.Initialize(stream, cancellationToken);
|
||||
yield return new GZipEntry(part);
|
||||
}
|
||||
}
|
||||
}
|
||||
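The GetEntries conversion above follows the standard C# async-iterator shape. A generic sketch of that shape, unrelated to the library's internals (the file-length example is purely illustrative):

using System.Collections.Generic;
using System.IO;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;

internal static class AsyncIteratorPattern
{
    // Illustrative shape only: an async iterator that does per-item async work and
    // honours the token that await foreach / WithCancellation passes in.
    public static async IAsyncEnumerable<long> StreamLengthsAsync(
        IEnumerable<string> paths,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        foreach (var path in paths)
        {
            cancellationToken.ThrowIfCancellationRequested();
            await using var stream = File.OpenRead(path);
            await Task.Yield();                 // stand-in for real async I/O
            yield return stream.Length;
        }
    }
}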
@@ -1,35 +1,57 @@
|
||||
using System;
|
||||
using System.Buffers;
|
||||
using System.Buffers.Binary;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Tar.Headers;
|
||||
using SharpCompress.Compressors;
|
||||
using SharpCompress.Compressors.Deflate;
|
||||
|
||||
namespace SharpCompress.Common.GZip
|
||||
{
|
||||
internal class GZipFilePart : FilePart
|
||||
internal sealed class GZipFilePart : FilePart
|
||||
{
|
||||
private string _name;
|
||||
private readonly Stream _stream;
|
||||
private string? _name;
|
||||
//init only
|
||||
#nullable disable
|
||||
private Stream _stream;
|
||||
#nullable enable
|
||||
|
||||
internal GZipFilePart(Stream stream, ArchiveEncoding archiveEncoding)
|
||||
: base(archiveEncoding)
|
||||
internal GZipFilePart(ArchiveEncoding archiveEncoding)
|
||||
: base(archiveEncoding)
|
||||
{
|
||||
ReadAndValidateGzipHeader(stream);
|
||||
EntryStartPosition = stream.Position;
|
||||
_stream = stream;
|
||||
}
|
||||
|
||||
internal long EntryStartPosition { get; }
|
||||
internal async ValueTask Initialize(Stream stream, CancellationToken cancellationToken)
|
||||
{
|
||||
_stream = stream;
|
||||
if (stream.CanSeek)
|
||||
{
|
||||
long position = stream.Position;
|
||||
stream.Position = stream.Length - 8;
|
||||
await ReadTrailerAsync(cancellationToken);
|
||||
stream.Position = position;
|
||||
}
|
||||
EntryStartPosition = stream.Position;
|
||||
}
|
||||
|
||||
internal long EntryStartPosition { get; private set; }
|
||||
|
||||
internal DateTime? DateModified { get; private set; }
|
||||
internal int? Crc { get; private set; }
|
||||
internal int? UncompressedSize { get; private set; }
|
||||
|
||||
internal override string FilePartName => _name;
|
||||
internal override string? FilePartName => _name;
|
||||
|
||||
internal override Stream GetCompressedStream()
|
||||
internal override async ValueTask<Stream> GetCompressedStreamAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
return new DeflateStream(_stream, CompressionMode.Decompress, CompressionLevel.Default);
|
||||
var stream = new GZipStream(_stream, CompressionMode.Decompress, CompressionLevel.Default);
|
||||
await stream.ReadAsync(Array.Empty<byte>(), 0, 0, cancellationToken);
|
||||
_name = stream.FileName;
|
||||
DateModified = stream.LastModified;
|
||||
return stream;
|
||||
}
|
||||
|
||||
internal override Stream GetRawStream()
|
||||
@@ -37,83 +59,12 @@ namespace SharpCompress.Common.GZip
|
||||
return _stream;
|
||||
}
|
||||
|
||||
private void ReadAndValidateGzipHeader(Stream stream)
|
||||
private async ValueTask ReadTrailerAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
// read the header on the first read
|
||||
byte[] header = new byte[10];
|
||||
int n = stream.Read(header, 0, header.Length);
|
||||
// Read and potentially verify the GZIP trailer: CRC32 and size mod 2^32
|
||||
|
||||
// workitem 8501: handle edge case (decompress empty stream)
|
||||
if (n == 0)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
if (n != 10)
|
||||
{
|
||||
throw new ZlibException("Not a valid GZIP stream.");
|
||||
}
|
||||
|
||||
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
|
||||
{
|
||||
throw new ZlibException("Bad GZIP header.");
|
||||
}
|
||||
|
||||
int timet = BinaryPrimitives.ReadInt32LittleEndian(header.AsSpan(4));
|
||||
DateModified = TarHeader.EPOCH.AddSeconds(timet);
|
||||
if ((header[3] & 0x04) == 0x04)
|
||||
{
|
||||
// read and discard extra field
|
||||
n = stream.Read(header, 0, 2); // 2-byte length field
|
||||
|
||||
Int16 extraLength = (Int16)(header[0] + header[1] * 256);
|
||||
byte[] extra = new byte[extraLength];
|
||||
|
||||
if (!stream.ReadFully(extra))
|
||||
{
|
||||
throw new ZlibException("Unexpected end-of-file reading GZIP header.");
|
||||
}
|
||||
n = extraLength;
|
||||
}
|
||||
if ((header[3] & 0x08) == 0x08)
|
||||
{
|
||||
_name = ReadZeroTerminatedString(stream);
|
||||
}
|
||||
if ((header[3] & 0x10) == 0x010)
|
||||
{
|
||||
ReadZeroTerminatedString(stream);
|
||||
}
|
||||
if ((header[3] & 0x02) == 0x02)
|
||||
{
|
||||
stream.ReadByte(); // CRC16, ignore
|
||||
}
|
||||
}
|
||||
|
||||
private string ReadZeroTerminatedString(Stream stream)
|
||||
{
|
||||
byte[] buf1 = new byte[1];
|
||||
var list = new List<byte>();
|
||||
bool done = false;
|
||||
do
|
||||
{
|
||||
// workitem 7740
|
||||
int n = stream.Read(buf1, 0, 1);
|
||||
if (n != 1)
|
||||
{
|
||||
throw new ZlibException("Unexpected EOF reading GZIP header.");
|
||||
}
|
||||
if (buf1[0] == 0)
|
||||
{
|
||||
done = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
list.Add(buf1[0]);
|
||||
}
|
||||
}
|
||||
while (!done);
|
||||
byte[] buffer = list.ToArray();
|
||||
return ArchiveEncoding.Decode(buffer);
|
||||
Crc = await _stream.ReadInt32(cancellationToken);
|
||||
UncompressedSize = await _stream.ReadInt32(cancellationToken);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
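For reference, a plain standalone sketch of what the seekable trailer read above amounts to: a gzip member ends with CRC32 followed by ISIZE (uncompressed size mod 2^32), both little-endian. This helper is not library code.

using System;
using System.Buffers.Binary;
using System.IO;

internal static class GZipTrailer
{
    public static (uint Crc32, uint SizeMod4GiB) Read(Stream seekable)
    {
        Span<byte> trailer = stackalloc byte[8];
        seekable.Position = seekable.Length - 8;
        int offset = 0;
        while (offset < trailer.Length)
        {
            int read = seekable.Read(trailer.Slice(offset));
            if (read == 0) throw new EndOfStreamException();
            offset += read;
        }
        // First four bytes: CRC32 of the uncompressed data; next four: size mod 2^32.
        return (BinaryPrimitives.ReadUInt32LittleEndian(trailer),
                BinaryPrimitives.ReadUInt32LittleEndian(trailer.Slice(4)));
    }
}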
@@ -1,5 +1,4 @@
|
||||
using System.IO;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Common
|
||||
{
|
||||
|
||||
@@ -10,7 +10,7 @@ namespace SharpCompress.Common
|
||||
long Crc { get; }
|
||||
DateTime? CreatedTime { get; }
|
||||
string Key { get; }
|
||||
string LinkTarget { get; }
|
||||
string? LinkTarget { get; }
|
||||
bool IsDirectory { get; }
|
||||
bool IsEncrypted { get; }
|
||||
bool IsSplitAfter { get; }
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
namespace SharpCompress.Common
|
||||
{
|
||||
public interface IVolume : IDisposable
|
||||
public interface IVolume : IAsyncDisposable
|
||||
{
|
||||
}
|
||||
}
|
||||
@@ -4,11 +4,13 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
{
|
||||
internal class AvHeader : RarHeader
|
||||
{
|
||||
public AvHeader(RarHeader header, RarCrcBinaryReader reader)
|
||||
: base(header, reader, HeaderType.Av)
|
||||
public AvHeader(RarHeader header, RarCrcBinaryReader reader)
|
||||
: base(header, reader, HeaderType.Av)
|
||||
{
|
||||
if (IsRar5)
|
||||
if (IsRar5)
|
||||
{
|
||||
throw new InvalidFormatException("unexpected rar5 record");
|
||||
}
|
||||
}
|
||||
|
||||
protected override void ReadFinish(MarkingBinaryReader reader)
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
using SharpCompress.IO;
|
||||
#nullable disable
|
||||
|
||||
using SharpCompress.IO;
|
||||
|
||||
namespace SharpCompress.Common.Rar.Headers
|
||||
{
|
||||
internal class ArchiveCryptHeader : RarHeader
|
||||
{
|
||||
|
||||
|
||||
private const int CRYPT_VERSION = 0; // Supported encryption version.
|
||||
private const int SIZE_SALT50 = 16;
|
||||
private const int SIZE_SALT30 = 8;
|
||||
@@ -13,14 +15,14 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
private const int SIZE_PSWCHECK_CSUM = 4;
|
||||
private const int CRYPT5_KDF_LG2_COUNT = 15; // LOG2 of PDKDF2 iteration count.
|
||||
private const int CRYPT5_KDF_LG2_COUNT_MAX = 24; // LOG2 of maximum accepted iteration count.
|
||||
|
||||
|
||||
|
||||
|
||||
private bool _usePswCheck;
|
||||
private uint _lg2Count; // Log2 of PBKDF2 repetition count.
|
||||
private byte[] _salt;
|
||||
private byte[] _pswCheck;
|
||||
private byte[] _pswCheckCsm;
|
||||
|
||||
|
||||
public ArchiveCryptHeader(RarHeader header, RarCrcBinaryReader reader)
|
||||
: base(header, reader, HeaderType.Crypt)
|
||||
{
|
||||
@@ -33,12 +35,12 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
{
|
||||
//error?
|
||||
return;
|
||||
}
|
||||
}
|
||||
var encryptionFlags = reader.ReadRarVIntUInt32();
|
||||
_usePswCheck = FlagUtility.HasFlag(encryptionFlags, EncryptionFlagsV5.CHFL_CRYPT_PSWCHECK);
|
||||
_lg2Count = reader.ReadRarVIntByte(1);
|
||||
|
||||
|
||||
|
||||
//UsePswCheck = HasHeaderFlag(EncryptionFlagsV5.CHFL_CRYPT_PSWCHECK);
|
||||
if (_lg2Count > CRYPT5_KDF_LG2_COUNT_MAX)
|
||||
{
|
||||
|
||||
@@ -2,16 +2,16 @@ using SharpCompress.IO;
|
||||
|
||||
namespace SharpCompress.Common.Rar.Headers
|
||||
{
|
||||
internal class ArchiveHeader : RarHeader
|
||||
internal sealed class ArchiveHeader : RarHeader
|
||||
{
|
||||
public ArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
|
||||
: base(header, reader, HeaderType.Archive)
|
||||
public ArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
|
||||
: base(header, reader, HeaderType.Archive)
|
||||
{
|
||||
}
|
||||
|
||||
protected override void ReadFinish(MarkingBinaryReader reader)
|
||||
{
|
||||
if (IsRar5)
|
||||
if (IsRar5)
|
||||
{
|
||||
Flags = reader.ReadRarVIntUInt16();
|
||||
if (HasFlag(ArchiveFlagsV5.HAS_VOLUME_NUMBER))
|
||||
@@ -22,8 +22,8 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
//if (ExtraSize != 0) {
|
||||
// ReadLocator(reader);
|
||||
//}
|
||||
}
|
||||
else
|
||||
}
|
||||
else
|
||||
{
|
||||
Flags = HeaderFlags;
|
||||
HighPosAv = reader.ReadInt16();
|
||||
@@ -35,26 +35,33 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
}
|
||||
}
|
||||
|
||||
private void ReadLocator(MarkingBinaryReader reader) {
|
||||
private void ReadLocator(MarkingBinaryReader reader)
|
||||
{
|
||||
var size = reader.ReadRarVIntUInt16();
|
||||
var type = reader.ReadRarVIntUInt16();
|
||||
if (type != 1) throw new InvalidFormatException("expected locator record");
|
||||
if (type != 1)
|
||||
{
|
||||
throw new InvalidFormatException("expected locator record");
|
||||
}
|
||||
|
||||
var flags = reader.ReadRarVIntUInt16();
|
||||
const ushort hasQuickOpenOffset = 0x01;
|
||||
const ushort hasRecoveryOffset = 0x02;
|
||||
ulong quickOpenOffset = 0;
|
||||
if ((flags & hasQuickOpenOffset) == hasQuickOpenOffset) {
|
||||
if ((flags & hasQuickOpenOffset) == hasQuickOpenOffset)
|
||||
{
|
||||
quickOpenOffset = reader.ReadRarVInt();
|
||||
}
|
||||
ulong recoveryOffset = 0;
|
||||
if ((flags & hasRecoveryOffset) == hasRecoveryOffset) {
|
||||
if ((flags & hasRecoveryOffset) == hasRecoveryOffset)
|
||||
{
|
||||
recoveryOffset = reader.ReadRarVInt();
|
||||
}
|
||||
}
|
||||
|
||||
private ushort Flags { get; set; }
|
||||
private ushort Flags { get; set; }
|
||||
|
||||
private bool HasFlag(ushort flag)
|
||||
private bool HasFlag(ushort flag)
|
||||
{
|
||||
return (Flags & flag) == flag;
|
||||
}
|
||||
@@ -74,7 +81,7 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
public bool IsVolume => HasFlag(IsRar5 ? ArchiveFlagsV5.VOLUME : ArchiveFlagsV4.VOLUME);
|
||||
|
||||
// RAR5: Volume number field is present. True for all volumes except first.
|
||||
public bool IsFirstVolume => IsRar5 ? VolumeNumber == null : HasFlag(ArchiveFlagsV4.FIRST_VOLUME);
|
||||
public bool IsFirstVolume => IsRar5 ? VolumeNumber is null : HasFlag(ArchiveFlagsV4.FIRST_VOLUME);
|
||||
|
||||
public bool IsSolid => HasFlag(IsRar5 ? ArchiveFlagsV5.SOLID : ArchiveFlagsV4.SOLID);
|
||||
}
|
||||
|
||||
@@ -5,9 +5,12 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
internal class CommentHeader : RarHeader
|
||||
{
|
||||
protected CommentHeader(RarHeader header, RarCrcBinaryReader reader)
|
||||
: base(header, reader, HeaderType.Comment)
|
||||
{
|
||||
if (IsRar5) throw new InvalidFormatException("unexpected rar5 record");
|
||||
: base(header, reader, HeaderType.Comment)
|
||||
{
|
||||
if (IsRar5)
|
||||
{
|
||||
throw new InvalidFormatException("unexpected rar5 record");
|
||||
}
|
||||
}
|
||||
|
||||
protected override void ReadFinish(MarkingBinaryReader reader)
|
||||
|
||||
@@ -4,14 +4,14 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
{
|
||||
internal class EndArchiveHeader : RarHeader
|
||||
{
|
||||
public EndArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
|
||||
: base(header, reader, HeaderType.EndArchive)
|
||||
public EndArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
|
||||
: base(header, reader, HeaderType.EndArchive)
|
||||
{
|
||||
}
|
||||
|
||||
protected override void ReadFinish(MarkingBinaryReader reader)
|
||||
{
|
||||
if (IsRar5)
|
||||
if (IsRar5)
|
||||
{
|
||||
Flags = reader.ReadRarVIntUInt16();
|
||||
}
|
||||
@@ -31,7 +31,7 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
|
||||
private ushort Flags { get; set; }
|
||||
|
||||
private bool HasFlag(ushort flag)
|
||||
private bool HasFlag(ushort flag)
|
||||
{
|
||||
return (Flags & flag) == flag;
|
||||
}
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
#nullable disable
|
||||
|
||||
#if !Rar2017_64bit
|
||||
using nint = System.Int32;
|
||||
using nuint = System.UInt32;
|
||||
@@ -19,18 +21,18 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
{
|
||||
private uint _fileCrc;
|
||||
|
||||
public FileHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType)
|
||||
: base(header, reader, headerType)
|
||||
public FileHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType)
|
||||
: base(header, reader, headerType)
|
||||
{
|
||||
}
|
||||
|
||||
protected override void ReadFinish(MarkingBinaryReader reader)
|
||||
protected override void ReadFinish(MarkingBinaryReader reader)
|
||||
{
|
||||
if (IsRar5)
|
||||
if (IsRar5)
|
||||
{
|
||||
ReadFromReaderV5(reader);
|
||||
}
|
||||
else
|
||||
}
|
||||
else
|
||||
{
|
||||
ReadFromReaderV4(reader);
|
||||
}
|
||||
@@ -47,11 +49,13 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
|
||||
FileAttributes = reader.ReadRarVIntUInt32();
|
||||
|
||||
if (HasFlag(FileFlagsV5.HAS_MOD_TIME)) {
|
||||
if (HasFlag(FileFlagsV5.HAS_MOD_TIME))
|
||||
{
|
||||
FileLastModifiedTime = Utility.UnixTimeToDateTime(reader.ReadUInt32());
|
||||
}
|
||||
|
||||
if (HasFlag(FileFlagsV5.HAS_CRC32)) {
|
||||
if (HasFlag(FileFlagsV5.HAS_CRC32))
|
||||
{
|
||||
FileCrc = reader.ReadUInt32();
|
||||
}
|
||||
|
||||
@@ -63,7 +67,7 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
// but it was already used in RAR 1.5 and Unpack needs to distinguish
|
||||
// them.
|
||||
CompressionAlgorithm = (byte)((compressionInfo & 0x3f) + 50);
|
||||
|
||||
|
||||
// 7th bit (0x0040) defines the solid flag. If it is set, RAR continues to use the compression dictionary left after processing preceding files.
|
||||
// It can be set only for file headers and is never set for service headers.
|
||||
IsSolid = (compressionInfo & 0x40) == 0x40;
|
||||
@@ -72,7 +76,7 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
CompressionMethod = (byte)((compressionInfo >> 7) & 0x7);
|
||||
|
||||
// Bits 11 - 14 (0x3c00) define the minimum size of dictionary size required to extract data. Value 0 means 128 KB, 1 - 256 KB, ..., 14 - 2048 MB, 15 - 4096 MB.
|
||||
WindowSize = IsDirectory ? 0 : ((size_t)0x20000) << ((compressionInfo>>10) & 0xf);
|
||||
WindowSize = IsDirectory ? 0 : ((size_t)0x20000) << ((compressionInfo >> 10) & 0xf);
|
||||
|
||||
HostOs = reader.ReadRarVIntByte();
|
||||
|
||||
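A tiny standalone sketch of the dictionary-size formula restated in the comment above; the values follow the RAR5 notes in this hunk (bits 11-14 of compressionInfo, value 0 meaning 128 KB up to 15 meaning 4096 MB).

internal static class Rar5DictionarySize
{
    // Bits 11-14 of compressionInfo select the dictionary size: 0 -> 128 KB, ..., 15 -> 4096 MB.
    public static ulong FromCompressionInfo(uint compressionInfo)
    {
        int n = (int)((compressionInfo >> 10) & 0xf);
        return 0x20000UL << n;      // e.g. n = 3 gives 1 MB, n = 15 gives 4 GB
    }
}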
@@ -99,18 +103,20 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
FileName = ConvertPathV5(Encoding.UTF8.GetString(b, 0, b.Length));
|
||||
|
||||
// extra size seems to be redundant since we know the total header size
|
||||
if (ExtraSize != RemainingHeaderBytes(reader))
|
||||
if (ExtraSize != RemainingHeaderBytes(reader))
|
||||
{
|
||||
throw new InvalidFormatException("rar5 header size / extra size inconsistency");
|
||||
}
|
||||
|
||||
isEncryptedRar5 = false;
|
||||
|
||||
while (RemainingHeaderBytes(reader) > 0) {
|
||||
while (RemainingHeaderBytes(reader) > 0)
|
||||
{
|
||||
var size = reader.ReadRarVIntUInt16();
|
||||
int n = RemainingHeaderBytes(reader);
|
||||
var type = reader.ReadRarVIntUInt16();
|
||||
switch (type) {
|
||||
switch (type)
|
||||
{
|
||||
//TODO
|
||||
case 1: // file encryption
|
||||
{
|
||||
@@ -118,7 +124,7 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
|
||||
//var version = reader.ReadRarVIntByte();
|
||||
//if (version != 0) throw new InvalidFormatException("unknown encryption algorithm " + version);
|
||||
}
|
||||
}
|
||||
break;
|
||||
// case 2: // file hash
|
||||
// {
|
||||
@@ -129,38 +135,41 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
{
|
||||
ushort flags = reader.ReadRarVIntUInt16();
|
||||
var isWindowsTime = (flags & 1) == 0;
|
||||
if ((flags & 0x2) == 0x2) {
|
||||
if ((flags & 0x2) == 0x2)
|
||||
{
|
||||
FileLastModifiedTime = ReadExtendedTimeV5(reader, isWindowsTime);
|
||||
}
|
||||
if ((flags & 0x4) == 0x4) {
|
||||
if ((flags & 0x4) == 0x4)
|
||||
{
|
||||
FileCreatedTime = ReadExtendedTimeV5(reader, isWindowsTime);
|
||||
}
|
||||
if ((flags & 0x8) == 0x8) {
|
||||
if ((flags & 0x8) == 0x8)
|
||||
{
|
||||
FileLastAccessedTime = ReadExtendedTimeV5(reader, isWindowsTime);
|
||||
}
|
||||
}
|
||||
break;
|
||||
//TODO
|
||||
// case 4: // file version
|
||||
// {
|
||||
//
|
||||
// }
|
||||
// break;
|
||||
// case 5: // file system redirection
|
||||
// {
|
||||
//
|
||||
// }
|
||||
// break;
|
||||
// case 6: // unix owner
|
||||
// {
|
||||
//
|
||||
// }
|
||||
// break;
|
||||
// case 7: // service data
|
||||
// {
|
||||
//
|
||||
// }
|
||||
// break;
|
||||
//TODO
|
||||
// case 4: // file version
|
||||
// {
|
||||
//
|
||||
// }
|
||||
// break;
|
||||
// case 5: // file system redirection
|
||||
// {
|
||||
//
|
||||
// }
|
||||
// break;
|
||||
// case 6: // unix owner
|
||||
// {
|
||||
//
|
||||
// }
|
||||
// break;
|
||||
// case 7: // service data
|
||||
// {
|
||||
//
|
||||
// }
|
||||
// break;
|
||||
|
||||
default:
|
||||
// skip unknown record types to allow new record types to be added in the future
|
||||
@@ -169,25 +178,26 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
// drain any trailing bytes of extra record
|
||||
int did = n - RemainingHeaderBytes(reader);
|
||||
int drain = size - did;
|
||||
if (drain > 0)
|
||||
if (drain > 0)
|
||||
{
|
||||
reader.ReadBytes(drain);
|
||||
}
|
||||
}
|
||||
|
||||
if (AdditionalDataSize != 0) {
|
||||
if (AdditionalDataSize != 0)
|
||||
{
|
||||
CompressedSize = AdditionalDataSize;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private static DateTime ReadExtendedTimeV5(MarkingBinaryReader reader, bool isWindowsTime)
|
||||
private static DateTime ReadExtendedTimeV5(MarkingBinaryReader reader, bool isWindowsTime)
|
||||
{
|
||||
if (isWindowsTime)
|
||||
if (isWindowsTime)
|
||||
{
|
||||
return DateTime.FromFileTime(reader.ReadInt64());
|
||||
}
|
||||
else
|
||||
}
|
||||
else
|
||||
{
|
||||
return Utility.UnixTimeToDateTime(reader.ReadUInt32());
|
||||
}
|
||||
@@ -199,7 +209,7 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
{
|
||||
// replace embedded \\ with valid filename char
|
||||
return path.Replace('\\', '-').Replace('/', '\\');
|
||||
}
|
||||
}
|
||||
return path;
|
||||
}
|
||||
|
||||
@@ -374,20 +384,22 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
|
||||
private ushort Flags { get; set; }
|
||||
|
||||
private bool HasFlag(ushort flag)
|
||||
private bool HasFlag(ushort flag)
|
||||
{
|
||||
return (Flags & flag) == flag;
|
||||
}
|
||||
|
||||
internal uint FileCrc
|
||||
{
|
||||
get {
|
||||
if (IsRar5 && !HasFlag(FileFlagsV5.HAS_CRC32)) {
|
||||
//!!! rar5:
|
||||
internal uint FileCrc
|
||||
{
|
||||
get
|
||||
{
|
||||
if (IsRar5 && !HasFlag(FileFlagsV5.HAS_CRC32))
|
||||
{
|
||||
//!!! rar5:
|
||||
throw new InvalidOperationException("TODO rar5");
|
||||
}
|
||||
return _fileCrc;
|
||||
}
|
||||
return _fileCrc;
|
||||
}
|
||||
private set => _fileCrc = value;
|
||||
}
|
||||
|
||||
@@ -407,7 +419,7 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
//case 29: // rar 3.x compression
|
||||
//case 50: // RAR 5.0 compression algorithm.
|
||||
internal byte CompressionAlgorithm { get; private set; }
|
||||
|
||||
|
||||
public bool IsSolid { get; private set; }
|
||||
|
||||
// unused for UnpackV1 implementation (limitation)
|
||||
@@ -425,13 +437,14 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
internal long DataStartPosition { get; set; }
|
||||
public Stream PackedStream { get; set; }
|
||||
|
||||
public bool IsSplitBefore => IsRar5 ? HasHeaderFlag(HeaderFlagsV5.SPLIT_BEFORE) : HasFlag(FileFlagsV4.SPLIT_BEFORE);
|
||||
public bool IsSplitAfter => IsRar5 ? HasHeaderFlag(HeaderFlagsV5.SPLIT_AFTER) : HasFlag(FileFlagsV4.SPLIT_AFTER);
|
||||
|
||||
public bool IsDirectory => HasFlag(IsRar5 ? FileFlagsV5.DIRECTORY : FileFlagsV4.DIRECTORY);
|
||||
|
||||
private bool isEncryptedRar5 = false;
|
||||
public bool IsEncrypted => IsRar5 ? isEncryptedRar5: HasFlag(FileFlagsV4.PASSWORD);
|
||||
|
||||
public bool IsEncrypted => IsRar5 ? isEncryptedRar5 : HasFlag(FileFlagsV4.PASSWORD);
|
||||
|
||||
internal DateTime? FileLastModifiedTime { get; private set; }
|
||||
|
||||
internal DateTime? FileCreatedTime { get; private set; }
|
||||
|
||||
@@ -42,10 +42,10 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
}
|
||||
|
||||
internal static class EncryptionFlagsV5
|
||||
{
|
||||
{
|
||||
// RAR 5.0 archive encryption header specific flags.
|
||||
public const uint CHFL_CRYPT_PSWCHECK = 0x01; // Password check data is present.
|
||||
|
||||
|
||||
public const uint FHEXTRA_CRYPT_PSWCHECK = 0x01; // Password check data is present.
|
||||
public const uint FHEXTRA_CRYPT_HASHMAC = 0x02;
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
namespace SharpCompress.Common.Rar.Headers
|
||||
{
|
||||
internal interface IRarHeader
|
||||
internal interface IRarHeader
|
||||
{
|
||||
HeaderType HeaderType { get; }
|
||||
}
|
||||
|
||||
@@ -11,71 +11,98 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
|
||||
public bool IsRar5 { get; }
|
||||
|
||||
private MarkHeader(bool isRar5)
|
||||
{
|
||||
private MarkHeader(bool isRar5)
|
||||
{
|
||||
IsRar5 = isRar5;
|
||||
}
|
||||
|
||||
public HeaderType HeaderType => HeaderType.Mark;
|
||||
|
||||
private static byte GetByte(Stream stream)
|
||||
private static byte GetByte(Stream stream)
|
||||
{
|
||||
var b = stream.ReadByte();
|
||||
if (b != -1)
|
||||
if (b != -1)
|
||||
{
|
||||
return (byte)b;
|
||||
}
|
||||
throw new EndOfStreamException();
|
||||
}
|
||||
|
||||
public static MarkHeader Read(Stream stream, bool leaveStreamOpen, bool lookForHeader)
|
||||
public static MarkHeader Read(Stream stream, bool leaveStreamOpen, bool lookForHeader)
|
||||
{
|
||||
int maxScanIndex = lookForHeader ? MAX_SFX_SIZE : 0;
|
||||
try
|
||||
{
|
||||
int start = -1;
|
||||
var b = GetByte(stream); start++;
|
||||
while (start <= maxScanIndex)
|
||||
while (start <= maxScanIndex)
|
||||
{
|
||||
// Rar old signature: 52 45 7E 5E
|
||||
// Rar4 signature: 52 61 72 21 1A 07 00
|
||||
// Rar5 signature: 52 61 72 21 1A 07 01 00
|
||||
if (b == 0x52)
|
||||
if (b == 0x52)
|
||||
{
|
||||
b = GetByte(stream); start++;
|
||||
if (b == 0x61)
|
||||
if (b == 0x61)
|
||||
{
|
||||
b = GetByte(stream); start++;
|
||||
if (b != 0x72) continue;
|
||||
b = GetByte(stream); start++;
|
||||
if (b != 0x21) continue;
|
||||
b = GetByte(stream); start++;
|
||||
if (b != 0x1a) continue;
|
||||
b = GetByte(stream); start++;
|
||||
if (b != 0x07) continue;
|
||||
if (b != 0x72)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
b = GetByte(stream); start++;
|
||||
if (b == 1)
|
||||
if (b != 0x21)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
b = GetByte(stream); start++;
|
||||
if (b != 0x1a)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
b = GetByte(stream); start++;
|
||||
if (b != 0x07)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
b = GetByte(stream); start++;
|
||||
if (b == 1)
|
||||
{
|
||||
b = GetByte(stream); start++;
|
||||
if (b != 0) continue;
|
||||
if (b != 0)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
return new MarkHeader(true); // Rar5
|
||||
}
|
||||
else if (b == 0)
|
||||
}
|
||||
else if (b == 0)
|
||||
{
|
||||
return new MarkHeader(false); // Rar4
|
||||
}
|
||||
}
|
||||
else if (b == 0x45)
|
||||
}
|
||||
}
|
||||
else if (b == 0x45)
|
||||
{
|
||||
b = GetByte(stream); start++;
|
||||
if (b != 0x7e) continue;
|
||||
if (b != 0x7e)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
b = GetByte(stream); start++;
|
||||
if (b != 0x5e) continue;
|
||||
if (b != 0x5e)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
throw new InvalidFormatException("Rar format version pre-4 is unsupported.");
|
||||
}
|
||||
}
|
||||
else
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
b = GetByte(stream); start++;
|
||||
}
|
||||
|
||||
@@ -2,23 +2,23 @@
|
||||
|
||||
namespace SharpCompress.Common.Rar.Headers
|
||||
{
|
||||
internal class NewSubHeaderType : IEquatable<NewSubHeaderType>
|
||||
internal sealed class NewSubHeaderType : IEquatable<NewSubHeaderType>
|
||||
{
|
||||
internal static readonly NewSubHeaderType SUBHEAD_TYPE_CMT = new NewSubHeaderType('C', 'M', 'T');
|
||||
internal static readonly NewSubHeaderType SUBHEAD_TYPE_CMT = new('C', 'M', 'T');
|
||||
|
||||
//internal static final NewSubHeaderType SUBHEAD_TYPE_ACL = new NewSubHeaderType(new byte[]{'A','C','L'});
|
||||
//internal static final NewSubHeaderType SUBHEAD_TYPE_ACL = new (new byte[]{'A','C','L'});
|
||||
|
||||
//internal static final NewSubHeaderType SUBHEAD_TYPE_STREAM = new NewSubHeaderType(new byte[]{'S','T','M'});
|
||||
//internal static final NewSubHeaderType SUBHEAD_TYPE_STREAM = new (new byte[]{'S','T','M'});
|
||||
|
||||
//internal static final NewSubHeaderType SUBHEAD_TYPE_UOWNER = new NewSubHeaderType(new byte[]{'U','O','W'});
|
||||
//internal static final NewSubHeaderType SUBHEAD_TYPE_UOWNER = new (new byte[]{'U','O','W'});
|
||||
|
||||
//internal static final NewSubHeaderType SUBHEAD_TYPE_AV = new NewSubHeaderType(new byte[]{'A','V'});
|
||||
//internal static final NewSubHeaderType SUBHEAD_TYPE_AV = new (new byte[]{'A','V'});
|
||||
|
||||
internal static readonly NewSubHeaderType SUBHEAD_TYPE_RR = new NewSubHeaderType('R', 'R');
|
||||
internal static readonly NewSubHeaderType SUBHEAD_TYPE_RR = new('R', 'R');
|
||||
|
||||
//internal static final NewSubHeaderType SUBHEAD_TYPE_OS2EA = new NewSubHeaderType(new byte[]{'E','A','2'});
|
||||
//internal static final NewSubHeaderType SUBHEAD_TYPE_OS2EA = new (new byte[]{'E','A','2'});
|
||||
|
||||
//internal static final NewSubHeaderType SUBHEAD_TYPE_BEOSEA = new NewSubHeaderType(new byte[]{'E','A','B','E'});
|
||||
//internal static final NewSubHeaderType SUBHEAD_TYPE_BEOSEA = new (new byte[]{'E','A','B','E'});
|
||||
|
||||
private readonly byte[] _bytes;
|
||||
|
||||
@@ -37,19 +37,13 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
{
|
||||
return false;
|
||||
}
|
||||
for (int i = 0; i < bytes.Length; ++i)
|
||||
{
|
||||
if (_bytes[i] != bytes[i])
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
|
||||
return _bytes.AsSpan().SequenceEqual(bytes);
|
||||
}
|
||||
|
||||
public bool Equals(NewSubHeaderType other)
|
||||
public bool Equals(NewSubHeaderType? other)
|
||||
{
|
||||
return Equals(other._bytes);
|
||||
return other is not null && Equals(other._bytes);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -3,12 +3,15 @@
|
||||
namespace SharpCompress.Common.Rar.Headers
|
||||
{
|
||||
// ProtectHeader is part of the Recovery Record feature
|
||||
internal class ProtectHeader : RarHeader
|
||||
internal sealed class ProtectHeader : RarHeader
|
||||
{
|
||||
public ProtectHeader(RarHeader header, RarCrcBinaryReader reader)
|
||||
: base(header, reader, HeaderType.Protect)
|
||||
public ProtectHeader(RarHeader header, RarCrcBinaryReader reader)
|
||||
: base(header, reader, HeaderType.Protect)
|
||||
{
|
||||
if (IsRar5) throw new InvalidFormatException("unexpected rar5 record");
|
||||
if (IsRar5)
|
||||
{
|
||||
throw new InvalidFormatException("unexpected rar5 record");
|
||||
}
|
||||
}
|
||||
|
||||
protected override void ReadFinish(MarkingBinaryReader reader)
|
||||
@@ -23,6 +26,6 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
internal byte Version { get; private set; }
|
||||
internal ushort RecSectors { get; private set; }
|
||||
internal uint TotalBlocks { get; private set; }
|
||||
internal byte[] Mark { get; private set; }
|
||||
internal byte[]? Mark { get; private set; }
|
||||
}
|
||||
}
|
||||
@@ -11,7 +11,7 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
private readonly HeaderType _headerType;
|
||||
private readonly bool _isRar5;
|
||||
|
||||
internal static RarHeader TryReadBase(RarCrcBinaryReader reader, bool isRar5, ArchiveEncoding archiveEncoding)
|
||||
internal static RarHeader? TryReadBase(RarCrcBinaryReader reader, bool isRar5, ArchiveEncoding archiveEncoding)
|
||||
{
|
||||
try
|
||||
{
|
||||
@@ -23,12 +23,12 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
}
|
||||
}
|
||||
|
||||
private RarHeader(RarCrcBinaryReader reader, bool isRar5, ArchiveEncoding archiveEncoding)
|
||||
private RarHeader(RarCrcBinaryReader reader, bool isRar5, ArchiveEncoding archiveEncoding)
|
||||
{
|
||||
_headerType = HeaderType.Null;
|
||||
_isRar5 = isRar5;
|
||||
ArchiveEncoding = archiveEncoding;
|
||||
if (IsRar5)
|
||||
if (IsRar5)
|
||||
{
|
||||
HeaderCrc = reader.ReadUInt32();
|
||||
reader.ResetCrc();
|
||||
@@ -45,7 +45,9 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
{
|
||||
AdditionalDataSize = (long)reader.ReadRarVInt();
|
||||
}
|
||||
} else {
|
||||
}
|
||||
else
|
||||
{
|
||||
reader.Mark();
|
||||
HeaderCrc = reader.ReadUInt16();
|
||||
reader.ResetCrc();
|
||||
@@ -59,7 +61,8 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
}
|
||||
}
|
||||
|
||||
protected RarHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType) {
|
||||
protected RarHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType)
|
||||
{
|
||||
_headerType = headerType;
|
||||
_isRar5 = header.IsRar5;
|
||||
HeaderCrc = header.HeaderCrc;
|
||||
@@ -80,7 +83,8 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
VerifyHeaderCrc(reader.GetCrc32());
|
||||
}
|
||||
|
||||
protected int RemainingHeaderBytes(MarkingBinaryReader reader) {
|
||||
protected int RemainingHeaderBytes(MarkingBinaryReader reader)
|
||||
{
|
||||
return checked(HeaderSize - (int)reader.CurrentReadByteCount);
|
||||
}
|
||||
|
||||
@@ -108,7 +112,7 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
|
||||
protected ushort HeaderFlags { get; }
|
||||
|
||||
protected bool HasHeaderFlag(ushort flag)
|
||||
protected bool HasHeaderFlag(ushort flag)
|
||||
{
|
||||
return (HeaderFlags & flag) == flag;
|
||||
}
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using SharpCompress.IO;
|
||||
@@ -26,7 +25,7 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
_isRar5 = markHeader.IsRar5;
|
||||
yield return markHeader;
|
||||
|
||||
RarHeader header;
|
||||
RarHeader? header;
|
||||
while ((header = TryReadNextHeader(stream)) != null)
|
||||
{
|
||||
yield return header;
|
||||
@@ -39,16 +38,16 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
}
|
||||
}
|
||||
|
||||
private RarHeader TryReadNextHeader(Stream stream)
|
||||
private RarHeader? TryReadNextHeader(Stream stream)
|
||||
{
|
||||
RarCrcBinaryReader reader;
|
||||
if (!IsEncrypted)
|
||||
if (!IsEncrypted)
|
||||
{
|
||||
reader = new RarCrcBinaryReader(stream);
|
||||
}
|
||||
else
|
||||
}
|
||||
else
|
||||
{
|
||||
if (Options.Password == null)
|
||||
if (Options.Password is null)
|
||||
{
|
||||
throw new CryptographicException("Encrypted Rar archive has no password specified.");
|
||||
}
|
||||
@@ -56,7 +55,7 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
}
|
||||
|
||||
var header = RarHeader.TryReadBase(reader, _isRar5, Options.ArchiveEncoding);
|
||||
if (header == null)
|
||||
if (header is null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
@@ -66,7 +65,7 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
case HeaderCodeV.RAR4_ARCHIVE_HEADER:
|
||||
{
|
||||
var ah = new ArchiveHeader(header, reader);
|
||||
if (ah.IsEncrypted == true)
|
||||
if (ah.IsEncrypted == true)
|
||||
{
|
||||
//!!! rar5 we don't know yet
|
||||
IsEncrypted = true;
|
||||
@@ -128,13 +127,13 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
case StreamingMode.Streaming:
|
||||
{
|
||||
var ms = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
|
||||
if (fh.R4Salt == null)
|
||||
if (fh.R4Salt is null)
|
||||
{
|
||||
fh.PackedStream = ms;
|
||||
}
|
||||
else
|
||||
{
|
||||
fh.PackedStream = new RarCryptoWrapper(ms, Options.Password, fh.R4Salt);
|
||||
fh.PackedStream = new RarCryptoWrapper(ms, Options.Password!, fh.R4Salt);
|
||||
}
|
||||
}
|
||||
break;
|
||||
@@ -151,11 +150,11 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
return new EndArchiveHeader(header, reader);
|
||||
}
|
||||
case HeaderCodeV.RAR5_ARCHIVE_ENCRYPTION_HEADER:
|
||||
{
|
||||
var ch = new ArchiveCryptHeader(header, reader);
|
||||
IsEncrypted = true;
|
||||
return ch;
|
||||
}
|
||||
{
|
||||
var ch = new ArchiveCryptHeader(header, reader);
|
||||
IsEncrypted = true;
|
||||
return ch;
|
||||
}
|
||||
default:
|
||||
{
|
||||
throw new InvalidFormatException("Unknown Rar Header: " + header.HeaderCode);
|
||||
@@ -163,21 +162,26 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
}
|
||||
}
|
||||
|
||||
private void SkipData(FileHeader fh, RarCrcBinaryReader reader) {
|
||||
switch (StreamingMode) {
|
||||
case StreamingMode.Seekable: {
|
||||
fh.DataStartPosition = reader.BaseStream.Position;
|
||||
reader.BaseStream.Position += fh.CompressedSize;
|
||||
}
|
||||
private void SkipData(FileHeader fh, RarCrcBinaryReader reader)
|
||||
{
|
||||
switch (StreamingMode)
|
||||
{
|
||||
case StreamingMode.Seekable:
|
||||
{
|
||||
fh.DataStartPosition = reader.BaseStream.Position;
|
||||
reader.BaseStream.Position += fh.CompressedSize;
|
||||
}
|
||||
break;
|
||||
case StreamingMode.Streaming: {
|
||||
//skip the data because it's useless?
|
||||
reader.BaseStream.Skip(fh.CompressedSize);
|
||||
}
|
||||
case StreamingMode.Streaming:
|
||||
{
|
||||
//skip the data because it's useless?
|
||||
reader.BaseStream.Skip(fh.CompressedSize);
|
||||
}
|
||||
break;
|
||||
default: {
|
||||
throw new InvalidFormatException("Invalid StreamingMode");
|
||||
}
|
||||
default:
|
||||
{
|
||||
throw new InvalidFormatException("Invalid StreamingMode");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,9 +5,12 @@ namespace SharpCompress.Common.Rar.Headers
|
||||
internal class SignHeader : RarHeader
|
||||
{
|
||||
protected SignHeader(RarHeader header, RarCrcBinaryReader reader)
|
||||
: base(header, reader, HeaderType.Sign)
|
||||
{
|
||||
if (IsRar5) throw new InvalidFormatException("unexpected rar5 record");
|
||||
: base(header, reader, HeaderType.Sign)
|
||||
{
|
||||
if (IsRar5)
|
||||
{
|
||||
throw new InvalidFormatException("unexpected rar5 record");
|
||||
}
|
||||
}
|
||||
|
||||
protected override void ReadFinish(MarkingBinaryReader reader)
|
||||
|
||||
@@ -3,7 +3,7 @@ using System.IO;
|
||||
|
||||
namespace SharpCompress.Common.Rar
|
||||
{
|
||||
internal class RarCryptoBinaryReader : RarCrcBinaryReader
|
||||
internal sealed class RarCryptoBinaryReader : RarCrcBinaryReader
|
||||
{
|
||||
private RarRijndael _rijndael;
|
||||
private byte[] _salt;
|
||||
@@ -19,7 +19,9 @@ namespace SharpCompress.Common.Rar
|
||||
// coderb: not sure why this was being done at this logical point
|
||||
//SkipQueue();
|
||||
byte[] salt = ReadBytes(8);
|
||||
InitializeAes(salt);
|
||||
|
||||
_salt = salt;
|
||||
_rijndael = RarRijndael.InitializeFrom(_password, salt);
|
||||
}
|
||||
|
||||
// track read count ourselves rather than using the underlying stream since we buffer
|
||||
@@ -39,12 +41,6 @@ namespace SharpCompress.Common.Rar
|
||||
|
||||
private bool UseEncryption => _salt != null;
|
||||
|
||||
internal void InitializeAes(byte[] salt)
|
||||
{
|
||||
_salt = salt;
|
||||
_rijndael = RarRijndael.InitializeFrom(_password, salt);
|
||||
}
|
||||
|
||||
public override byte ReadByte()
|
||||
{
|
||||
if (UseEncryption)
|
||||
@@ -81,7 +77,9 @@ namespace SharpCompress.Common.Rar
|
||||
byte[] cipherText = ReadBytesNoCrc(16);
|
||||
var readBytes = _rijndael.ProcessBlock(cipherText);
|
||||
foreach (var readByte in readBytes)
|
||||
{
|
||||
_data.Enqueue(readByte);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ using System.IO;
|
||||
|
||||
namespace SharpCompress.Common.Rar
|
||||
{
|
||||
internal class RarCryptoWrapper : Stream
|
||||
internal sealed class RarCryptoWrapper : Stream
|
||||
{
|
||||
private readonly Stream _actualStream;
|
||||
private readonly byte[] _salt;
|
||||
@@ -35,7 +35,7 @@ namespace SharpCompress.Common.Rar
|
||||
|
||||
public override int Read(byte[] buffer, int offset, int count)
|
||||
{
|
||||
if (_salt == null)
|
||||
if (_salt is null)
|
||||
{
|
||||
return _actualStream.Read(buffer, offset, count);
|
||||
}
|
||||
@@ -50,20 +50,23 @@ namespace SharpCompress.Common.Rar
|
||||
if (sizeToRead > 0)
|
||||
{
|
||||
int alignedSize = sizeToRead + ((~sizeToRead + 1) & 0xf);
|
||||
byte[] cipherText = new byte[RarRijndael.CRYPTO_BLOCK_SIZE];
|
||||
Span<byte> cipherText = stackalloc byte[RarRijndael.CRYPTO_BLOCK_SIZE];
|
||||
for (int i = 0; i < alignedSize / 16; i++)
|
||||
{
|
||||
//long ax = System.currentTimeMillis();
|
||||
_actualStream.Read(cipherText, 0, RarRijndael.CRYPTO_BLOCK_SIZE);
|
||||
_actualStream.Read(cipherText);
|
||||
|
||||
var readBytes = _rijndael.ProcessBlock(cipherText);
|
||||
foreach (var readByte in readBytes)
|
||||
{
|
||||
_data.Enqueue(readByte);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
for (int i = 0; i < count; i++)
|
||||
{
|
||||
buffer[offset + i] = _data.Dequeue();
|
||||
}
|
||||
}
|
||||
return count;
|
||||
}
|
||||
@@ -88,7 +91,7 @@ namespace SharpCompress.Common.Rar
|
||||
if (_rijndael != null)
|
||||
{
|
||||
_rijndael.Dispose();
|
||||
_rijndael = null;
|
||||
_rijndael = null!;
|
||||
}
|
||||
base.Dispose(disposing);
|
||||
}
|
||||
|
||||
@@ -11,7 +11,7 @@ namespace SharpCompress.Common.Rar
|
||||
/// As the V2017 port isn't complete, add this check to use the legacy Rar code.
|
||||
/// </summary>
|
||||
internal bool IsRarV3 => FileHeader.CompressionAlgorithm == 29 || FileHeader.CompressionAlgorithm == 36;
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// The File's 32 bit CRC Hash
|
||||
/// </summary>
|
||||
@@ -22,7 +22,7 @@ namespace SharpCompress.Common.Rar
|
||||
/// </summary>
|
||||
public override string Key => FileHeader.FileName;
|
||||
|
||||
public override string LinkTarget => null;
|
||||
public override string? LinkTarget => null;
|
||||
|
||||
/// <summary>
|
||||
/// The entry last modified time in the archive, if recorded
|
||||
|
||||
@@ -19,7 +19,7 @@ namespace SharpCompress.Common.Rar
|
||||
|
||||
internal FileHeader FileHeader { get; }
|
||||
|
||||
internal override Stream GetRawStream()
|
||||
internal override Stream? GetRawStream()
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
using System;
|
||||
#nullable disable
|
||||
|
||||
using System;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using SharpCompress.Crypto;
|
||||
@@ -25,13 +27,13 @@ namespace SharpCompress.Common.Rar
|
||||
|
||||
_rijndael = new RijndaelEngine();
|
||||
_aesInitializationVector = new byte[CRYPTO_BLOCK_SIZE];
|
||||
int rawLength = 2*_password.Length;
|
||||
int rawLength = 2 * _password.Length;
|
||||
byte[] rawPassword = new byte[rawLength + 8];
|
||||
byte[] passwordBytes = Encoding.UTF8.GetBytes(_password);
|
||||
for (int i = 0; i < _password.Length; i++)
|
||||
{
|
||||
rawPassword[i*2] = passwordBytes[i];
|
||||
rawPassword[i*2 + 1] = 0;
|
||||
rawPassword[i * 2] = passwordBytes[i];
|
||||
rawPassword[i * 2 + 1] = 0;
|
||||
}
|
||||
for (int i = 0; i < _salt.Length; i++)
|
||||
{
|
||||
@@ -66,11 +68,11 @@ namespace SharpCompress.Common.Rar
|
||||
{
|
||||
for (int j = 0; j < 4; j++)
|
||||
{
|
||||
aesKey[i*4 + j] = (byte)
|
||||
(((digest[i*4]*0x1000000) & 0xff000000 |
|
||||
(uint) ((digest[i*4 + 1]*0x10000) & 0xff0000) |
|
||||
(uint) ((digest[i*4 + 2]*0x100) & 0xff00) |
|
||||
(uint) (digest[i*4 + 3] & 0xff)) >> (j*8));
|
||||
aesKey[i * 4 + j] = (byte)
|
||||
(((digest[i * 4] * 0x1000000) & 0xff000000 |
|
||||
(uint)((digest[i * 4 + 1] * 0x10000) & 0xff0000) |
|
||||
(uint)((digest[i * 4 + 2] * 0x100) & 0xff00) |
|
||||
(uint)(digest[i * 4 + 3] & 0xff)) >> (j * 8));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -85,11 +87,11 @@ namespace SharpCompress.Common.Rar
|
||||
return rijndael;
|
||||
}
|
||||
|
||||
public byte[] ProcessBlock(byte[] cipherText)
|
||||
public byte[] ProcessBlock(ReadOnlySpan<byte> cipherText)
|
||||
{
|
||||
var plainText = new byte[CRYPTO_BLOCK_SIZE];
|
||||
Span<byte> plainText = stackalloc byte[CRYPTO_BLOCK_SIZE]; // 16 bytes
|
||||
byte[] decryptedBytes = new byte[CRYPTO_BLOCK_SIZE];
|
||||
_rijndael.ProcessBlock(cipherText, 0, plainText, 0);
|
||||
_rijndael.ProcessBlock(cipherText, plainText);
|
||||
|
||||
for (int j = 0; j < CRYPTO_BLOCK_SIZE; j++)
|
||||
{
|
||||
|
||||
@@ -21,7 +21,9 @@ namespace SharpCompress.Common.Rar
|
||||
_headerFactory = new RarHeaderFactory(mode, options);
|
||||
}
|
||||
|
||||
#nullable disable
|
||||
internal ArchiveHeader ArchiveHeader { get; private set; }
|
||||
#nullable enable
|
||||
|
||||
internal StreamingMode Mode => _headerFactory.StreamingMode;
|
||||
|
||||
@@ -31,26 +33,26 @@ namespace SharpCompress.Common.Rar
|
||||
|
||||
internal IEnumerable<RarFilePart> GetVolumeFileParts()
|
||||
{
|
||||
MarkHeader lastMarkHeader = null;
|
||||
MarkHeader? lastMarkHeader = null;
|
||||
foreach (var header in _headerFactory.ReadHeaders(Stream))
|
||||
{
|
||||
switch (header.HeaderType)
|
||||
{
|
||||
case HeaderType.Mark:
|
||||
{
|
||||
lastMarkHeader = header as MarkHeader;
|
||||
}
|
||||
{
|
||||
lastMarkHeader = (MarkHeader)header;
|
||||
}
|
||||
break;
|
||||
case HeaderType.Archive:
|
||||
{
|
||||
ArchiveHeader = header as ArchiveHeader;
|
||||
}
|
||||
{
|
||||
ArchiveHeader = (ArchiveHeader)header;
|
||||
}
|
||||
break;
|
||||
case HeaderType.File:
|
||||
{
|
||||
var fh = header as FileHeader;
|
||||
yield return CreateFilePart(lastMarkHeader, fh);
|
||||
}
|
||||
{
|
||||
var fh = (FileHeader)header;
|
||||
yield return CreateFilePart(lastMarkHeader!, fh);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
@@ -58,7 +60,7 @@ namespace SharpCompress.Common.Rar
|
||||
|
||||
private void EnsureArchiveHeaderLoaded()
|
||||
{
|
||||
if (ArchiveHeader == null)
|
||||
if (ArchiveHeader is null)
|
||||
{
|
||||
if (Mode == StreamingMode.Streaming)
|
||||
{
|
||||
|
||||
@@ -3,15 +3,16 @@ using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Common
|
||||
{
|
||||
public class ReaderExtractionEventArgs<T> : EventArgs
|
||||
public sealed class ReaderExtractionEventArgs<T> : EventArgs
|
||||
{
|
||||
internal ReaderExtractionEventArgs(T entry, ReaderProgress readerProgress = null)
|
||||
internal ReaderExtractionEventArgs(T entry, ReaderProgress? readerProgress = null)
|
||||
{
|
||||
Item = entry;
|
||||
ReaderProgress = readerProgress;
|
||||
}
|
||||
|
||||
public T Item { get; }
|
||||
public ReaderProgress ReaderProgress { get; }
|
||||
|
||||
public ReaderProgress? ReaderProgress { get; }
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,6 @@
|
||||
using System;
|
||||
#nullable disable
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using SharpCompress.Compressors.LZMA;
|
||||
@@ -35,7 +37,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
_packSizes.Clear();
|
||||
_packCrCs.Clear();
|
||||
_folders.Clear();
|
||||
_numUnpackStreamsVector = null;
|
||||
_numUnpackStreamsVector = null!;
|
||||
_files.Clear();
|
||||
|
||||
_packStreamStartPositions.Clear();
|
||||
@@ -87,7 +89,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
{
|
||||
// v3.13 incorrectly worked with empty folders
|
||||
// v4.07: Loop for skipping empty folders
|
||||
for (;;)
|
||||
for (; ; )
|
||||
{
|
||||
if (folderIndex >= _folders.Count)
|
||||
{
|
||||
@@ -96,7 +98,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
|
||||
_folderStartFileIndex.Add(i); // check it
|
||||
|
||||
if (_numUnpackStreamsVector[folderIndex] != 0)
|
||||
if (_numUnpackStreamsVector![folderIndex] != 0)
|
||||
{
|
||||
break;
|
||||
}
|
||||
@@ -114,7 +116,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
|
||||
indexInFolder++;
|
||||
|
||||
if (indexInFolder >= _numUnpackStreamsVector[folderIndex])
|
||||
if (indexInFolder >= _numUnpackStreamsVector![folderIndex])
|
||||
{
|
||||
folderIndex++;
|
||||
indexInFolder = 0;
|
||||
|
||||
@@ -1,8 +1,12 @@
|
||||
using System;
|
||||
#nullable disable
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Compressors.LZMA;
|
||||
using SharpCompress.Compressors.LZMA.Utilites;
|
||||
using SharpCompress.IO;
|
||||
@@ -88,7 +92,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
|
||||
private void WaitAttribute(BlockType attribute)
|
||||
{
|
||||
for (;;)
|
||||
for (; ; )
|
||||
{
|
||||
BlockType? type = ReadId();
|
||||
if (type == attribute)
|
||||
@@ -450,7 +454,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
#endif
|
||||
|
||||
BlockType? type;
|
||||
for (;;)
|
||||
for (; ; )
|
||||
{
|
||||
type = ReadId();
|
||||
if (type == BlockType.End)
|
||||
@@ -465,7 +469,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
SkipData();
|
||||
}
|
||||
|
||||
if (packCrCs == null)
|
||||
if (packCrCs is null)
|
||||
{
|
||||
packCrCs = new List<uint?>(numPackStreams);
|
||||
for (int i = 0; i < numPackStreams; i++)
|
||||
@@ -506,7 +510,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
int index = 0;
|
||||
for (int i = 0; i < numFolders; i++)
|
||||
{
|
||||
var f = new CFolder {_firstPackStreamId = index};
|
||||
var f = new CFolder { _firstPackStreamId = index };
|
||||
folders.Add(f);
|
||||
GetNextFolderItem(f);
|
||||
index += f._packStreams.Count;
|
||||
@@ -537,7 +541,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
#endif
|
||||
}
|
||||
|
||||
for (;;)
|
||||
for (; ; )
|
||||
{
|
||||
BlockType? type = ReadId();
|
||||
if (type == BlockType.End)
|
||||
@@ -578,7 +582,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
numUnpackStreamsInFolders = null;
|
||||
|
||||
BlockType? type;
|
||||
for (;;)
|
||||
for (; ; )
|
||||
{
|
||||
type = ReadId();
|
||||
if (type == BlockType.NumUnpackStream)
|
||||
@@ -600,7 +604,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
#endif
|
||||
continue;
|
||||
}
|
||||
if (type == BlockType.Crc || type == BlockType.Size)
|
||||
if (type is BlockType.Crc or BlockType.Size)
|
||||
{
|
||||
break;
|
||||
}
|
||||
@@ -611,7 +615,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
SkipData();
|
||||
}
|
||||
|
||||
if (numUnpackStreamsInFolders == null)
|
||||
if (numUnpackStreamsInFolders is null)
|
||||
{
|
||||
numUnpackStreamsInFolders = new List<int>(folders.Count);
|
||||
for (int i = 0; i < folders.Count; i++)
|
||||
@@ -670,7 +674,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
|
||||
digests = null;
|
||||
|
||||
for (;;)
|
||||
for (; ; )
|
||||
{
|
||||
if (type == BlockType.Crc)
|
||||
{
|
||||
@@ -703,7 +707,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
}
|
||||
else if (type == BlockType.End)
|
||||
{
|
||||
if (digests == null)
|
||||
if (digests is null)
|
||||
{
|
||||
digests = new List<uint?>(numDigestsTotal);
|
||||
for (int i = 0; i < numDigestsTotal; i++)
|
||||
@@ -753,7 +757,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
unpackSizes = null;
|
||||
digests = null;
|
||||
|
||||
for (;;)
|
||||
for (; ; )
|
||||
{
|
||||
switch (ReadId())
|
||||
{
|
||||
@@ -781,7 +785,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
}
|
||||
}
|
||||
|
||||
private List<byte[]> ReadAndDecodePackedStreams(long baseOffset, IPasswordProvider pass)
|
||||
private async ValueTask<List<byte[]>> ReadAndDecodePackedStreams(long baseOffset, IPasswordProvider pass, CancellationToken cancellationToken)
|
||||
{
|
||||
#if DEBUG
|
||||
Log.WriteLine("-- ReadAndDecodePackedStreams --");
|
||||
@@ -789,22 +793,14 @@ namespace SharpCompress.Common.SevenZip
|
||||
#endif
|
||||
try
|
||||
{
|
||||
long dataStartPos;
|
||||
List<long> packSizes;
|
||||
List<uint?> packCrCs;
|
||||
List<CFolder> folders;
|
||||
List<int> numUnpackStreamsInFolders;
|
||||
List<long> unpackSizes;
|
||||
List<uint?> digests;
|
||||
|
||||
ReadStreamsInfo(null,
|
||||
out dataStartPos,
|
||||
out packSizes,
|
||||
out packCrCs,
|
||||
out folders,
|
||||
out numUnpackStreamsInFolders,
|
||||
out unpackSizes,
|
||||
out digests);
|
||||
out long dataStartPos,
|
||||
out List<long> packSizes,
|
||||
out List<uint?> packCrCs,
|
||||
out List<CFolder> folders,
|
||||
out List<int> numUnpackStreamsInFolders,
|
||||
out List<long> unpackSizes,
|
||||
out List<uint?> digests);
|
||||
|
||||
dataStartPos += baseOffset;
|
||||
|
||||
@@ -821,8 +817,8 @@ namespace SharpCompress.Common.SevenZip
|
||||
dataStartPos += packSize;
|
||||
}
|
||||
|
||||
var outStream = DecoderStreamHelper.CreateDecoderStream(_stream, oldDataStartPos, myPackSizes,
|
||||
folder, pass);
|
||||
var outStream = await DecoderStreamHelper.CreateDecoderStream(_stream, oldDataStartPos, myPackSizes,
|
||||
folder, pass, cancellationToken);
|
||||
|
||||
int unpackSize = checked((int)folder.GetUnpackSize());
|
||||
byte[] data = new byte[unpackSize];
|
||||
@@ -851,7 +847,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
}
|
||||
}
|
||||
|
||||
private void ReadHeader(ArchiveDatabase db, IPasswordProvider getTextPassword)
|
||||
private async ValueTask ReadHeader(ArchiveDatabase db, IPasswordProvider getTextPassword, CancellationToken cancellationToken)
|
||||
{
|
||||
#if DEBUG
|
||||
Log.WriteLine("-- ReadHeader --");
|
||||
@@ -870,7 +866,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
List<byte[]> dataVector = null;
|
||||
if (type == BlockType.AdditionalStreamsInfo)
|
||||
{
|
||||
dataVector = ReadAndDecodePackedStreams(db._startPositionAfterHeader, getTextPassword);
|
||||
dataVector = await ReadAndDecodePackedStreams(db._startPositionAfterHeader, getTextPassword, cancellationToken);
|
||||
type = ReadId();
|
||||
}
|
||||
|
||||
@@ -932,7 +928,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
BitVector antiFileVector = null;
|
||||
int numEmptyStreams = 0;
|
||||
|
||||
for (;;)
|
||||
for (; ; )
|
||||
{
|
||||
type = ReadId();
|
||||
if (type == BlockType.End)
|
||||
@@ -967,7 +963,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
#if DEBUG
|
||||
Log.Write("WinAttributes:");
|
||||
#endif
|
||||
ReadAttributeVector(dataVector, numFiles, delegate(int i, uint? attr)
|
||||
ReadAttributeVector(dataVector, numFiles, delegate (int i, uint? attr)
|
||||
{
|
||||
// Some third party implementations established an unofficial extension
|
||||
// of the 7z archive format by placing posix file attributes in the high
|
||||
@@ -1055,7 +1051,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
#if DEBUG
|
||||
Log.Write("StartPos:");
|
||||
#endif
|
||||
ReadNumberVector(dataVector, numFiles, delegate(int i, long? startPos)
|
||||
ReadNumberVector(dataVector, numFiles, delegate (int i, long? startPos)
|
||||
{
|
||||
db._files[i].StartPos = startPos;
|
||||
#if DEBUG
|
||||
@@ -1070,7 +1066,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
#if DEBUG
|
||||
Log.Write("CTime:");
|
||||
#endif
|
||||
ReadDateTimeVector(dataVector, numFiles, delegate(int i, DateTime? time)
|
||||
ReadDateTimeVector(dataVector, numFiles, delegate (int i, DateTime? time)
|
||||
{
|
||||
db._files[i].CTime = time;
|
||||
#if DEBUG
|
||||
@@ -1085,7 +1081,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
#if DEBUG
|
||||
Log.Write("ATime:");
|
||||
#endif
|
||||
ReadDateTimeVector(dataVector, numFiles, delegate(int i, DateTime? time)
|
||||
ReadDateTimeVector(dataVector, numFiles, delegate (int i, DateTime? time)
|
||||
{
|
||||
db._files[i].ATime = time;
|
||||
#if DEBUG
|
||||
@@ -1100,7 +1096,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
#if DEBUG
|
||||
Log.Write("MTime:");
|
||||
#endif
|
||||
ReadDateTimeVector(dataVector, numFiles, delegate(int i, DateTime? time)
|
||||
ReadDateTimeVector(dataVector, numFiles, delegate (int i, DateTime? time)
|
||||
{
|
||||
db._files[i].MTime = time;
|
||||
#if DEBUG
|
||||
@@ -1443,8 +1439,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
|
||||
private Stream GetCachedDecoderStream(ArchiveDatabase db, int folderIndex)
|
||||
{
|
||||
Stream s;
|
||||
if (!_cachedStreams.TryGetValue(folderIndex, out s))
|
||||
if (!_cachedStreams.TryGetValue(folderIndex, out Stream s))
|
||||
{
|
||||
CFolder folderInfo = db._folders[folderIndex];
|
||||
int packStreamIndex = db._folders[folderIndex]._firstPackStreamId;
|
||||
@@ -1487,16 +1482,11 @@ namespace SharpCompress.Common.SevenZip
|
||||
|
||||
public void Extract(ArchiveDatabase db, int[] indices)
|
||||
{
|
||||
int numItems;
|
||||
bool allFilesMode = (indices == null);
|
||||
if (allFilesMode)
|
||||
{
|
||||
numItems = db._files.Count;
|
||||
}
|
||||
else
|
||||
{
|
||||
numItems = indices.Length;
|
||||
}
|
||||
bool allFilesMode = (indices is null);
|
||||
|
||||
int numItems = allFilesMode
|
||||
? db._files.Count
|
||||
: indices.Length;
|
||||
|
||||
if (numItems == 0)
|
||||
{
|
||||
@@ -1529,6 +1519,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
}
|
||||
}
|
||||
|
||||
byte[] buffer = null;
|
||||
foreach (CExtractFolderInfo efi in extractFolderInfoVector)
|
||||
{
|
||||
int startIndex;
|
||||
@@ -1565,8 +1556,8 @@ namespace SharpCompress.Common.SevenZip
|
||||
|
||||
Stream s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes,
|
||||
folderInfo, db.PasswordProvider);
|
||||
byte[] buffer = new byte[4 << 10];
|
||||
for (;;)
|
||||
buffer ??= new byte[4 << 10];
|
||||
for (; ; )
|
||||
{
|
||||
int processed = s.Read(buffer, 0, buffer.Length);
|
||||
if (processed == 0)
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
namespace SharpCompress.Common.SevenZip
|
||||
#nullable disable
|
||||
|
||||
namespace SharpCompress.Common.SevenZip
|
||||
{
|
||||
internal class CCoderInfo
|
||||
{
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
using System;
|
||||
#nullable disable
|
||||
|
||||
using System;
|
||||
|
||||
namespace SharpCompress.Common.SevenZip
|
||||
{
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
namespace SharpCompress.Common.SevenZip
|
||||
{
|
||||
internal struct CMethodId
|
||||
internal readonly struct CMethodId
|
||||
{
|
||||
public const ulong K_COPY_ID = 0;
|
||||
public const ulong K_LZMA_ID = 0x030101;
|
||||
@@ -24,9 +24,9 @@
|
||||
return _id.GetHashCode();
|
||||
}
|
||||
|
||||
public override bool Equals(object obj)
|
||||
public override bool Equals(object? obj)
|
||||
{
|
||||
return obj is CMethodId && (CMethodId)obj == this;
|
||||
return obj is CMethodId other && Equals(other);
|
||||
}
|
||||
|
||||
public bool Equals(CMethodId other)
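
The CMethodId hunk keeps the typed Equals overload and rewrites the object overload as a pattern match on a readonly struct. A minimal sketch of that equality pattern over a simplified, hypothetical type (MethodId here is illustrative, not the repository's struct):

// Readonly struct with typed equality, so Equals(object?) only boxes when it must.
internal readonly struct MethodId : IEquatable<MethodId>
{
    private readonly ulong _id;
    public MethodId(ulong id) => _id = id;
    public bool Equals(MethodId other) => _id == other._id;
    public override bool Equals(object? obj) => obj is MethodId other && Equals(other);
    public override int GetHashCode() => _id.GetHashCode();
    public static bool operator ==(MethodId left, MethodId right) => left.Equals(right);
    public static bool operator !=(MethodId left, MethodId right) => !left.Equals(right);
}
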
|
||||
|
||||
@@ -161,7 +161,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
{
|
||||
int ending = Offset;
|
||||
|
||||
for (;;)
|
||||
for (; ; )
|
||||
{
|
||||
if (ending + 2 > _ending)
|
||||
{
|
||||
|
||||
@@ -18,7 +18,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
|
||||
public override string Key => FilePart.Header.Name;
|
||||
|
||||
public override string LinkTarget => null;
|
||||
public override string? LinkTarget => null;
|
||||
|
||||
public override long CompressedSize => 0;
|
||||
|
||||
@@ -38,7 +38,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
|
||||
public override bool IsSplitAfter => false;
|
||||
|
||||
public override int? Attrib => (int)FilePart.Header.Attrib;
|
||||
public override int? Attrib => FilePart.Header.Attrib.HasValue ? (int?)FilePart.Header.Attrib.Value : null;
|
||||
|
||||
internal override IEnumerable<FilePart> Parts => FilePart.AsEnumerable<FilePart>();
|
||||
}
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.IO;
|
||||
|
||||
namespace SharpCompress.Common.SevenZip
|
||||
@@ -25,25 +27,25 @@ namespace SharpCompress.Common.SevenZip
|
||||
}
|
||||
|
||||
internal CFileItem Header { get; }
|
||||
internal CFolder Folder { get; }
|
||||
internal CFolder? Folder { get; }
|
||||
internal int Index { get; }
|
||||
|
||||
internal override string FilePartName => Header.Name;
|
||||
|
||||
internal override Stream GetRawStream()
|
||||
internal override Stream? GetRawStream()
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
internal override Stream GetCompressedStream()
|
||||
internal override async ValueTask<Stream> GetCompressedStreamAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
if (!Header.HasStream)
|
||||
{
|
||||
return null;
|
||||
return Stream.Null;
|
||||
}
|
||||
var folderStream = _database.GetFolderStream(_stream, Folder, _database.PasswordProvider);
|
||||
var folderStream = _database.GetFolderStream(_stream, Folder!, _database.PasswordProvider);
|
||||
|
||||
int firstFileIndex = _database._folderStartFileIndex[_database._folders.IndexOf(Folder)];
|
||||
int firstFileIndex = _database._folderStartFileIndex[_database._folders.IndexOf(Folder!)];
|
||||
int skipCount = Index - firstFileIndex;
|
||||
long skipSize = 0;
|
||||
for (int i = 0; i < skipCount; i++)
|
||||
@@ -52,7 +54,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
}
|
||||
if (skipSize > 0)
|
||||
{
|
||||
folderStream.Skip(skipSize);
|
||||
await folderStream.SkipAsync(skipSize, cancellationToken);
|
||||
}
|
||||
return new ReadOnlySubStream(folderStream, Header.Size);
|
||||
}
|
||||
@@ -61,7 +63,7 @@ namespace SharpCompress.Common.SevenZip
|
||||
{
|
||||
get
|
||||
{
|
||||
if (_type == null)
|
||||
if (_type is null)
|
||||
{
|
||||
_type = GetCompression();
|
||||
}
|
||||
@@ -82,22 +84,22 @@ namespace SharpCompress.Common.SevenZip
|
||||
|
||||
internal CompressionType GetCompression()
|
||||
{
|
||||
var coder = Folder._coders.First();
|
||||
var coder = Folder!._coders.First();
|
||||
switch (coder._methodId._id)
|
||||
{
|
||||
{
|
||||
case K_LZMA:
|
||||
case K_LZMA2:
|
||||
{
|
||||
return CompressionType.LZMA;
|
||||
}
|
||||
{
|
||||
return CompressionType.LZMA;
|
||||
}
|
||||
case K_PPMD:
|
||||
{
|
||||
return CompressionType.PPMd;
|
||||
}
|
||||
{
|
||||
return CompressionType.PPMd;
|
||||
}
|
||||
case K_B_ZIP2:
|
||||
{
|
||||
return CompressionType.BZip2;
|
||||
}
|
||||
{
|
||||
return CompressionType.BZip2;
|
||||
}
|
||||
default:
|
||||
throw new NotImplementedException();
|
||||
}
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
using System.IO;
|
||||
using SharpCompress.Archives;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Common.SevenZip
|
||||
|
||||
@@ -1,11 +1,16 @@
|
||||
using System;
|
||||
#nullable disable
|
||||
|
||||
using System;
|
||||
using System.Buffers;
|
||||
using System.Buffers.Binary;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace SharpCompress.Common.Tar.Headers
|
||||
{
|
||||
internal class TarHeader
|
||||
internal sealed class TarHeader
|
||||
{
|
||||
internal static readonly DateTime EPOCH = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
|
||||
|
||||
@@ -30,48 +35,48 @@ namespace SharpCompress.Common.Tar.Headers
|
||||
|
||||
internal const int BLOCK_SIZE = 512;
|
||||
|
||||
internal void Write(Stream output)
|
||||
internal async Task WriteAsync(Stream output)
|
||||
{
|
||||
byte[] buffer = new byte[BLOCK_SIZE];
|
||||
using var buffer = MemoryPool<byte>.Shared.Rent(BLOCK_SIZE);
|
||||
|
||||
WriteOctalBytes(511, buffer, 100, 8); // file mode
|
||||
WriteOctalBytes(0, buffer, 108, 8); // owner ID
|
||||
WriteOctalBytes(0, buffer, 116, 8); // group ID
|
||||
WriteOctalBytes(511, buffer.Memory.Span, 100, 8); // file mode
|
||||
WriteOctalBytes(0, buffer.Memory.Span, 108, 8); // owner ID
|
||||
WriteOctalBytes(0, buffer.Memory.Span, 116, 8); // group ID
|
||||
|
||||
//ArchiveEncoding.UTF8.GetBytes("magic").CopyTo(buffer, 257);
|
||||
var nameByteCount = ArchiveEncoding.GetEncoding().GetByteCount(Name);
|
||||
if (nameByteCount > 100)
|
||||
{
|
||||
// Set mock filename and filetype to indicate the next block is the actual name of the file
|
||||
WriteStringBytes("././@LongLink", buffer, 0, 100);
|
||||
buffer[156] = (byte)EntryType.LongName;
|
||||
WriteOctalBytes(nameByteCount + 1, buffer, 124, 12);
|
||||
WriteStringBytes("././@LongLink", buffer.Memory.Span, 0, 100);
|
||||
buffer.Memory.Span[156] = (byte)EntryType.LongName;
|
||||
WriteOctalBytes(nameByteCount + 1, buffer.Memory.Span, 124, 12);
|
||||
}
|
||||
else
|
||||
{
|
||||
WriteStringBytes(ArchiveEncoding.Encode(Name), buffer, 100);
|
||||
WriteOctalBytes(Size, buffer, 124, 12);
|
||||
WriteStringBytes(ArchiveEncoding.Encode(Name), buffer.Memory, 100);
|
||||
WriteOctalBytes(Size, buffer.Memory.Span, 124, 12);
|
||||
var time = (long)(LastModifiedTime.ToUniversalTime() - EPOCH).TotalSeconds;
|
||||
WriteOctalBytes(time, buffer, 136, 12);
|
||||
buffer[156] = (byte)EntryType;
|
||||
WriteOctalBytes(time, buffer.Memory.Span, 136, 12);
|
||||
buffer.Memory.Span[156] = (byte)EntryType;
|
||||
|
||||
if (Size >= 0x1FFFFFFFF)
|
||||
{
|
||||
Span<byte> bytes12 = stackalloc byte[12];
|
||||
BinaryPrimitives.WriteInt64BigEndian(bytes12.Slice(4), Size);
|
||||
bytes12[0] |= 0x80;
|
||||
bytes12.CopyTo(buffer.AsSpan(124));
|
||||
using var bytes12 = MemoryPool<byte>.Shared.Rent(12);
|
||||
BinaryPrimitives.WriteInt64BigEndian(bytes12.Memory.Span.Slice(4), Size);
|
||||
bytes12.Memory.Span[0] |= 0x80;
|
||||
bytes12.Memory.CopyTo(buffer.Memory.Slice(124));
|
||||
}
|
||||
}
|
||||
|
||||
int crc = RecalculateChecksum(buffer);
|
||||
WriteOctalBytes(crc, buffer, 148, 8);
|
||||
int crc = RecalculateChecksum(buffer.Memory);
|
||||
WriteOctalBytes(crc, buffer.Memory.Span, 148, 8);
|
||||
|
||||
output.Write(buffer, 0, buffer.Length);
|
||||
await output.WriteAsync(buffer.Memory.Slice(0, BLOCK_SIZE));
|
||||
|
||||
if (nameByteCount > 100)
|
||||
{
|
||||
WriteLongFilenameHeader(output);
|
||||
await WriteLongFilenameHeaderAsync(output);
|
||||
// update to short name lower than 100 - [max bytes of one character].
|
||||
// subtracting bytes is needed to prevent an infinite loop (example code is here).
|
||||
//
|
||||
@@ -80,14 +85,14 @@ namespace SharpCompress.Common.Tar.Headers
|
||||
//
|
||||
// and then infinite recursion occurs in WriteLongFilenameHeader because truncated.Length is 102.
|
||||
Name = ArchiveEncoding.Decode(ArchiveEncoding.Encode(Name), 0, 100 - ArchiveEncoding.GetEncoding().GetMaxByteCount(1));
|
||||
Write(output);
|
||||
await WriteAsync(output);
|
||||
}
|
||||
}
|
||||
|
||||
private void WriteLongFilenameHeader(Stream output)
|
||||
private async Task WriteLongFilenameHeaderAsync(Stream output)
|
||||
{
|
||||
byte[] nameBytes = ArchiveEncoding.Encode(Name);
|
||||
output.Write(nameBytes, 0, nameBytes.Length);
|
||||
await output.WriteAsync(nameBytes.AsMemory());
|
||||
|
||||
// pad to multiple of BlockSize bytes, and make sure a terminating null is added
|
||||
int numPaddingBytes = BLOCK_SIZE - (nameBytes.Length % BLOCK_SIZE);
|
||||
@@ -95,48 +100,56 @@ namespace SharpCompress.Common.Tar.Headers
|
||||
{
|
||||
numPaddingBytes = BLOCK_SIZE;
|
||||
}
|
||||
output.Write(new byte[numPaddingBytes], 0, numPaddingBytes);
|
||||
|
||||
using var padding = MemoryPool<byte>.Shared.Rent(numPaddingBytes);
|
||||
padding.Memory.Span.Clear();
|
||||
await output.WriteAsync(padding.Memory.Slice(0, numPaddingBytes));
|
||||
}
|
||||
|
||||
internal bool Read(BinaryReader reader)
|
||||
internal async ValueTask<bool> Read(Stream stream, CancellationToken cancellationToken)
|
||||
{
|
||||
var buffer = ReadBlock(reader);
|
||||
if (buffer.Length == 0)
|
||||
var block = MemoryPool<byte>.Shared.Rent(BLOCK_SIZE);
|
||||
bool readFullyAsync = await stream.ReadAsync(block.Memory.Slice(0, BLOCK_SIZE), cancellationToken) == BLOCK_SIZE;
|
||||
if (readFullyAsync is false)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// for symlinks, additionally read the linkname
|
||||
if (ReadEntryType(buffer) == EntryType.SymLink)
|
||||
if (ReadEntryType(block.Memory.Span) == EntryType.SymLink)
|
||||
{
|
||||
LinkName = ArchiveEncoding.Decode(buffer, 157, 100).TrimNulls();
|
||||
LinkName = ArchiveEncoding.Decode(block.Memory.Span.Slice(157, 100)).TrimNulls();
|
||||
}
|
||||
|
||||
if (ReadEntryType(buffer) == EntryType.LongName)
|
||||
if (ReadEntryType(block.Memory.Span) == EntryType.LongName)
|
||||
{
|
||||
Name = ReadLongName(reader, buffer);
|
||||
buffer = ReadBlock(reader);
|
||||
Name = await ReadLongName(stream, block.Memory.Slice(0,BLOCK_SIZE), cancellationToken);
|
||||
readFullyAsync = await stream.ReadAsync(block.Memory.Slice(0, BLOCK_SIZE), cancellationToken) == BLOCK_SIZE;
|
||||
if (readFullyAsync is false)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
Name = ArchiveEncoding.Decode(buffer, 0, 100).TrimNulls();
|
||||
Name = ArchiveEncoding.Decode(block.Memory.Span.Slice( 0, 100)).TrimNulls();
|
||||
}
|
||||
|
||||
EntryType = ReadEntryType(buffer);
|
||||
Size = ReadSize(buffer);
|
||||
EntryType = ReadEntryType(block.Memory.Span);
|
||||
Size = ReadSize(block.Memory.Slice(0, BLOCK_SIZE));
|
||||
|
||||
//Mode = ReadASCIIInt32Base8(buffer, 100, 7);
|
||||
//UserId = ReadASCIIInt32Base8(buffer, 108, 7);
|
||||
//GroupId = ReadASCIIInt32Base8(buffer, 116, 7);
|
||||
long unixTimeStamp = ReadAsciiInt64Base8(buffer, 136, 11);
|
||||
long unixTimeStamp = ReadAsciiInt64Base8(block.Memory.Span.Slice(136, 11));
|
||||
LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime();
|
||||
|
||||
Magic = ArchiveEncoding.Decode(buffer, 257, 6).TrimNulls();
|
||||
Magic = ArchiveEncoding.Decode(block.Memory.Span.Slice( 257, 6)).TrimNulls();
|
||||
|
||||
if (!string.IsNullOrEmpty(Magic)
|
||||
&& "ustar".Equals(Magic))
|
||||
{
|
||||
string namePrefix = ArchiveEncoding.Decode(buffer, 345, 157);
|
||||
string namePrefix = ArchiveEncoding.Decode(block.Memory.Span.Slice( 345, 157));
|
||||
namePrefix = namePrefix.TrimNulls();
|
||||
if (!string.IsNullOrEmpty(namePrefix))
|
||||
{
|
||||
@@ -151,55 +164,46 @@ namespace SharpCompress.Common.Tar.Headers
|
||||
return true;
|
||||
}
|
||||
|
||||
private string ReadLongName(BinaryReader reader, byte[] buffer)
|
||||
private async ValueTask<string> ReadLongName(Stream reader, ReadOnlyMemory<byte> buffer, CancellationToken cancellationToken)
|
||||
{
|
||||
var size = ReadSize(buffer);
|
||||
var nameLength = (int)size;
|
||||
var nameBytes = reader.ReadBytes(nameLength);
|
||||
using var rented = MemoryPool<byte>.Shared.Rent(nameLength);
|
||||
var nameBytes = rented.Memory.Slice(0, nameLength);
|
||||
await reader.ReadAsync(nameBytes, cancellationToken);
|
||||
var remainingBytesToRead = BLOCK_SIZE - (nameLength % BLOCK_SIZE);
|
||||
|
||||
// Read the rest of the block and discard the data
|
||||
if (remainingBytesToRead < BLOCK_SIZE)
|
||||
{
|
||||
reader.ReadBytes(remainingBytesToRead);
|
||||
using var remaining = MemoryPool<byte>.Shared.Rent(remainingBytesToRead);
|
||||
await reader.ReadAsync(remaining.Memory.Slice(0, remainingBytesToRead), cancellationToken);
|
||||
}
|
||||
return ArchiveEncoding.Decode(nameBytes, 0, nameBytes.Length).TrimNulls();
|
||||
return ArchiveEncoding.Decode(nameBytes.Span).TrimNulls();
|
||||
}
|
||||
|
||||
private static EntryType ReadEntryType(byte[] buffer)
|
||||
private static EntryType ReadEntryType(Span<byte> buffer)
|
||||
{
|
||||
return (EntryType)buffer[156];
|
||||
}
|
||||
|
||||
private long ReadSize(byte[] buffer)
|
||||
private long ReadSize(ReadOnlyMemory<byte> buffer)
|
||||
{
|
||||
if ((buffer[124] & 0x80) == 0x80) // if size in binary
|
||||
if ((buffer.Span[124] & 0x80) == 0x80) // if size in binary
|
||||
{
|
||||
return BinaryPrimitives.ReadInt64BigEndian(buffer.AsSpan(0x80));
|
||||
return BinaryPrimitives.ReadInt64BigEndian(buffer.Span.Slice(0x80));
|
||||
}
|
||||
|
||||
return ReadAsciiInt64Base8(buffer, 124, 11);
|
||||
return ReadAsciiInt64Base8(buffer.Span.Slice(124, 11));
|
||||
}
|
||||
|
||||
private static byte[] ReadBlock(BinaryReader reader)
|
||||
private static void WriteStringBytes(ReadOnlySpan<byte> name, Memory<byte> buffer, int length)
|
||||
{
|
||||
byte[] buffer = reader.ReadBytes(BLOCK_SIZE);
|
||||
|
||||
if (buffer.Length != 0 && buffer.Length < BLOCK_SIZE)
|
||||
{
|
||||
throw new InvalidOperationException("Buffer is invalid size");
|
||||
}
|
||||
return buffer;
|
||||
}
|
||||
|
||||
private static void WriteStringBytes(ReadOnlySpan<byte> name, Span<byte> buffer, int length)
|
||||
{
|
||||
name.CopyTo(buffer);
|
||||
name.CopyTo(buffer.Span.Slice(0));
|
||||
int i = Math.Min(length, name.Length);
|
||||
buffer.Slice(i, length - i).Fill(0);
|
||||
buffer.Slice(i, length - i).Span.Clear();
|
||||
}
|
||||
|
||||
private static void WriteStringBytes(string name, byte[] buffer, int offset, int length)
|
||||
private static void WriteStringBytes(string name, Span<byte> buffer, int offset, int length)
|
||||
{
|
||||
int i;
|
||||
|
||||
@@ -214,7 +218,7 @@ namespace SharpCompress.Common.Tar.Headers
|
||||
}
|
||||
}
|
||||
|
||||
private static void WriteOctalBytes(long value, byte[] buffer, int offset, int length)
|
||||
private static void WriteOctalBytes(long value, Span<byte> buffer, int offset, int length)
|
||||
{
|
||||
string val = Convert.ToString(value, 8);
|
||||
int shift = length - val.Length - 1;
|
||||
@@ -228,19 +232,9 @@ namespace SharpCompress.Common.Tar.Headers
|
||||
}
|
||||
}
|
||||
|
||||
private static int ReadAsciiInt32Base8(byte[] buffer, int offset, int count)
|
||||
private static long ReadAsciiInt64Base8(ReadOnlySpan<byte> buffer)
|
||||
{
|
||||
string s = Encoding.UTF8.GetString(buffer, offset, count).TrimNulls();
|
||||
if (string.IsNullOrEmpty(s))
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
return Convert.ToInt32(s, 8);
|
||||
}
|
||||
|
||||
private static long ReadAsciiInt64Base8(byte[] buffer, int offset, int count)
|
||||
{
|
||||
string s = Encoding.UTF8.GetString(buffer, offset, count).TrimNulls();
|
||||
string s = Encoding.UTF8.GetString(buffer).TrimNulls();
|
||||
if (string.IsNullOrEmpty(s))
|
||||
{
|
||||
return 0;
|
||||
@@ -258,38 +252,26 @@ namespace SharpCompress.Common.Tar.Headers
|
||||
return Convert.ToInt64(s);
|
||||
}
|
||||
|
||||
internal static int RecalculateChecksum(byte[] buf)
|
||||
|
||||
private static readonly byte[] eightSpaces = {
|
||||
(byte)' ', (byte)' ', (byte)' ', (byte)' ',
|
||||
(byte)' ', (byte)' ', (byte)' ', (byte)' '
|
||||
};
|
||||
|
||||
private static int RecalculateChecksum(Memory<byte> buf)
|
||||
{
|
||||
// Set default value for checksum. That is 8 spaces.
|
||||
Encoding.UTF8.GetBytes(" ").CopyTo(buf, 148);
|
||||
eightSpaces.CopyTo(buf.Slice(148));
|
||||
|
||||
// Calculate checksum
|
||||
int headerChecksum = 0;
|
||||
foreach (byte b in buf)
|
||||
foreach (byte b in buf.Span)
|
||||
{
|
||||
headerChecksum += b;
|
||||
}
|
||||
return headerChecksum;
|
||||
}
|
||||
|
||||
internal static int RecalculateAltChecksum(byte[] buf)
|
||||
{
|
||||
Encoding.UTF8.GetBytes(" ").CopyTo(buf, 148);
|
||||
int headerChecksum = 0;
|
||||
foreach (byte b in buf)
|
||||
{
|
||||
if ((b & 0x80) == 0x80)
|
||||
{
|
||||
headerChecksum -= b ^ 0x80;
|
||||
}
|
||||
else
|
||||
{
|
||||
headerChecksum += b;
|
||||
}
|
||||
}
|
||||
return headerChecksum;
|
||||
}
|
||||
|
||||
public long? DataStartPosition { get; set; }
|
||||
|
||||
public string Magic { get; set; }
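
The checksum helpers above implement the ustar rule that the header checksum is computed with the 8-byte checksum field itself counted as ASCII spaces. A compact sketch of that calculation, assuming a full 512-byte header block; the helper below is illustrative and differs in signature from the repository's RecalculateChecksum/RecalculateAltChecksum.

static int ComputeTarChecksum(ReadOnlySpan<byte> block)
{
    int sum = 0;
    for (int i = 0; i < block.Length; i++)
    {
        // Offsets 148..155 hold the checksum field; treat them as ' ' while summing.
        sum += (i >= 148 && i < 156) ? (byte)' ' : block[i];
    }
    return sum;
}
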
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
using System;
|
||||
#nullable disable
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Threading;
|
||||
using SharpCompress.Common.Tar.Headers;
|
||||
using SharpCompress.IO;
|
||||
using System.Text;
|
||||
|
||||
namespace SharpCompress.Common.Tar
|
||||
{
|
||||
@@ -45,10 +48,11 @@ namespace SharpCompress.Common.Tar
|
||||
|
||||
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
|
||||
|
||||
internal static IEnumerable<TarEntry> GetEntries(StreamingMode mode, Stream stream,
|
||||
CompressionType compressionType, ArchiveEncoding archiveEncoding)
|
||||
internal static async IAsyncEnumerable<TarEntry> GetEntries(StreamingMode mode, Stream stream,
|
||||
CompressionType compressionType, ArchiveEncoding archiveEncoding,
|
||||
[EnumeratorCancellation]CancellationToken cancellationToken)
|
||||
{
|
||||
foreach (TarHeader h in TarHeaderFactory.ReadHeader(mode, stream, archiveEncoding))
|
||||
await foreach (TarHeader h in TarHeaderFactory.ReadHeader(mode, stream, archiveEncoding, cancellationToken))
|
||||
{
|
||||
if (h != null)
|
||||
{
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Tar.Headers;
|
||||
using SharpCompress.IO;
|
||||
|
||||
namespace SharpCompress.Common.Tar
|
||||
{
|
||||
internal class TarFilePart : FilePart
|
||||
internal sealed class TarFilePart : FilePart
|
||||
{
|
||||
private readonly Stream _seekableStream;
|
||||
|
||||
@@ -19,17 +20,17 @@ namespace SharpCompress.Common.Tar
|
||||
|
||||
internal override string FilePartName => Header.Name;
|
||||
|
||||
internal override Stream GetCompressedStream()
|
||||
internal override ValueTask<Stream> GetCompressedStreamAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
if (_seekableStream != null)
|
||||
{
|
||||
_seekableStream.Position = Header.DataStartPosition.Value;
|
||||
return new ReadOnlySubStream(_seekableStream, Header.Size);
|
||||
_seekableStream.Position = Header.DataStartPosition!.Value;
|
||||
return new(new TarReadOnlySubStream(_seekableStream, Header.Size));
|
||||
}
|
||||
return Header.PackedStream;
|
||||
return new(Header.PackedStream);
|
||||
}
|
||||
|
||||
internal override Stream GetRawStream()
|
||||
internal override Stream? GetRawStream()
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -1,24 +1,25 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Threading;
|
||||
using SharpCompress.Common.Tar.Headers;
|
||||
using SharpCompress.IO;
|
||||
using System.Text;
|
||||
|
||||
namespace SharpCompress.Common.Tar
|
||||
{
|
||||
internal static class TarHeaderFactory
|
||||
{
|
||||
internal static IEnumerable<TarHeader> ReadHeader(StreamingMode mode, Stream stream, ArchiveEncoding archiveEncoding)
|
||||
internal static async IAsyncEnumerable<TarHeader?> ReadHeader(StreamingMode mode, Stream stream, ArchiveEncoding archiveEncoding,
|
||||
[EnumeratorCancellation]CancellationToken cancellationToken)
|
||||
{
|
||||
while (true)
|
||||
{
|
||||
TarHeader header = null;
|
||||
TarHeader? header = null;
|
||||
try
|
||||
{
|
||||
BinaryReader reader = new BinaryReader(stream);
|
||||
header = new TarHeader(archiveEncoding);
|
||||
|
||||
if (!header.Read(reader))
|
||||
if (!await header.Read(stream, cancellationToken))
|
||||
{
|
||||
yield break;
|
||||
}
|
||||
@@ -26,10 +27,10 @@ namespace SharpCompress.Common.Tar
|
||||
{
|
||||
case StreamingMode.Seekable:
|
||||
{
|
||||
header.DataStartPosition = reader.BaseStream.Position;
|
||||
header.DataStartPosition = stream.Position;
|
||||
|
||||
//skip to nearest 512
|
||||
reader.BaseStream.Position += PadTo512(header.Size);
|
||||
stream.Position += PadTo512(header.Size);
|
||||
}
|
||||
break;
|
||||
case StreamingMode.Streaming:
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
using SharpCompress.IO;
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace SharpCompress.Common.Tar
|
||||
{
|
||||
@@ -14,29 +16,26 @@ namespace SharpCompress.Common.Tar
|
||||
BytesLeftToRead = bytesToRead;
|
||||
}
|
||||
|
||||
protected override void Dispose(bool disposing)
|
||||
public override async ValueTask DisposeAsync()
|
||||
{
|
||||
if (_isDisposed)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
_isDisposed = true;
|
||||
if (disposing)
|
||||
|
||||
// Ensure we read all remaining blocks for this entry.
|
||||
await Stream.SkipAsync(BytesLeftToRead);
|
||||
_amountRead += BytesLeftToRead;
|
||||
|
||||
// If the last block wasn't a full 512 bytes, skip the remaining padding bytes.
|
||||
var bytesInLastBlock = _amountRead % 512;
|
||||
|
||||
if (bytesInLastBlock != 0)
|
||||
{
|
||||
long skipBytes = _amountRead % 512;
|
||||
if (skipBytes == 0)
|
||||
{
|
||||
return;
|
||||
}
|
||||
skipBytes = 512 - skipBytes;
|
||||
if (skipBytes == 0)
|
||||
{
|
||||
return;
|
||||
}
|
||||
var buffer = new byte[skipBytes];
|
||||
Stream.ReadFully(buffer);
|
||||
await Stream.SkipAsync(512 - bytesInLastBlock);
|
||||
}
|
||||
base.Dispose(disposing);
|
||||
}
|
||||
|
||||
private long BytesLeftToRead { get; set; }
|
||||
@@ -47,22 +46,18 @@ namespace SharpCompress.Common.Tar
|
||||
|
||||
public override bool CanWrite => false;
|
||||
|
||||
public override void Flush()
|
||||
{
|
||||
throw new NotSupportedException();
|
||||
}
|
||||
|
||||
public override long Length => throw new NotSupportedException();
|
||||
|
||||
public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
|
||||
|
||||
public override int Read(byte[] buffer, int offset, int count)
|
||||
public override async ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (BytesLeftToRead < count)
|
||||
var count = buffer.Length;
|
||||
if (BytesLeftToRead < buffer.Length)
|
||||
{
|
||||
count = (int)BytesLeftToRead;
|
||||
}
|
||||
int read = Stream.Read(buffer, offset, count);
|
||||
int read = await Stream.ReadAsync(buffer.Slice(0, count), cancellationToken);
|
||||
if (read > 0)
|
||||
{
|
||||
BytesLeftToRead -= read;
|
||||
@@ -71,20 +66,9 @@ namespace SharpCompress.Common.Tar
|
||||
return read;
|
||||
}
|
||||
|
||||
public override int ReadByte()
|
||||
public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
|
||||
{
|
||||
if (BytesLeftToRead <= 0)
|
||||
{
|
||||
return -1;
|
||||
}
|
||||
int value = Stream.ReadByte();
|
||||
if (value != -1)
|
||||
{
|
||||
--BytesLeftToRead;
|
||||
++_amountRead;
|
||||
}
|
||||
return value;
|
||||
|
||||
throw new NotSupportedException();
|
||||
}
|
||||
|
||||
public override long Seek(long offset, SeekOrigin origin)
|
||||
@@ -96,10 +80,5 @@ namespace SharpCompress.Common.Tar
|
||||
{
|
||||
throw new NotSupportedException();
|
||||
}
|
||||
|
||||
public override void Write(byte[] buffer, int offset, int count)
|
||||
{
|
||||
throw new NotSupportedException();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.IO;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
@@ -33,19 +33,10 @@ namespace SharpCompress.Common
|
||||
/// RarArchive is part of a multi-part archive.
|
||||
/// </summary>
|
||||
public virtual bool IsMultiVolume => true;
|
||||
|
||||
protected virtual void Dispose(bool disposing)
|
||||
|
||||
public ValueTask DisposeAsync()
|
||||
{
|
||||
if (disposing)
|
||||
{
|
||||
_actualStream.Dispose();
|
||||
}
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
Dispose(true);
|
||||
GC.SuppressFinalize(this);
|
||||
return _actualStream.DisposeAsync();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,6 @@
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace SharpCompress.Common.Zip.Headers
|
||||
{
|
||||
@@ -9,31 +11,31 @@ namespace SharpCompress.Common.Zip.Headers
|
||||
{
|
||||
}
|
||||
|
||||
internal override void Read(BinaryReader reader)
|
||||
internal override async ValueTask Read(Stream stream, CancellationToken cancellationToken)
|
||||
{
|
||||
VolumeNumber = reader.ReadUInt16();
|
||||
FirstVolumeWithDirectory = reader.ReadUInt16();
|
||||
TotalNumberOfEntriesInDisk = reader.ReadUInt16();
|
||||
TotalNumberOfEntries = reader.ReadUInt16();
|
||||
DirectorySize = reader.ReadUInt32();
|
||||
DirectoryStartOffsetRelativeToDisk = reader.ReadUInt32();
|
||||
CommentLength = reader.ReadUInt16();
|
||||
Comment = reader.ReadBytes(CommentLength);
|
||||
VolumeNumber = await stream.ReadUInt16(cancellationToken);
|
||||
FirstVolumeWithDirectory = await stream.ReadUInt16(cancellationToken);
|
||||
TotalNumberOfEntriesInDisk = await stream.ReadUInt16(cancellationToken);
|
||||
TotalNumberOfEntries = await stream.ReadUInt16(cancellationToken);
|
||||
DirectorySize = await stream.ReadUInt32(cancellationToken);
|
||||
DirectoryStartOffsetRelativeToDisk = await stream.ReadUInt32(cancellationToken);
|
||||
CommentLength = await stream.ReadUInt16(cancellationToken);
|
||||
Comment = await stream.ReadBytes(CommentLength ?? 0, cancellationToken);
|
||||
}
|
||||
|
||||
public ushort VolumeNumber { get; private set; }
|
||||
public ushort? VolumeNumber { get; private set; }
|
||||
|
||||
public ushort FirstVolumeWithDirectory { get; private set; }
|
||||
public ushort? FirstVolumeWithDirectory { get; private set; }
|
||||
|
||||
public ushort TotalNumberOfEntriesInDisk { get; private set; }
|
||||
public ushort? TotalNumberOfEntriesInDisk { get; private set; }
|
||||
|
||||
public uint DirectorySize { get; private set; }
|
||||
public uint? DirectorySize { get; private set; }
|
||||
|
||||
public uint DirectoryStartOffsetRelativeToDisk { get; private set; }
|
||||
public uint? DirectoryStartOffsetRelativeToDisk { get; private set; }
|
||||
|
||||
public ushort CommentLength { get; private set; }
|
||||
public ushort? CommentLength { get; private set; }
|
||||
|
||||
public byte[] Comment { get; private set; }
|
||||
public byte[]? Comment { get; private set; }
|
||||
|
||||
public ushort TotalNumberOfEntries { get; private set; }
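
The DirectoryEndHeader hunk above reads the ZIP end-of-central-directory record field by field with awaited helpers (ReadUInt16/ReadUInt32 appear to be the project's own stream extensions). For reference, the same fixed-size fields can be decoded from a buffered record with BinaryPrimitives; the layout follows the public APPNOTE, and the helper name is illustrative.

using System.Buffers.Binary;

// Sketch: decode the fixed part of the EOCD record (the bytes after its 4-byte signature).
static (ushort disk, ushort cdDisk, ushort entriesOnDisk, ushort totalEntries, uint cdSize, uint cdOffset, ushort commentLen)
    ReadEocd(ReadOnlySpan<byte> r) =>
    (BinaryPrimitives.ReadUInt16LittleEndian(r),
     BinaryPrimitives.ReadUInt16LittleEndian(r.Slice(2)),
     BinaryPrimitives.ReadUInt16LittleEndian(r.Slice(4)),
     BinaryPrimitives.ReadUInt16LittleEndian(r.Slice(6)),
     BinaryPrimitives.ReadUInt32LittleEndian(r.Slice(8)),
     BinaryPrimitives.ReadUInt32LittleEndian(r.Slice(12)),
     BinaryPrimitives.ReadUInt16LittleEndian(r.Slice(16)));
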
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace SharpCompress.Common.Zip.Headers
|
||||
{
|
||||
@@ -11,29 +12,29 @@ namespace SharpCompress.Common.Zip.Headers
|
||||
{
|
||||
}
|
||||
|
||||
internal override void Read(BinaryReader reader)
|
||||
internal override async ValueTask Read(Stream stream, CancellationToken cancellationToken)
|
||||
{
|
||||
Version = reader.ReadUInt16();
|
||||
VersionNeededToExtract = reader.ReadUInt16();
|
||||
Flags = (HeaderFlags)reader.ReadUInt16();
|
||||
CompressionMethod = (ZipCompressionMethod)reader.ReadUInt16();
|
||||
LastModifiedTime = reader.ReadUInt16();
|
||||
LastModifiedDate = reader.ReadUInt16();
|
||||
Crc = reader.ReadUInt32();
|
||||
CompressedSize = reader.ReadUInt32();
|
||||
UncompressedSize = reader.ReadUInt32();
|
||||
ushort nameLength = reader.ReadUInt16();
|
||||
ushort extraLength = reader.ReadUInt16();
|
||||
ushort commentLength = reader.ReadUInt16();
|
||||
DiskNumberStart = reader.ReadUInt16();
|
||||
InternalFileAttributes = reader.ReadUInt16();
|
||||
ExternalFileAttributes = reader.ReadUInt32();
|
||||
RelativeOffsetOfEntryHeader = reader.ReadUInt32();
|
||||
Version = await stream.ReadUInt16(cancellationToken);
|
||||
VersionNeededToExtract = await stream.ReadUInt16(cancellationToken);
|
||||
Flags = (HeaderFlags)await stream.ReadUInt16(cancellationToken);
|
||||
CompressionMethod = (ZipCompressionMethod)await stream.ReadUInt16(cancellationToken);
|
||||
LastModifiedTime = await stream.ReadUInt16(cancellationToken);
|
||||
LastModifiedDate = await stream.ReadUInt16(cancellationToken);
|
||||
Crc = await stream.ReadUInt32(cancellationToken);
|
||||
CompressedSize = await stream.ReadUInt32(cancellationToken);
|
||||
UncompressedSize = await stream.ReadUInt32(cancellationToken);
|
||||
ushort nameLength = await stream.ReadUInt16(cancellationToken);
|
||||
ushort extraLength = await stream.ReadUInt16(cancellationToken);
|
||||
ushort commentLength = await stream.ReadUInt16(cancellationToken);
|
||||
DiskNumberStart = await stream.ReadUInt16(cancellationToken);
|
||||
InternalFileAttributes = await stream.ReadUInt16(cancellationToken);
|
||||
ExternalFileAttributes = await stream.ReadUInt32(cancellationToken);
|
||||
RelativeOffsetOfEntryHeader = await stream.ReadUInt32(cancellationToken);
|
||||
|
||||
byte[] name = await stream.ReadBytes(nameLength, cancellationToken);
|
||||
byte[] extra = await stream.ReadBytes(extraLength, cancellationToken);
|
||||
byte[] comment = await stream.ReadBytes(commentLength, cancellationToken);
|
||||
|
||||
byte[] name = reader.ReadBytes(nameLength);
|
||||
byte[] extra = reader.ReadBytes(extraLength);
|
||||
byte[] comment = reader.ReadBytes(commentLength);
|
||||
|
||||
// According to .ZIP File Format Specification
|
||||
//
|
||||
// For example: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
|
||||
@@ -41,7 +42,7 @@ namespace SharpCompress.Common.Zip.Headers
|
||||
// Bit 11: Language encoding flag (EFS). If this bit is set,
|
||||
// the filename and comment fields for this file
|
||||
// MUST be encoded using UTF-8. (see APPENDIX D)
|
||||
|
||||
|
||||
if (Flags.HasFlag(HeaderFlags.Efs))
|
||||
{
|
||||
Name = ArchiveEncoding.DecodeUTF8(name);
|
||||
@@ -93,6 +94,6 @@ namespace SharpCompress.Common.Zip.Headers
|
||||
|
||||
public ushort DiskNumberStart { get; set; }
|
||||
|
||||
public string Comment { get; private set; }
|
||||
public string? Comment { get; private set; }
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,6 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace SharpCompress.Common.Zip.Headers
|
||||
{
|
||||
@@ -10,8 +11,9 @@ namespace SharpCompress.Common.Zip.Headers
|
||||
{
|
||||
}
|
||||
|
||||
internal override void Read(BinaryReader reader)
|
||||
internal override ValueTask Read(Stream stream, CancellationToken cancellationToken)
|
||||
{
|
||||
return new();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,7 @@
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;

namespace SharpCompress.Common.Zip.Headers
{
@@ -11,21 +12,21 @@ namespace SharpCompress.Common.Zip.Headers
{
}

internal override void Read(BinaryReader reader)
internal override async ValueTask Read(Stream stream, CancellationToken cancellationToken)
{
Version = reader.ReadUInt16();
Flags = (HeaderFlags)reader.ReadUInt16();
CompressionMethod = (ZipCompressionMethod)reader.ReadUInt16();
LastModifiedTime = reader.ReadUInt16();
LastModifiedDate = reader.ReadUInt16();
Crc = reader.ReadUInt32();
CompressedSize = reader.ReadUInt32();
UncompressedSize = reader.ReadUInt32();
ushort nameLength = reader.ReadUInt16();
ushort extraLength = reader.ReadUInt16();
byte[] name = reader.ReadBytes(nameLength);
byte[] extra = reader.ReadBytes(extraLength);

Version = await stream.ReadUInt16(cancellationToken);
Flags = (HeaderFlags)await stream.ReadUInt16(cancellationToken);
CompressionMethod = (ZipCompressionMethod)await stream.ReadUInt16(cancellationToken);
LastModifiedTime = await stream.ReadUInt16(cancellationToken);
LastModifiedDate = await stream.ReadUInt16(cancellationToken);
Crc = await stream.ReadUInt32(cancellationToken);
CompressedSize = await stream.ReadUInt32(cancellationToken);
UncompressedSize = await stream.ReadUInt32(cancellationToken);
ushort nameLength = await stream.ReadUInt16(cancellationToken);
ushort extraLength = await stream.ReadUInt16(cancellationToken);
byte[] name = await stream.ReadBytes(nameLength, cancellationToken);
byte[] extra = await stream.ReadBytes(extraLength, cancellationToken);

// According to .ZIP File Format Specification
//
// For example: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
@@ -33,7 +34,7 @@ namespace SharpCompress.Common.Zip.Headers
// Bit 11: Language encoding flag (EFS). If this bit is set,
// the filename and comment fields for this file
// MUST be encoded using UTF-8. (see APPENDIX D)

if (Flags.HasFlag(HeaderFlags.Efs))
{
Name = ArchiveEncoding.DecodeUTF8(name);
@@ -42,7 +43,7 @@ namespace SharpCompress.Common.Zip.Headers
{
Name = ArchiveEncoding.Decode(name);
}

LoadExtra(extra);

var unicodePathExtra = Extra.FirstOrDefault(u => u.Type == ExtraDataType.UnicodePathExtraField);
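The EFS branch above keys off bit 11 of the general purpose bit flag defined in APPNOTE. As a rough illustration of how those bit positions map to flag values (the enum and names here are illustrative, not the actual SharpCompress HeaderFlags definition):

// Illustrative general purpose bit flag values per APPNOTE (not the library's enum).
using System;

[Flags]
internal enum GeneralPurposeBitFlag : ushort
{
    None = 0,
    Encrypted = 1 << 0,          // bit 0: entry data is encrypted
    UsesDataDescriptor = 1 << 3, // bit 3: CRC and sizes follow the data in a data descriptor
    Efs = 1 << 11,               // bit 11: filename and comment are UTF-8 encoded
}

// Decoding then follows the same pattern as the header code above:
//   name bytes are UTF-8 when the Efs bit is set, otherwise the archive's fallback encoding.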
@@ -18,13 +18,25 @@ namespace SharpCompress.Common.Zip.Headers

internal class ExtraData
{
internal ExtraDataType Type { get; set; }
internal ushort Length { get; set; }
internal byte[] DataBytes { get; set; }
public ExtraData(ExtraDataType type, ushort length, byte[] dataBytes)
{
Type = type;
Length = length;
DataBytes = dataBytes;
}

internal ExtraDataType Type { get; }
internal ushort Length { get; }
internal byte[] DataBytes { get; }
}

internal class ExtraUnicodePathExtraField : ExtraData
internal sealed class ExtraUnicodePathExtraField : ExtraData
{
public ExtraUnicodePathExtraField(ExtraDataType type, ushort length, byte[] dataBytes)
: base(type, length, dataBytes)
{
}

internal byte Version => DataBytes[0];

internal byte[] NameCrc32
@@ -49,64 +61,45 @@ namespace SharpCompress.Common.Zip.Headers
}
}

internal class Zip64ExtendedInformationExtraField : ExtraData
internal sealed class Zip64ExtendedInformationExtraField : ExtraData
{

public Zip64ExtendedInformationExtraField(ExtraDataType type, ushort length, byte[] dataBytes)
: base(type, length, dataBytes)
{
Type = type;
Length = length;
DataBytes = dataBytes;
Process();
}

//From the spec values are only in the extradata if the standard
//value is set to 0xFFFF, but if one of the sizes are present, both are.
//Hence if length == 4 volume only
// if length == 8 offset only
// if length == 12 offset + volume
// if length == 16 sizes only
// if length == 20 sizes + volume
// if length == 24 sizes + offset
// if length == 28 everything.
//It is unclear how many of these are used in the wild.

private void Process()
{
if (DataBytes.Length >= 8)
{
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
}

if (DataBytes.Length >= 16)
{
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
}

if (DataBytes.Length >= 24)
{
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(16));
}

if (DataBytes.Length >= 28)
{
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(24));
}

switch (DataBytes.Length)
{
case 4:
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes);
return;
case 8:
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
return;
case 12:
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(8));
return;
case 16:
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
return;
case 20:
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(16));
return;
case 24:
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(16));
return;
case 28:
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(16));
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(24));
return;
break;
default:
throw new ArchiveException("Unexpected size of Zip64 extended information extra field");
throw new ArchiveException($"Unexpected size of Zip64 extended information extra field: {DataBytes.Length}");
}
}
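The length-driven switch above mirrors the Zip64 extended information layout from APPNOTE: each 8-byte size, the 8-byte local header offset, and the 4-byte disk start number appear only when the corresponding field in the fixed header is saturated. A small round-trip sketch with made-up values, just to show the byte layout the length == 28 case expects:

// Illustrative only: build a full 28-byte Zip64 extra-field payload and read it back
// the same way Process() does for the length == 28 case.
using System;
using System.Buffers.Binary;

var data = new byte[28];
BinaryPrimitives.WriteInt64LittleEndian(data.AsSpan(0), 5_000_000_000);  // uncompressed size
BinaryPrimitives.WriteInt64LittleEndian(data.AsSpan(8), 4_200_000_000);  // compressed size
BinaryPrimitives.WriteInt64LittleEndian(data.AsSpan(16), 123_456);       // offset of local entry header
BinaryPrimitives.WriteUInt32LittleEndian(data.AsSpan(24), 0);            // disk (volume) start number

long uncompressed = BinaryPrimitives.ReadInt64LittleEndian(data);
long compressed = BinaryPrimitives.ReadInt64LittleEndian(data.AsSpan(8));
long offset = BinaryPrimitives.ReadInt64LittleEndian(data.AsSpan(16));
uint volume = BinaryPrimitives.ReadUInt32LittleEndian(data.AsSpan(24));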
@@ -120,30 +113,12 @@ namespace SharpCompress.Common.Zip.Headers
{
internal static ExtraData Create(ExtraDataType type, ushort length, byte[] extraData)
{
switch (type)
return type switch
{
case ExtraDataType.UnicodePathExtraField:
return new ExtraUnicodePathExtraField
{
Type = type,
Length = length,
DataBytes = extraData
};
case ExtraDataType.Zip64ExtendedInformationExtraField:
return new Zip64ExtendedInformationExtraField
(
type,
length,
extraData
);
default:
return new ExtraData
{
Type = type,
Length = length,
DataBytes = extraData
};
}
ExtraDataType.UnicodePathExtraField => new ExtraUnicodePathExtraField(type, length, extraData),
ExtraDataType.Zip64ExtendedInformationExtraField => new Zip64ExtendedInformationExtraField(type, length, extraData),
_ => new ExtraData(type, length, extraData)
};
}
}
}
@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

namespace SharpCompress.Common.Zip.Headers
{
@@ -10,7 +12,7 @@ namespace SharpCompress.Common.Zip.Headers
{
}

internal override void Read(BinaryReader reader)
internal override ValueTask Read(Stream stream, CancellationToken cancellationToken)
{
throw new NotImplementedException();
}
@@ -1,5 +1,6 @@
using System;
using System.IO;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

namespace SharpCompress.Common.Zip.Headers
{
@@ -10,18 +11,18 @@ namespace SharpCompress.Common.Zip.Headers
{
}

internal override void Read(BinaryReader reader)
internal override async ValueTask Read(Stream stream, CancellationToken cancellationToken)
{
SizeOfDirectoryEndRecord = (long)reader.ReadUInt64();
VersionMadeBy = reader.ReadUInt16();
VersionNeededToExtract = reader.ReadUInt16();
VolumeNumber = reader.ReadUInt32();
FirstVolumeWithDirectory = reader.ReadUInt32();
TotalNumberOfEntriesInDisk = (long)reader.ReadUInt64();
TotalNumberOfEntries = (long)reader.ReadUInt64();
DirectorySize = (long)reader.ReadUInt64();
DirectoryStartOffsetRelativeToDisk = (long)reader.ReadUInt64();
DataSector = reader.ReadBytes((int)(SizeOfDirectoryEndRecord - SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS));
SizeOfDirectoryEndRecord = (long)await stream.ReadUInt64(cancellationToken);
VersionMadeBy = await stream.ReadUInt16(cancellationToken);
VersionNeededToExtract = await stream.ReadUInt16(cancellationToken);
VolumeNumber = await stream.ReadUInt32(cancellationToken);
FirstVolumeWithDirectory = await stream.ReadUInt32(cancellationToken);
TotalNumberOfEntriesInDisk = (long)await stream.ReadUInt64(cancellationToken);
TotalNumberOfEntries = (long)await stream.ReadUInt64(cancellationToken);
DirectorySize = (long)await stream.ReadUInt64(cancellationToken);
DirectoryStartOffsetRelativeToDisk = (long)await stream.ReadUInt64(cancellationToken);
DataSector = await stream.ReadBytes((int)(SizeOfDirectoryEndRecord - SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS), cancellationToken);
}

private const int SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS = 44;
@@ -44,6 +45,6 @@ namespace SharpCompress.Common.Zip.Headers

public long DirectoryStartOffsetRelativeToDisk { get; private set; }

public byte[] DataSector { get; private set; }
public byte[]? DataSector { get; private set; }
}
}
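The 44 in SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS is the fixed portion of the Zip64 end of central directory record that follows the 8-byte "size of record" field; whatever remains of the record (read into DataSector above) is the variable-length zip64 extensible data sector. A worked breakdown, with field sizes taken from APPNOTE:

// Fixed Zip64 EOCD fields that follow the 8-byte "size of record" value:
//   version made by                               2 bytes
//   version needed to extract                     2 bytes
//   number of this disk                           4 bytes
//   disk with the start of the central directory  4 bytes
//   total number of entries on this disk          8 bytes
//   total number of entries                       8 bytes
//   size of the central directory                 8 bytes
//   offset of start of central directory          8 bytes
const int fixedZip64EocdFieldBytes = 2 + 2 + 4 + 4 + 8 + 8 + 8 + 8; // = 44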
@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;

namespace SharpCompress.Common.Zip.Headers
{
@@ -9,11 +11,11 @@ namespace SharpCompress.Common.Zip.Headers
{
}

internal override void Read(BinaryReader reader)
internal override async ValueTask Read(Stream stream, CancellationToken cancellationToken)
{
FirstVolumeWithDirectory = reader.ReadUInt32();
RelativeOffsetOfTheEndOfDirectoryRecord = (long)reader.ReadUInt64();
TotalNumberOfVolumes = reader.ReadUInt32();
FirstVolumeWithDirectory = await stream.ReadUInt32(cancellationToken);
RelativeOffsetOfTheEndOfDirectoryRecord = (long)await stream.ReadUInt64(cancellationToken);
TotalNumberOfVolumes = await stream.ReadUInt32(cancellationToken);
}

public uint FirstVolumeWithDirectory { get; private set; }
@@ -1,4 +1,6 @@
using System;
#nullable disable

using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
@@ -18,7 +20,7 @@ namespace SharpCompress.Common.Zip.Headers
{
get
{
if (Name.EndsWith("/"))
if (Name.EndsWith('/'))
{
return true;
}
@@ -26,7 +28,7 @@ namespace SharpCompress.Common.Zip.Headers
//.NET Framework 4.5 : System.IO.Compression::CreateFromDirectory() probably writes backslashes to headers
return CompressedSize == 0
&& UncompressedSize == 0
&& Name.EndsWith("\\");
&& Name.EndsWith('\\');
}
}

@@ -52,7 +54,7 @@ namespace SharpCompress.Common.Zip.Headers

internal PkwareTraditionalEncryptionData ComposeEncryptionData(Stream archiveStream)
{
if (archiveStream == null)
if (archiveStream is null)
{
throw new ArgumentNullException(nameof(archiveStream));
}
@@ -60,7 +62,7 @@ namespace SharpCompress.Common.Zip.Headers
var buffer = new byte[12];
archiveStream.ReadFully(buffer);

PkwareTraditionalEncryptionData encryptionData = PkwareTraditionalEncryptionData.ForRead(Password, this, buffer);
PkwareTraditionalEncryptionData encryptionData = PkwareTraditionalEncryptionData.ForRead(Password!, this, buffer);

return encryptionData;
}
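The 12 bytes read above are the PKWARE "traditional" (ZipCrypto) encryption header that precedes an encrypted entry's data. Per APPNOTE, after decryption its last byte serves as a quick password check. A hedged sketch of that check, not the SharpCompress implementation:

// Illustrative ZipCrypto password check: decryptedHeader is the 12-byte header
// after decryption with the candidate password.
internal static class ZipCryptoCheck
{
    internal static bool PasswordLooksCorrect(byte[] decryptedHeader, uint crc32, ushort lastModFileTime, bool usesDataDescriptor)
    {
        // When bit 3 (data descriptor) is set the CRC is not known up front,
        // so the high byte of the last-modified time is compared instead.
        byte expected = usesDataDescriptor
            ? (byte)(lastModFileTime >> 8)
            : (byte)(crc32 >> 24);
        return decryptedHeader[11] == expected;
    }
}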
@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;

namespace SharpCompress.Common.Zip.Headers
{
@@ -12,7 +14,7 @@ namespace SharpCompress.Common.Zip.Headers

internal ZipHeaderType ZipHeaderType { get; }

internal abstract void Read(BinaryReader reader);
internal abstract ValueTask Read(Stream stream, CancellationToken cancellationToken);

internal bool HasData { get; set; }
}
Some files were not shown because too many files have changed in this diff.