Compare commits


4 Commits

Author         SHA1        Message                            Date
Adam Hathcock  d6a6085d75  Update docker build                2019-10-22 17:28:09 +01:00
Adam Hathcock  b5a897819d  Fix pack                           2019-10-22 17:26:08 +01:00
Adam Hathcock  9e842ee8ec  Fix cake build                     2019-10-22 17:23:58 +01:00
Adam Hathcock  a04a0a5912  Use RecyclableMemoryStreamManager  2019-10-22 17:13:44 +01:00
304 changed files with 7792 additions and 11282 deletions

.circleci/config.yml (new file, 16 lines)

@@ -0,0 +1,16 @@
version: 2
jobs:
build:
docker:
- image: mcr.microsoft.com/dotnet/core/sdk:2.2-alpine
steps:
- checkout
- run:
name: Install Cake
command: |
dotnet tool install -g Cake.Tool
echo 'export PATH=$PATH:/root/.dotnet/tools' >> $BASH_ENV
source $BASH_ENV
- run:
name: Build
command: dotnet cake build.cake

.config/dotnet-tools.json (deleted)

@@ -1,12 +0,0 @@
{
"version": 1,
"isRoot": true,
"tools": {
"dotnet-format": {
"version": "4.1.131201",
"commands": [
"dotnet-format"
]
}
}
}

GitHub Actions workflow in .github/workflows (deleted)

@@ -1,24 +0,0 @@
name: SharpCompress
on: [push, pull_request]
jobs:
build:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [windows-latest, ubuntu-latest]
steps:
- uses: actions/checkout@v1
- uses: actions/setup-dotnet@v1
with:
dotnet-version: 5.0.101
- run: dotnet run -p build/build.csproj
- uses: actions/upload-artifact@v2
with:
name: ${{ matrix.os }}-sharpcompress.nupkg
path: artifacts/*
- uses: actions/upload-artifact@v2
with:
name: ${{ matrix.os }}-sharpcompress.snupkg
path: artifacts/*

.gitignore (1 line changed)

@@ -17,4 +17,3 @@ tools
.idea/
.DS_Store
*.snupkg

FORMATS.md

@@ -19,6 +19,7 @@
| Tar.XZ | LZMA2 | Decompress | TarArchive | TarReader | TarWriter (3) |
| GZip (single file) | DEFLATE | Both | GZipArchive | GZipReader | GZipWriter |
| 7Zip (4) | LZMA, LZMA2, BZip2, PPMd, BCJ, BCJ2, Deflate | Decompress | SevenZipArchive | N/A | N/A |
| LZip (single file) (5) | LZip (LZMA) | Both | LZipArchive | LZipReader | LZipWriter |
1. SOLID Rars are only supported in the RarReader API.
2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading/writing is supported but only with seekable streams as the Zip spec doesn't support Zip64 data in post data descriptors. Deflate64 is only supported for reading.
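Note 2 above says the Zip reader handles pkware and WinZip AES encryption. As a rough illustration of how a caller supplies the password, here is a minimal C# sketch using the archive API; the file name secret.zip, the password, and the out folder are hypothetical.

using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Readers;

class EncryptedZipExample
{
    static void Main()
    {
        // The password for an encrypted zip is passed through ReaderOptions.
        using (var archive = ZipArchive.Open("secret.zip", new ReaderOptions { Password = "p@ssw0rd" }))
        {
            foreach (var entry in archive.Entries)
            {
                if (!entry.IsDirectory)
                {
                    // ExtractFullPath recreates the directory structure stored in the entry key.
                    entry.WriteToDirectory("out", new ExtractionOptions { ExtractFullPath = true, Overwrite = true });
                }
            }
        }
    }
}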

README.md

@@ -1,11 +1,14 @@
# SharpCompress
SharpCompress is a compression library in pure C# for .NET Standard 2.0, 2.1, .NET Core 3.1 and .NET 5.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
SharpCompress is a compression library in pure C# for .NET Standard 1.4 and 2.0 and .NET 4.6 that can unrar, un7zip, unzip, untar unbzip2 and ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip are implemented.
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
GitHub Actions Build -
[![GitHubActions](https://github.com/adamhathcock/sharpcompress/workflows/SharpCompress/badge.svg)](https://circleci.com/gh/adamhathcock/sharpcompress)
AppVeyor Build -
[![Build status](https://ci.appveyor.com/api/projects/status/voxg971oemmvxh1e/branch/master?svg=true)](https://ci.appveyor.com/project/adamhathcock/sharpcompress/branch/master)
Circle CI Build -
[![CircleCI](https://circleci.com/gh/adamhathcock/sharpcompress.svg?style=svg)](https://circleci.com/gh/adamhathcock/sharpcompress)
## Need Help?
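The README paragraph above highlights forward-only reading of non-seekable streams as the main feature. A minimal sketch of that reader API, with a local file standing in for a download stream (archive.tar.gz and the out folder are hypothetical):

using System.IO;
using SharpCompress.Common;
using SharpCompress.Readers;

class ForwardOnlyReadExample
{
    static void Main()
    {
        // Any readable stream works here; no seeking is required.
        using (Stream stream = File.OpenRead("archive.tar.gz"))
        using (var reader = ReaderFactory.Open(stream))
        {
            while (reader.MoveToNextEntry())
            {
                if (!reader.Entry.IsDirectory)
                {
                    // Each entry is decompressed as the underlying stream is consumed.
                    reader.WriteEntryToDirectory("out", new ExtractionOptions { ExtractFullPath = true, Overwrite = true });
                }
            }
        }
    }
}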

SharpCompress.sln

@@ -13,8 +13,6 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SharpCompress", "src\SharpC
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SharpCompress.Test", "tests\SharpCompress.Test\SharpCompress.Test.csproj", "{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "build", "build\build.csproj", "{D4D613CB-5E94-47FB-85BE-B8423D20C545}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -29,10 +27,6 @@ Global
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Debug|Any CPU.Build.0 = Debug|Any CPU
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Release|Any CPU.ActiveCfg = Release|Any CPU
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Release|Any CPU.Build.0 = Release|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Debug|Any CPU.Build.0 = Debug|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Release|Any CPU.ActiveCfg = Release|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE

SharpCompress.sln.DotSettings (ReSharper settings)

@@ -126,7 +126,4 @@
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EAddAccessorOwnerDeclarationBracesMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002ECSharpPlaceAttributeOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateBlankLinesAroundFieldToBlankLinesAroundProperty/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateThisQualifierSettings/@EntryIndexedValue">True</s:Boolean>
<s:String x:Key="/Default/Environment/UnitTesting/UnitTestSessionStore/Sessions/=6af8f80e_002D9fdd_002D4223_002D8e02_002D473db916f9b2/@EntryIndexedValue">&lt;SessionState ContinuousTestingIsOn="False" ContinuousTestingMode="0" FrameworkVersion="{x:Null}" IsLocked="False" Name="All tests from Solution" PlatformMonoPreference="{x:Null}" PlatformType="{x:Null}" xmlns="urn:schemas-jetbrains-com:jetbrains-ut-session" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"&gt;
&lt;Solution /&gt;
&lt;/SessionState&gt;</s:String></wpf:ResourceDictionary>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateThisQualifierSettings/@EntryIndexedValue">True</s:Boolean></wpf:ResourceDictionary>

appveyor.yml (new file, 20 lines)

@@ -0,0 +1,20 @@
version: '{build}'
image: Visual Studio 2017
pull_requests:
do_not_increment_build_number: true
branches:
only:
- master
nuget:
disable_publish_on_pr: true
build_script:
- ps: .\build.ps1
test: off
artifacts:
- path: src\SharpCompress\bin\Release\*.nupkg

build.cake (new file, 81 lines)

@@ -0,0 +1,81 @@
var target = Argument("target", "Default");
var tag = Argument("tag", "cake");
Task("Restore")
.Does(() =>
{
DotNetCoreRestore(".");
});
Task("Build")
.IsDependentOn("Restore")
.Does(() =>
{
var settings = new DotNetCoreBuildSettings
{
Framework = "netstandard1.4",
Configuration = "Release",
NoRestore = true
};
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
if (IsRunningOnWindows())
{
settings.Framework = "net46";
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
}
settings.Framework = "netstandard2.0";
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
});
Task("Test")
.IsDependentOn("Build")
.Does(() =>
{
var files = GetFiles("tests/**/*.csproj");
foreach(var file in files)
{
var settings = new DotNetCoreTestSettings
{
Configuration = "Release",
Framework = "netcoreapp2.2"
};
DotNetCoreTest(file.ToString(), settings);
}
});
Task("Pack")
.IsDependentOn("Build")
.Does(() =>
{
if (IsRunningOnWindows())
{
var settings = new DotNetCorePackSettings
{
Configuration = "Release",
NoBuild = true
};
DotNetCorePack("src/SharpCompress/SharpCompress.csproj", settings);
}
else
{
Information("Skipping Pack as this is not Windows");
}
});
Task("Default")
.IsDependentOn("Restore")
.IsDependentOn("Build")
.IsDependentOn("Test")
.IsDependentOn("Pack");
Task("RunTests")
.IsDependentOn("Restore")
.IsDependentOn("Build")
.IsDependentOn("Test");
RunTarget(target);

build.ps1 (new file, 228 lines)

@@ -0,0 +1,228 @@
##########################################################################
# This is the Cake bootstrapper script for PowerShell.
# This file was downloaded from https://github.com/cake-build/resources
# Feel free to change this file to fit your needs.
##########################################################################
<#
.SYNOPSIS
This is a Powershell script to bootstrap a Cake build.
.DESCRIPTION
This Powershell script will download NuGet if missing, restore NuGet tools (including Cake)
and execute your Cake build script with the parameters you provide.
.PARAMETER Script
The build script to execute.
.PARAMETER Target
The build script target to run.
.PARAMETER Configuration
The build configuration to use.
.PARAMETER Verbosity
Specifies the amount of information to be displayed.
.PARAMETER Experimental
Tells Cake to use the latest Roslyn release.
.PARAMETER WhatIf
Performs a dry run of the build script.
No tasks will be executed.
.PARAMETER Mono
Tells Cake to use the Mono scripting engine.
.PARAMETER SkipToolPackageRestore
Skips restoring of packages.
.PARAMETER ScriptArgs
Remaining arguments are added here.
.LINK
http://cakebuild.net
#>
[CmdletBinding()]
Param(
[string]$Script = "build.cake",
[string]$Target = "Default",
[ValidateSet("Release", "Debug")]
[string]$Configuration = "Release",
[ValidateSet("Quiet", "Minimal", "Normal", "Verbose", "Diagnostic")]
[string]$Verbosity = "Verbose",
[switch]$Experimental,
[Alias("DryRun","Noop")]
[switch]$WhatIf,
[switch]$Mono,
[switch]$SkipToolPackageRestore,
[Parameter(Position=0,Mandatory=$false,ValueFromRemainingArguments=$true)]
[string[]]$ScriptArgs
)
[Reflection.Assembly]::LoadWithPartialName("System.Security") | Out-Null
function MD5HashFile([string] $filePath)
{
if ([string]::IsNullOrEmpty($filePath) -or !(Test-Path $filePath -PathType Leaf))
{
return $null
}
[System.IO.Stream] $file = $null;
[System.Security.Cryptography.MD5] $md5 = $null;
try
{
$md5 = [System.Security.Cryptography.MD5]::Create()
$file = [System.IO.File]::OpenRead($filePath)
return [System.BitConverter]::ToString($md5.ComputeHash($file))
}
finally
{
if ($file -ne $null)
{
$file.Dispose()
}
}
}
Write-Host "Preparing to run build script..."
if(!$PSScriptRoot){
$PSScriptRoot = Split-Path $MyInvocation.MyCommand.Path -Parent
}
$TOOLS_DIR = Join-Path $PSScriptRoot "tools"
$ADDINS_DIR = Join-Path $TOOLS_DIR "addins"
$MODULES_DIR = Join-Path $TOOLS_DIR "modules"
$NUGET_EXE = Join-Path $TOOLS_DIR "nuget.exe"
$CAKE_EXE = Join-Path $TOOLS_DIR "Cake/Cake.exe"
$NUGET_URL = "https://dist.nuget.org/win-x86-commandline/latest/nuget.exe"
$PACKAGES_CONFIG = Join-Path $TOOLS_DIR "packages.config"
$PACKAGES_CONFIG_MD5 = Join-Path $TOOLS_DIR "packages.config.md5sum"
$ADDINS_PACKAGES_CONFIG = Join-Path $ADDINS_DIR "packages.config"
$MODULES_PACKAGES_CONFIG = Join-Path $MODULES_DIR "packages.config"
# Should we use mono?
$UseMono = "";
if($Mono.IsPresent) {
Write-Verbose -Message "Using the Mono based scripting engine."
$UseMono = "-mono"
}
# Should we use the new Roslyn?
$UseExperimental = "";
if($Experimental.IsPresent -and !($Mono.IsPresent)) {
Write-Verbose -Message "Using experimental version of Roslyn."
$UseExperimental = "-experimental"
}
# Is this a dry run?
$UseDryRun = "";
if($WhatIf.IsPresent) {
$UseDryRun = "-dryrun"
}
# Make sure tools folder exists
if ((Test-Path $PSScriptRoot) -and !(Test-Path $TOOLS_DIR)) {
Write-Verbose -Message "Creating tools directory..."
New-Item -Path $TOOLS_DIR -Type directory | out-null
}
# Make sure that packages.config exist.
if (!(Test-Path $PACKAGES_CONFIG)) {
Write-Verbose -Message "Downloading packages.config..."
try { (New-Object System.Net.WebClient).DownloadFile("http://cakebuild.net/download/bootstrapper/packages", $PACKAGES_CONFIG) } catch {
Throw "Could not download packages.config."
}
}
# Try find NuGet.exe in path if not exists
if (!(Test-Path $NUGET_EXE)) {
Write-Verbose -Message "Trying to find nuget.exe in PATH..."
$existingPaths = $Env:Path -Split ';' | Where-Object { (![string]::IsNullOrEmpty($_)) -and (Test-Path $_ -PathType Container) }
$NUGET_EXE_IN_PATH = Get-ChildItem -Path $existingPaths -Filter "nuget.exe" | Select -First 1
if ($NUGET_EXE_IN_PATH -ne $null -and (Test-Path $NUGET_EXE_IN_PATH.FullName)) {
Write-Verbose -Message "Found in PATH at $($NUGET_EXE_IN_PATH.FullName)."
$NUGET_EXE = $NUGET_EXE_IN_PATH.FullName
}
}
# Try download NuGet.exe if not exists
if (!(Test-Path $NUGET_EXE)) {
Write-Verbose -Message "Downloading NuGet.exe..."
try {
(New-Object System.Net.WebClient).DownloadFile($NUGET_URL, $NUGET_EXE)
} catch {
Throw "Could not download NuGet.exe."
}
}
# Save nuget.exe path to environment to be available to child processed
$ENV:NUGET_EXE = $NUGET_EXE
# Restore tools from NuGet?
if(-Not $SkipToolPackageRestore.IsPresent) {
Push-Location
Set-Location $TOOLS_DIR
# Check for changes in packages.config and remove installed tools if true.
[string] $md5Hash = MD5HashFile($PACKAGES_CONFIG)
if((!(Test-Path $PACKAGES_CONFIG_MD5)) -Or
($md5Hash -ne (Get-Content $PACKAGES_CONFIG_MD5 ))) {
Write-Verbose -Message "Missing or changed package.config hash..."
Remove-Item * -Recurse -Exclude packages.config,nuget.exe
}
Write-Verbose -Message "Restoring tools from NuGet..."
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$TOOLS_DIR`""
if ($LASTEXITCODE -ne 0) {
Throw "An error occured while restoring NuGet tools."
}
else
{
$md5Hash | Out-File $PACKAGES_CONFIG_MD5 -Encoding "ASCII"
}
Write-Verbose -Message ($NuGetOutput | out-string)
Pop-Location
}
# Restore addins from NuGet
if (Test-Path $ADDINS_PACKAGES_CONFIG) {
Push-Location
Set-Location $ADDINS_DIR
Write-Verbose -Message "Restoring addins from NuGet..."
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$ADDINS_DIR`""
if ($LASTEXITCODE -ne 0) {
Throw "An error occured while restoring NuGet addins."
}
Write-Verbose -Message ($NuGetOutput | out-string)
Pop-Location
}
# Restore modules from NuGet
if (Test-Path $MODULES_PACKAGES_CONFIG) {
Push-Location
Set-Location $MODULES_DIR
Write-Verbose -Message "Restoring modules from NuGet..."
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$MODULES_DIR`""
if ($LASTEXITCODE -ne 0) {
Throw "An error occured while restoring NuGet modules."
}
Write-Verbose -Message ($NuGetOutput | out-string)
Pop-Location
}
# Make sure that Cake has been installed.
if (!(Test-Path $CAKE_EXE)) {
Throw "Could not find Cake.exe at $CAKE_EXE"
}
# Start Cake
Write-Host "Running build script..."
Invoke-Expression "& `"$CAKE_EXE`" `"$Script`" -target=`"$Target`" -configuration=`"$Configuration`" -verbosity=`"$Verbosity`" $UseMono $UseDryRun $UseExperimental $ScriptArgs"
exit $LASTEXITCODE

build.sh (new file, 42 lines, executable)

@@ -0,0 +1,42 @@
#!/usr/bin/env bash
##########################################################################
# This is the Cake bootstrapper script for Linux and OS X.
# This file was downloaded from https://github.com/cake-build/resources
# Feel free to change this file to fit your needs.
##########################################################################
# Define directories.
SCRIPT_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
TOOLS_DIR=$SCRIPT_DIR/tools
CAKE_VERSION=0.27.1
CAKE_DLL=$TOOLS_DIR/Cake.CoreCLR.$CAKE_VERSION/Cake.dll
# Make sure the tools folder exist.
if [ ! -d "$TOOLS_DIR" ]; then
mkdir "$TOOLS_DIR"
fi
###########################################################################
# INSTALL CAKE
###########################################################################
if [ ! -f "$CAKE_DLL" ]; then
curl -Lsfo Cake.CoreCLR.zip "https://www.nuget.org/api/v2/package/Cake.CoreCLR/$CAKE_VERSION" && unzip -q Cake.CoreCLR.zip -d "$TOOLS_DIR/Cake.CoreCLR.$CAKE_VERSION" && rm -f Cake.CoreCLR.zip
if [ $? -ne 0 ]; then
echo "An error occured while installing Cake."
exit 1
fi
fi
# Make sure that Cake has been installed.
if [ ! -f "$CAKE_DLL" ]; then
echo "Could not find Cake.exe at '$CAKE_DLL'."
exit 1
fi
###########################################################################
# RUN BUILD SCRIPT
###########################################################################
# Start Cake
exec dotnet "$CAKE_DLL" "$@"

build/Program.cs (deleted)

@@ -1,83 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.InteropServices;
using GlobExpressions;
using static Bullseye.Targets;
using static SimpleExec.Command;
class Program
{
private const string Clean = "clean";
private const string Format = "format";
private const string Build = "build";
private const string Test = "test";
private const string Publish = "publish";
static void Main(string[] args)
{
Target(Clean,
ForEach("**/bin", "**/obj"),
dir =>
{
IEnumerable<string> GetDirectories(string d)
{
return Glob.Directories(".", d);
}
void RemoveDirectory(string d)
{
if (Directory.Exists(d))
{
Console.WriteLine(d);
Directory.Delete(d, true);
}
}
foreach (var d in GetDirectories(dir))
{
RemoveDirectory(d);
}
});
Target(Format, () =>
{
Run("dotnet", "tool restore");
Run("dotnet", "format --check");
});
Target(Build, DependsOn(Format),
framework =>
{
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows) && framework == "net46")
{
return;
}
Run("dotnet", "build src/SharpCompress/SharpCompress.csproj -c Release");
});
Target(Test, DependsOn(Build), ForEach("net5.0"),
framework =>
{
IEnumerable<string> GetFiles(string d)
{
return Glob.Files(".", d);
}
foreach (var file in GetFiles("**/*.Test.csproj"))
{
Run("dotnet", $"test {file} -c Release -f {framework}");
}
});
Target(Publish, DependsOn(Test),
() =>
{
Run("dotnet", "pack src/SharpCompress/SharpCompress.csproj -c Release -o artifacts/");
});
Target("default", DependsOn(Publish), () => Console.WriteLine("Done!"));
RunTargetsAndExit(args);
}
}

build/build.csproj (deleted)

@@ -1,14 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net5.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Bullseye" Version="3.6.0" />
<PackageReference Include="Glob" Version="1.1.8" />
<PackageReference Include="SimpleExec" Version="6.4.0" />
</ItemGroup>
</Project>

global.json (deleted)

@@ -1,5 +0,0 @@
{
"sdk": {
"version": "5.0.101"
}
}

src/SharpCompress/Algorithms/Adler32.cs (deleted)

@@ -1,285 +0,0 @@
// Copyright (c) Six Labors and contributors.
// Licensed under the GNU Affero General Public License, Version 3.
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
#if !NETSTANDARD2_0 && !NETSTANDARD2_1
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
#endif
namespace SharpCompress.Algorithms
{
/// <summary>
/// Calculates the 32 bit Adler checksum of a given buffer according to
/// RFC 1950. ZLIB Compressed Data Format Specification version 3.3)
/// </summary>
internal static class Adler32
{
/// <summary>
/// The default initial seed value of a Adler32 checksum calculation.
/// </summary>
public const uint SeedValue = 1U;
#if !NETSTANDARD2_0 && !NETSTANDARD2_1
private const int MinBufferSize = 64;
#endif
// Largest prime smaller than 65536
private const uint BASE = 65521;
// NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1
private const uint NMAX = 5552;
/// <summary>
/// Calculates the Adler32 checksum with the bytes taken from the span.
/// </summary>
/// <param name="buffer">The readonly span of bytes.</param>
/// <returns>The <see cref="uint"/>.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static uint Calculate(ReadOnlySpan<byte> buffer)
{
return Calculate(SeedValue, buffer);
}
/// <summary>
/// Calculates the Adler32 checksum with the bytes taken from the span and seed.
/// </summary>
/// <param name="adler">The input Adler32 value.</param>
/// <param name="buffer">The readonly span of bytes.</param>
/// <returns>The <see cref="uint"/>.</returns>
public static uint Calculate(uint adler, ReadOnlySpan<byte> buffer)
{
if (buffer.IsEmpty)
{
return SeedValue;
}
#if !NETSTANDARD2_0 && !NETSTANDARD2_1
if (Sse3.IsSupported && buffer.Length >= MinBufferSize)
{
return CalculateSse(adler, buffer);
}
return CalculateScalar(adler, buffer);
#else
return CalculateScalar(adler, buffer);
#endif
}
// Based on https://github.com/chromium/chromium/blob/master/third_party/zlib/adler32_simd.c
#if !NETSTANDARD2_0 && !NETSTANDARD2_1
private static unsafe uint CalculateSse(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
// Process the data in blocks.
const int BLOCK_SIZE = 1 << 5;
uint length = (uint)buffer.Length;
uint blocks = length / BLOCK_SIZE;
length -= blocks * BLOCK_SIZE;
int index = 0;
fixed (byte* bufferPtr = &buffer[0])
{
index += (int)blocks * BLOCK_SIZE;
var localBufferPtr = bufferPtr;
// _mm_setr_epi8 on x86
var tap1 = Vector128.Create(32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17);
var tap2 = Vector128.Create(16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1);
Vector128<byte> zero = Vector128<byte>.Zero;
var ones = Vector128.Create((short)1);
while (blocks > 0)
{
uint n = NMAX / BLOCK_SIZE; /* The NMAX constraint. */
if (n > blocks)
{
n = blocks;
}
blocks -= n;
// Process n blocks of data. At most NMAX data bytes can be
// processed before s2 must be reduced modulo BASE.
Vector128<int> v_ps = Vector128.CreateScalar(s1 * n).AsInt32();
Vector128<int> v_s2 = Vector128.CreateScalar(s2).AsInt32();
Vector128<int> v_s1 = Vector128<int>.Zero;
do
{
// Load 32 input bytes.
Vector128<byte> bytes1 = Sse3.LoadDquVector128(localBufferPtr);
Vector128<byte> bytes2 = Sse3.LoadDquVector128(localBufferPtr + 16);
// Add previous block byte sum to v_ps.
v_ps = Sse2.Add(v_ps, v_s1);
// Horizontally add the bytes for s1, multiply-adds the
// bytes by [ 32, 31, 30, ... ] for s2.
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes1, zero).AsInt32());
Vector128<short> mad1 = Ssse3.MultiplyAddAdjacent(bytes1, tap1);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad1, ones));
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes2, zero).AsInt32());
Vector128<short> mad2 = Ssse3.MultiplyAddAdjacent(bytes2, tap2);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad2, ones));
localBufferPtr += BLOCK_SIZE;
}
while (--n > 0);
v_s2 = Sse2.Add(v_s2, Sse2.ShiftLeftLogical(v_ps, 5));
// Sum epi32 ints v_s1(s2) and accumulate in s1(s2).
const byte S2301 = 0b1011_0001; // A B C D -> B A D C
const byte S1032 = 0b0100_1110; // A B C D -> C D A B
v_s1 = Sse2.Add(v_s1, Sse2.Shuffle(v_s1, S2301));
v_s1 = Sse2.Add(v_s1, Sse2.Shuffle(v_s1, S1032));
s1 += (uint)v_s1.ToScalar();
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S2301));
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S1032));
s2 = (uint)v_s2.ToScalar();
// Reduce.
s1 %= BASE;
s2 %= BASE;
}
}
ref byte bufferRef = ref MemoryMarshal.GetReference(buffer);
if (length > 0)
{
if (length >= 16)
{
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
length -= 16;
}
while (length-- > 0)
{
s2 += s1 += Unsafe.Add(ref bufferRef, index++);
}
if (s1 >= BASE)
{
s1 -= BASE;
}
s2 %= BASE;
}
return s1 | (s2 << 16);
}
#endif
private static uint CalculateScalar(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
uint k;
ref byte bufferRef = ref MemoryMarshal.GetReference<byte>(buffer);
uint length = (uint)buffer.Length;
int index = 0;
while (length > 0)
{
k = length < NMAX ? length : NMAX;
length -= k;
while (k >= 16)
{
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
k -= 16;
}
if (k != 0)
{
do
{
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
}
while (--k != 0);
}
s1 %= BASE;
s2 %= BASE;
}
return (s2 << 16) | s1;
}
}
}
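The removed Adler32 class above implements the RFC 1950 checksum: s1 is the running byte sum and s2 the running sum of s1, both reduced modulo 65521. A tiny standalone sketch of that scalar recurrence (not part of the public SharpCompress API, which keeps this class internal):

using System;
using System.Text;

class Adler32Demo
{
    // Largest prime below 65536, the same BASE constant used above.
    const uint Base = 65521;

    static uint Adler32(byte[] data)
    {
        uint s1 = 1, s2 = 0;
        foreach (byte b in data)
        {
            s1 = (s1 + b) % Base;  // running byte sum
            s2 = (s2 + s1) % Base; // running sum of the running sums
        }
        return (s2 << 16) | s1;
    }

    static void Main()
    {
        // Prints 024D0127, the Adler-32 of "abc".
        Console.WriteLine(Adler32(Encoding.ASCII.GetBytes("abc")).ToString("X8"));
    }
}

Reducing on every byte is slower than the NMAX batching in the class above, but it produces the same value.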

src/SharpCompress/Archives/AbstractArchive.cs

@@ -14,16 +14,16 @@ namespace SharpCompress.Archives
private readonly LazyReadOnlyCollection<TVolume> lazyVolumes;
private readonly LazyReadOnlyCollection<TEntry> lazyEntries;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionBegin;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionEnd;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionBegin;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionEnd;
public event EventHandler<CompressedBytesReadEventArgs>? CompressedBytesRead;
public event EventHandler<FilePartExtractionBeginEventArgs>? FilePartExtractionBegin;
public event EventHandler<CompressedBytesReadEventArgs> CompressedBytesRead;
public event EventHandler<FilePartExtractionBeginEventArgs> FilePartExtractionBegin;
protected ReaderOptions ReaderOptions { get; }
private bool disposed;
internal AbstractArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerOptions)
{
Type = type;
@@ -48,14 +48,12 @@ namespace SharpCompress.Archives
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
}
#nullable disable
internal AbstractArchive(ArchiveType type)
{
Type = type;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
}
#nullable enable
public ArchiveType Type { get; }
@@ -81,29 +79,29 @@ namespace SharpCompress.Archives
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
/// </summary>
public virtual ICollection<TEntry> Entries => lazyEntries;
public virtual ICollection<TEntry> Entries { get { return lazyEntries; } }
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveVolumes across the one or many parts of the RarArchive.
/// </summary>
public ICollection<TVolume> Volumes => lazyVolumes;
public ICollection<TVolume> Volumes { get { return lazyVolumes; } }
/// <summary>
/// The total size of the files compressed in the archive.
/// </summary>
public virtual long TotalSize => Entries.Aggregate(0L, (total, cf) => total + cf.CompressedSize);
public virtual long TotalSize { get { return Entries.Aggregate(0L, (total, cf) => total + cf.CompressedSize); } }
/// <summary>
/// The total size of the files as uncompressed in the archive.
/// </summary>
public virtual long TotalUncompressSize => Entries.Aggregate(0L, (total, cf) => total + cf.Size);
public virtual long TotalUncompressSize { get { return Entries.Aggregate(0L, (total, cf) => total + cf.Size); } }
protected abstract IEnumerable<TVolume> LoadVolumes(IEnumerable<Stream> streams);
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);
IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();
IEnumerable<IArchiveEntry> IArchive.Entries { get { return Entries.Cast<IArchiveEntry>(); } }
IEnumerable<IVolume> IArchive.Volumes => lazyVolumes.Cast<IVolume>();
IEnumerable<IVolume> IArchive.Volumes { get { return lazyVolumes.Cast<IVolume>(); } }
public virtual void Dispose()
{
@@ -123,29 +121,31 @@ namespace SharpCompress.Archives
void IExtractionListener.FireCompressedBytesRead(long currentPartCompressedBytes, long compressedReadBytes)
{
CompressedBytesRead?.Invoke(this, new CompressedBytesReadEventArgs(
currentFilePartCompressedBytesRead: currentPartCompressedBytes,
compressedBytesRead: compressedReadBytes
));
CompressedBytesRead?.Invoke(this, new CompressedBytesReadEventArgs
{
CurrentFilePartCompressedBytesRead = currentPartCompressedBytes,
CompressedBytesRead = compressedReadBytes
});
}
void IExtractionListener.FireFilePartExtractionBegin(string name, long size, long compressedSize)
{
FilePartExtractionBegin?.Invoke(this, new FilePartExtractionBeginEventArgs(
compressedSize: compressedSize,
size: size,
name: name
));
FilePartExtractionBegin?.Invoke(this, new FilePartExtractionBeginEventArgs
{
CompressedSize = compressedSize,
Size = size,
Name = name
});
}
/// <summary>
/// Use this method to extract all entries in an archive in order.
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
/// extracted sequentially for the best performance.
///
///
/// This method will load all entry information from the archive.
///
/// WARNING: this will reuse the underlying stream for the archive. Errors may
///
/// WARNING: this will reuse the underlying stream for the archive. Errors may
/// occur if this is used at the same time as other extraction methods on this instance.
/// </summary>
/// <returns></returns>
@@ -160,7 +160,7 @@ namespace SharpCompress.Archives
/// <summary>
/// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
/// </summary>
public virtual bool IsSolid => false;
public virtual bool IsSolid { get { return false; } }
/// <summary>
/// The archive can find all the parts of the archive needed to fully extract the archive. This forces the parsing of the entire archive.
@@ -174,4 +174,4 @@ namespace SharpCompress.Archives
}
}
}
}
}
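The AbstractArchive changes above rework the extraction progress events. A rough sketch of consuming those events from calling code, assuming ArchiveExtractionEventArgs exposes the finished entry via an Item property (big.zip and the out folder are hypothetical):

using System;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;

class ProgressExample
{
    static void Main()
    {
        using (var archive = ZipArchive.Open("big.zip"))
        {
            // Report compressed bytes consumed and each completed entry.
            archive.CompressedBytesRead += (s, e) =>
                Console.WriteLine($"compressed bytes read: {e.CompressedBytesRead}");
            archive.EntryExtractionEnd += (s, e) =>
                Console.WriteLine($"finished: {e.Item.Key}");

            foreach (var entry in archive.Entries)
            {
                if (!entry.IsDirectory)
                {
                    entry.WriteToDirectory("out", new ExtractionOptions { ExtractFullPath = true, Overwrite = true });
                }
            }
        }
    }
}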

src/SharpCompress/Archives/AbstractWritableArchive.cs

@@ -12,28 +12,11 @@ namespace SharpCompress.Archives
where TEntry : IArchiveEntry
where TVolume : IVolume
{
private class RebuildPauseDisposable : IDisposable
{
private readonly AbstractWritableArchive<TEntry, TVolume> archive;
public RebuildPauseDisposable(AbstractWritableArchive<TEntry, TVolume> archive)
{
this.archive = archive;
archive.pauseRebuilding = true;
}
public void Dispose()
{
archive.pauseRebuilding = false;
archive.RebuildModifiedCollection();
}
}
private readonly List<TEntry> newEntries = new List<TEntry>();
private readonly List<TEntry> removedEntries = new List<TEntry>();
private readonly List<TEntry> modifiedEntries = new List<TEntry>();
private bool hasModifications;
private bool pauseRebuilding;
internal AbstractWritableArchive(ArchiveType type)
: base(type)
@@ -62,17 +45,8 @@ namespace SharpCompress.Archives
}
}
public IDisposable PauseEntryRebuilding()
{
return new RebuildPauseDisposable(this);
}
private void RebuildModifiedCollection()
{
if (pauseRebuilding)
{
return;
}
hasModifications = true;
newEntries.RemoveAll(v => removedEntries.Contains(v));
modifiedEntries.Clear();
@@ -109,7 +83,8 @@ namespace SharpCompress.Archives
public TEntry AddEntry(string key, Stream source, bool closeStream,
long size = 0, DateTime? modified = null)
{
if (key.Length > 0 && key[0] is '/' or '\\')
if (key.StartsWith("/")
|| key.StartsWith("\\"))
{
key = key.Substring(1);
}
@@ -128,7 +103,7 @@ namespace SharpCompress.Archives
foreach (var path in Entries.Select(x => x.Key))
{
var p = path.Replace('/', '\\');
if (p.Length > 0 && p[0] == '\\')
if (p.StartsWith("\\"))
{
p = p.Substring(1);
}
@@ -167,4 +142,4 @@ namespace SharpCompress.Archives
modifiedEntries.Cast<Entry>().ForEach(x => x.Close());
}
}
}
}
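AbstractWritableArchive above backs the writable side of the API (AddEntry, SaveTo, and the directory helpers shown further down). A minimal sketch of creating a zip with it; the input directory and output path are hypothetical:

using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;

class WritableArchiveExample
{
    static void Main()
    {
        using (var archive = ZipArchive.Create())
        {
            // AddAllFromDirectory walks the directory and calls AddEntry for each file.
            archive.AddAllFromDirectory(@"D:\data");
            archive.SaveTo(@"D:\data.zip", CompressionType.Deflate);
        }
    }
}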

src/SharpCompress/Archives/ArchiveFactory.cs

@@ -1,6 +1,5 @@
using System;
using System.IO;
using SharpCompress.Archives.Dmg;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Rar;
using SharpCompress.Archives.SevenZip;
@@ -11,7 +10,7 @@ using SharpCompress.Readers;
namespace SharpCompress.Archives
{
public static class ArchiveFactory
public class ArchiveFactory
{
/// <summary>
/// Opens an Archive for random access
@@ -19,14 +18,14 @@ namespace SharpCompress.Archives
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <returns></returns>
public static IArchive Open(Stream stream, ReaderOptions? readerOptions = null)
public static IArchive Open(Stream stream, ReaderOptions readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException("Stream should be readable and seekable");
}
readerOptions ??= new ReaderOptions();
readerOptions = readerOptions ?? new ReaderOptions();
if (ZipArchive.IsZipFile(stream, null))
{
stream.Seek(0, SeekOrigin.Begin);
@@ -45,12 +44,6 @@ namespace SharpCompress.Archives
return GZipArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (DmgArchive.IsDmgFile(stream))
{
stream.Seek(0, SeekOrigin.Begin);
return DmgArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (RarArchive.IsRarFile(stream, readerOptions))
{
stream.Seek(0, SeekOrigin.Begin);
@@ -62,18 +55,30 @@ namespace SharpCompress.Archives
stream.Seek(0, SeekOrigin.Begin);
return TarArchive.Open(stream, readerOptions);
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip, LZip, Dmg");
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip, LZip");
}
public static IWritableArchive Create(ArchiveType type)
{
return type switch
switch (type)
{
ArchiveType.Zip => ZipArchive.Create(),
ArchiveType.Tar => TarArchive.Create(),
ArchiveType.GZip => GZipArchive.Create(),
_ => throw new NotSupportedException("Cannot create Archives of type: " + type)
};
case ArchiveType.Zip:
{
return ZipArchive.Create();
}
case ArchiveType.Tar:
{
return TarArchive.Create();
}
case ArchiveType.GZip:
{
return GZipArchive.Create();
}
default:
{
throw new NotSupportedException("Cannot create Archives of type: " + type);
}
}
}
/// <summary>
@@ -81,7 +86,7 @@ namespace SharpCompress.Archives
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
public static IArchive Open(string filePath, ReaderOptions? options = null)
public static IArchive Open(string filePath, ReaderOptions options = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), options);
@@ -92,55 +97,52 @@ namespace SharpCompress.Archives
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
public static IArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
public static IArchive Open(FileInfo fileInfo, ReaderOptions options = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
using var stream = fileInfo.OpenRead();
if (ZipArchive.IsZipFile(stream, null))
{
return ZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (SevenZipArchive.IsSevenZipFile(stream))
{
return SevenZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (GZipArchive.IsGZipFile(stream))
{
return GZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (DmgArchive.IsDmgFile(stream))
options = options ?? new ReaderOptions { LeaveStreamOpen = false };
using (var stream = fileInfo.OpenRead())
{
if (ZipArchive.IsZipFile(stream, null))
{
return ZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
return DmgArchive.Open(fileInfo, options);
if (SevenZipArchive.IsSevenZipFile(stream))
{
return SevenZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (GZipArchive.IsGZipFile(stream))
{
return GZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (RarArchive.IsRarFile(stream, options))
{
return RarArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (TarArchive.IsTarFile(stream))
{
return TarArchive.Open(fileInfo, options);
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
}
stream.Seek(0, SeekOrigin.Begin);
if (RarArchive.IsRarFile(stream, options))
{
return RarArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (TarArchive.IsTarFile(stream))
{
return TarArchive.Open(fileInfo, options);
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip, Dmg");
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(string sourceArchive, string destinationDirectory,
ExtractionOptions? options = null)
ExtractionOptions options = null)
{
using IArchive archive = Open(sourceArchive);
foreach (IArchiveEntry entry in archive.Entries)
using (IArchive archive = Open(sourceArchive))
{
entry.WriteToDirectory(destinationDirectory, options);
foreach (IArchiveEntry entry in archive.Entries)
{
entry.WriteToDirectory(destinationDirectory, options);
}
}
}
}
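ArchiveFactory.Open above probes the stream once per supported format and rewinds between checks, which is why it requires a readable and seekable stream. A short sketch of calling it; unknown.bin is a hypothetical input:

using System;
using System.IO;
using SharpCompress.Archives;

class ArchiveFactoryExample
{
    static void Main()
    {
        using (Stream stream = File.OpenRead("unknown.bin"))
        using (var archive = ArchiveFactory.Open(stream))
        {
            // The detected format is exposed through IArchive.Type.
            Console.WriteLine($"Detected type: {archive.Type}");
            foreach (var entry in archive.Entries)
            {
                Console.WriteLine($"{entry.Key} ({entry.Size} bytes)");
            }
        }
    }
}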

src/SharpCompress/Archives/Dmg/DmgArchive.cs (deleted)

@@ -1,117 +0,0 @@
using SharpCompress.Common;
using SharpCompress.Common.Dmg;
using SharpCompress.Common.Dmg.Headers;
using SharpCompress.Common.Dmg.HFS;
using SharpCompress.Readers;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace SharpCompress.Archives.Dmg
{
public class DmgArchive : AbstractArchive<DmgArchiveEntry, DmgVolume>
{
private readonly string _fileName;
internal DmgArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.Dmg, fileInfo, readerOptions)
{
_fileName = fileInfo.FullName;
}
internal DmgArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.Dmg, stream.AsEnumerable(), readerOptions)
{
_fileName = string.Empty;
}
protected override IReader CreateReaderForSolidExtraction()
=> new DmgReader(ReaderOptions, this, _fileName);
protected override IEnumerable<DmgArchiveEntry> LoadEntries(IEnumerable<DmgVolume> volumes)
=> volumes.Single().LoadEntries();
protected override IEnumerable<DmgVolume> LoadVolumes(FileInfo file)
=> new DmgVolume(this, file.OpenRead(), file.FullName, ReaderOptions).AsEnumerable();
protected override IEnumerable<DmgVolume> LoadVolumes(IEnumerable<Stream> streams)
=> new DmgVolume(this, streams.Single(), string.Empty, ReaderOptions).AsEnumerable();
public static bool IsDmgFile(FileInfo fileInfo)
{
if (!fileInfo.Exists) return false;
using var stream = fileInfo.OpenRead();
return IsDmgFile(stream);
}
public static bool IsDmgFile(Stream stream)
{
long headerPos = stream.Length - DmgHeader.HeaderSize;
if (headerPos < 0) return false;
stream.Position = headerPos;
return DmgHeader.TryRead(stream, out _);
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static DmgArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static DmgArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new DmgArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static DmgArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
return new DmgArchive(stream, readerOptions ?? new ReaderOptions());
}
private sealed class DmgReader : AbstractReader<DmgEntry, DmgVolume>
{
private readonly DmgArchive _archive;
private readonly string _fileName;
private readonly Stream? _partitionStream;
public override DmgVolume Volume { get; }
internal DmgReader(ReaderOptions readerOptions, DmgArchive archive, string fileName)
: base(readerOptions, ArchiveType.Dmg)
{
_archive = archive;
_fileName = fileName;
Volume = archive.Volumes.Single();
using var compressedStream = DmgUtil.LoadHFSPartitionStream(Volume.Stream, Volume.Header);
_partitionStream = compressedStream?.Decompress();
}
protected override IEnumerable<DmgEntry> GetEntries(Stream stream)
{
if (_partitionStream is null) return Array.Empty<DmgArchiveEntry>();
else return HFSUtil.LoadEntriesFromPartition(_partitionStream, _fileName, _archive);
}
}
}
}

src/SharpCompress/Archives/Dmg/DmgArchiveEntry.cs (deleted)

@@ -1,32 +0,0 @@
using SharpCompress.Common.Dmg;
using SharpCompress.Common.Dmg.HFS;
using System;
using System.IO;
namespace SharpCompress.Archives.Dmg
{
public sealed class DmgArchiveEntry : DmgEntry, IArchiveEntry
{
private readonly Stream? _stream;
public bool IsComplete { get; } = true;
public IArchive Archive { get; }
internal DmgArchiveEntry(Stream? stream, DmgArchive archive, HFSCatalogRecord record, string path, DmgFilePart part)
: base(record, path, stream?.Length ?? 0, part)
{
_stream = stream;
Archive = archive;
}
public Stream OpenEntryStream()
{
if (IsDirectory)
throw new NotSupportedException("Directories cannot be opened as stream");
_stream!.Position = 0;
return _stream;
}
}
}

src/SharpCompress/Archives/GZip/GZipArchive.cs

@@ -18,7 +18,7 @@ namespace SharpCompress.Archives.GZip
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
public static GZipArchive Open(string filePath, ReaderOptions readerOptions = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
@@ -29,7 +29,7 @@ namespace SharpCompress.Archives.GZip
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new GZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
@@ -40,7 +40,7 @@ namespace SharpCompress.Archives.GZip
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
public static GZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
return new GZipArchive(stream, readerOptions ?? new ReaderOptions());
@@ -77,9 +77,10 @@ namespace SharpCompress.Archives.GZip
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsGZipFile(stream);
using (Stream stream = fileInfo.OpenRead())
{
return IsGZipFile(stream);
}
}
public void SaveTo(string filePath)
@@ -98,7 +99,7 @@ namespace SharpCompress.Archives.GZip
public static bool IsGZipFile(Stream stream)
{
// read the header on the first read
Span<byte> header = stackalloc byte[10];
byte[] header = new byte[10];
// workitem 8501: handle edge case (decompress empty stream)
if (!stream.ReadFully(header))

src/SharpCompress/Archives/GZip/GZipArchiveEntry.cs

@@ -15,7 +15,7 @@ namespace SharpCompress.Archives.GZip
public virtual Stream OpenEntryStream()
{
//this is to reset the stream to be read multiple times
var part = (GZipFilePart)Parts.Single();
var part = Parts.Single() as GZipFilePart;
if (part.GetRawStream().Position != part.EntryStartPosition)
{
part.GetRawStream().Position = part.EntryStartPosition;

src/SharpCompress/Archives/GZip/GZipWritableArchiveEntry.cs

@@ -1,6 +1,4 @@
#nullable disable
using System;
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
@@ -8,7 +6,7 @@ using SharpCompress.IO;
namespace SharpCompress.Archives.GZip
{
internal sealed class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArchiveEntry
internal class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArchiveEntry
{
private readonly bool closeStream;
private readonly Stream stream;

src/SharpCompress/Archives/IArchiveEntryExtensions.cs

@@ -8,17 +8,22 @@ namespace SharpCompress.Archives
{
public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWriteTo)
{
if (archiveEntry.Archive.Type == ArchiveType.Rar && archiveEntry.Archive.IsSolid)
{
throw new InvalidFormatException("Cannot use Archive random access on SOLID Rar files.");
}
if (archiveEntry.IsDirectory)
{
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
}
var streamListener = (IArchiveExtractionListener)archiveEntry.Archive;
var streamListener = archiveEntry.Archive as IArchiveExtractionListener;
streamListener.EnsureEntriesLoaded();
streamListener.FireEntryExtractionBegin(archiveEntry);
streamListener.FireFilePartExtractionBegin(archiveEntry.Key, archiveEntry.Size, archiveEntry.CompressedSize);
var entryStream = archiveEntry.OpenEntryStream();
if (entryStream is null)
if (entryStream == null)
{
return;
}
@@ -31,12 +36,12 @@ namespace SharpCompress.Archives
}
streamListener.FireEntryExtractionEnd(archiveEntry);
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(this IArchiveEntry entry, string destinationDirectory,
ExtractionOptions? options = null)
ExtractionOptions options = null)
{
ExtractionMethods.WriteEntryToDirectory(entry, destinationDirectory, options,
entry.WriteToFile);
@@ -45,11 +50,10 @@ namespace SharpCompress.Archives
/// <summary>
/// Extract to specific file
/// </summary>
public static void WriteToFile(this IArchiveEntry entry,
string destinationFileName,
ExtractionOptions? options = null)
public static void WriteToFile(this IArchiveEntry entry, string destinationFileName,
ExtractionOptions options = null)
{
ExtractionMethods.WriteEntryToFile(entry, destinationFileName, options,
(x, fm) =>
{

src/SharpCompress/Archives/IArchiveExtensions.cs

@@ -9,7 +9,7 @@ namespace SharpCompress.Archives
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(this IArchive archive, string destinationDirectory,
ExtractionOptions? options = null)
ExtractionOptions options = null)
{
foreach (IArchiveEntry entry in archive.Entries.Where(x => !x.IsDirectory))
{

src/SharpCompress/Archives/IWritableArchive.cs

@@ -11,11 +11,5 @@ namespace SharpCompress.Archives
IArchiveEntry AddEntry(string key, Stream source, bool closeStream, long size = 0, DateTime? modified = null);
void SaveTo(Stream stream, WriterOptions options);
/// <summary>
/// Use this to pause entry rebuilding when adding large collections of entries. Dispose when complete. A using statement is recommended.
/// </summary>
/// <returns>IDisposeable to resume entry rebuilding</returns>
IDisposable PauseEntryRebuilding();
}
}

src/SharpCompress/Archives/IWritableArchiveExtensions.cs

@@ -35,14 +35,11 @@ namespace SharpCompress.Archives
this IWritableArchive writableArchive,
string filePath, string searchPattern = "*.*", SearchOption searchOption = SearchOption.AllDirectories)
{
using (writableArchive.PauseEntryRebuilding())
foreach (var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption))
{
foreach (var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption))
{
var fileInfo = new FileInfo(path);
writableArchive.AddEntry(path.Substring(filePath.Length), fileInfo.OpenRead(), true, fileInfo.Length,
fileInfo.LastWriteTime);
}
var fileInfo = new FileInfo(path);
writableArchive.AddEntry(path.Substring(filePath.Length), fileInfo.OpenRead(), true, fileInfo.Length,
fileInfo.LastWriteTime);
}
}
public static IArchiveEntry AddEntry(this IWritableArchive writableArchive, string key, FileInfo fileInfo)

src/SharpCompress/Archives/Rar/FileInfoRarFilePart.cs

@@ -3,9 +3,9 @@ using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Archives.Rar
{
internal sealed class FileInfoRarFilePart : SeekableFilePart
internal class FileInfoRarFilePart : SeekableFilePart
{
internal FileInfoRarFilePart(FileInfoRarArchiveVolume volume, string? password, MarkHeader mh, FileHeader fh, FileInfo fi)
internal FileInfoRarFilePart(FileInfoRarArchiveVolume volume, string password, MarkHeader mh, FileHeader fh, FileInfo fi)
: base(mh, fh, volume.Stream, password)
{
FileInfo = fi;

src/SharpCompress/Archives/Rar/RarArchive.cs

@@ -10,8 +10,7 @@ using SharpCompress.Readers.Rar;
namespace SharpCompress.Archives.Rar
{
public class
RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
{
internal Lazy<IRarUnpack> UnpackV2017 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV2017.Unpack());
internal Lazy<IRarUnpack> UnpackV1 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV1.Unpack());
@@ -43,7 +42,7 @@ namespace SharpCompress.Archives.Rar
protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes)
{
return RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions);
return RarArchiveEntryFactory.GetEntries(this, volumes);
}
protected override IEnumerable<RarVolume> LoadVolumes(IEnumerable<Stream> streams)
@@ -66,7 +65,7 @@ namespace SharpCompress.Archives.Rar
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
public static RarArchive Open(string filePath, ReaderOptions? options = null)
public static RarArchive Open(string filePath, ReaderOptions options = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
return new RarArchive(new FileInfo(filePath), options ?? new ReaderOptions());
@@ -77,7 +76,7 @@ namespace SharpCompress.Archives.Rar
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
public static RarArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
public static RarArchive Open(FileInfo fileInfo, ReaderOptions options = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new RarArchive(fileInfo, options ?? new ReaderOptions());
@@ -88,7 +87,7 @@ namespace SharpCompress.Archives.Rar
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
public static RarArchive Open(Stream stream, ReaderOptions? options = null)
public static RarArchive Open(Stream stream, ReaderOptions options = null)
{
stream.CheckNotNull(nameof(stream));
return Open(stream.AsEnumerable(), options ?? new ReaderOptions());
@@ -99,7 +98,7 @@ namespace SharpCompress.Archives.Rar
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions options = null)
{
streams.CheckNotNull(nameof(streams));
return new RarArchive(streams, options ?? new ReaderOptions());
@@ -121,8 +120,8 @@ namespace SharpCompress.Archives.Rar
return IsRarFile(stream);
}
}
public static bool IsRarFile(Stream stream, ReaderOptions? options = null)
public static bool IsRarFile(Stream stream, ReaderOptions options = null)
{
try
{

src/SharpCompress/Archives/Rar/RarArchiveEntry.cs

@@ -6,7 +6,6 @@ using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressors.Rar;
using SharpCompress.Readers;
namespace SharpCompress.Archives.Rar
{
@@ -14,13 +13,11 @@ namespace SharpCompress.Archives.Rar
{
private readonly ICollection<RarFilePart> parts;
private readonly RarArchive archive;
private readonly ReaderOptions readerOptions;
internal RarArchiveEntry(RarArchive archive, IEnumerable<RarFilePart> parts, ReaderOptions readerOptions)
internal RarArchiveEntry(RarArchive archive, IEnumerable<RarFilePart> parts)
{
this.parts = parts.ToList();
this.archive = archive;
this.readerOptions = readerOptions;
}
public override CompressionType CompressionType => CompressionType.Rar;
@@ -60,26 +57,30 @@ namespace SharpCompress.Archives.Rar
public Stream OpenEntryStream()
{
if (archive.IsSolid)
{
throw new InvalidOperationException("Use ExtractAllEntries to extract SOLID archives.");
}
if (IsRarV3)
{
return new RarStream(archive.UnpackV1.Value, FileHeader, new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive));
}
return new RarStream(archive.UnpackV2017.Value, FileHeader, new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive));
}
public bool IsComplete
{
get
{
var headers = parts.Select(x => x.FileHeader);
return !headers.First().IsSplitBefore && !headers.Last().IsSplitAfter;
}
public bool IsComplete
{
get
{
return parts.Select(fp => fp.FileHeader).Any(fh => !fh.IsSplitAfter);
}
}
private void CheckIncomplete()
{
if (!readerOptions.DisableCheckIncomplete && !IsComplete)
if (!IsComplete)
{
throw new IncompleteArchiveException("ArchiveEntry is incomplete and cannot perform this operation.");
}

src/SharpCompress/Archives/Rar/RarArchiveEntryFactory.cs

@@ -1,6 +1,5 @@
using System.Collections.Generic;
using SharpCompress.Common.Rar;
using SharpCompress.Readers;
namespace SharpCompress.Archives.Rar
{
@@ -37,12 +36,11 @@ namespace SharpCompress.Archives.Rar
}
internal static IEnumerable<RarArchiveEntry> GetEntries(RarArchive archive,
IEnumerable<RarVolume> rarParts,
ReaderOptions readerOptions)
IEnumerable<RarVolume> rarParts)
{
foreach (var groupedParts in GetMatchedFileParts(rarParts))
{
yield return new RarArchiveEntry(archive, groupedParts, readerOptions);
yield return new RarArchiveEntry(archive, groupedParts);
}
}
}

src/SharpCompress/Archives/Rar/RarArchiveVolumeFactory.cs

@@ -23,7 +23,7 @@ namespace SharpCompress.Archives.Rar
yield return part;
}
}
internal static IEnumerable<RarVolume> GetParts(FileInfo fileInfo, ReaderOptions options)
{
FileInfoRarArchiveVolume part = new FileInfoRarArchiveVolume(fileInfo, options);
@@ -34,20 +34,20 @@ namespace SharpCompress.Archives.Rar
{
yield break; //if file isn't volume then there is no reason to look
}
fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart)!;
fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart);
//we use fileinfo because rar is dumb and looks at file names rather than archive info for another volume
while (fileInfo != null && fileInfo.Exists)
{
part = new FileInfoRarArchiveVolume(fileInfo, options);
fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart)!;
fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart);
yield return part;
}
}
private static FileInfo? GetNextFileInfo(ArchiveHeader ah, FileInfoRarFilePart? currentFilePart)
private static FileInfo GetNextFileInfo(ArchiveHeader ah, FileInfoRarFilePart currentFilePart)
{
if (currentFilePart is null)
if (currentFilePart == null)
{
return null;
}
@@ -68,7 +68,7 @@ namespace SharpCompress.Archives.Rar
// .rar, .r00, .r01, ...
string extension = currentFileInfo.Extension;
var buffer = new StringBuilder(currentFileInfo.FullName.Length);
StringBuilder buffer = new StringBuilder(currentFileInfo.FullName.Length);
buffer.Append(currentFileInfo.FullName.Substring(0,
currentFileInfo.FullName.Length - extension.Length));
if (string.Compare(extension, ".rar", StringComparison.OrdinalIgnoreCase) == 0)
@@ -77,7 +77,8 @@ namespace SharpCompress.Archives.Rar
}
else
{
if (int.TryParse(extension.Substring(2, 2), out int num))
int num = 0;
if (int.TryParse(extension.Substring(2, 2), out num))
{
num++;
buffer.Append(".r");
@@ -110,11 +111,12 @@ namespace SharpCompress.Archives.Rar
}
StringBuilder buffer = new StringBuilder(currentFileInfo.FullName.Length);
buffer.Append(currentFileInfo.FullName, 0, startIndex);
int num = 0;
string numString = currentFileInfo.FullName.Substring(startIndex + 5,
currentFileInfo.FullName.IndexOf('.', startIndex + 5) -
startIndex - 5);
buffer.Append(".part");
if (int.TryParse(numString, out int num))
if (int.TryParse(numString, out num))
{
num++;
for (int i = 0; i < numString.Length - num.ToString().Length; i++)

src/SharpCompress/Archives/Rar/SeekableFilePart.cs

@@ -7,9 +7,9 @@ namespace SharpCompress.Archives.Rar
internal class SeekableFilePart : RarFilePart
{
private readonly Stream stream;
private readonly string? password;
private readonly string password;
internal SeekableFilePart(MarkHeader mh, FileHeader fh, Stream stream, string? password)
internal SeekableFilePart(MarkHeader mh, FileHeader fh, Stream stream, string password)
: base(mh, fh)
{
this.stream = stream;
@@ -21,7 +21,7 @@ namespace SharpCompress.Archives.Rar
stream.Position = FileHeader.DataStartPosition;
if (FileHeader.R4Salt != null)
{
return new RarCryptoWrapper(stream, password!, FileHeader.R4Salt);
return new RarCryptoWrapper(stream, password, FileHeader.R4Salt);
}
return stream;
}

View File

@@ -1,6 +1,4 @@
#nullable disable
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -110,7 +108,7 @@ namespace SharpCompress.Archives.SevenZip
private void LoadFactory(Stream stream)
{
if (database is null)
if (database == null)
{
stream.Position = 0;
var reader = new ArchiveReader();
@@ -131,13 +129,13 @@ namespace SharpCompress.Archives.SevenZip
}
}
private static ReadOnlySpan<byte> SIGNATURE => new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };
private static readonly byte[] SIGNATURE = {(byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C};
private static bool SignatureMatch(Stream stream)
{
BinaryReader reader = new BinaryReader(stream);
ReadOnlySpan<byte> signatureBytes = reader.ReadBytes(6);
return signatureBytes.SequenceEqual(SIGNATURE);
byte[] signatureBytes = reader.ReadBytes(6);
return signatureBytes.BinaryEquals(SIGNATURE);
}
protected override IReader CreateReaderForSolidExtraction()
@@ -156,7 +154,7 @@ namespace SharpCompress.Archives.SevenZip
}
}
private sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
private class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
{
private readonly SevenZipArchive archive;
private CFolder currentFolder;
@@ -182,7 +180,7 @@ namespace SharpCompress.Archives.SevenZip
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
{
currentFolder = group.Key;
if (group.Key is null)
if (group.Key == null)
{
currentStream = Stream.Null;
}
@@ -203,7 +201,7 @@ namespace SharpCompress.Archives.SevenZip
return CreateEntryStream(new ReadOnlySubStream(currentStream, currentItem.Size));
}
}
private class PasswordProvider : IPasswordProvider
{
private readonly string _password;
@@ -211,6 +209,7 @@ namespace SharpCompress.Archives.SevenZip
public PasswordProvider(string password)
{
_password = password;
}
public string CryptoGetTextPassword()

View File

@@ -20,7 +20,7 @@ namespace SharpCompress.Archives.Tar
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(string filePath, ReaderOptions? readerOptions = null)
public static TarArchive Open(string filePath, ReaderOptions readerOptions = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
@@ -31,7 +31,7 @@ namespace SharpCompress.Archives.Tar
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
public static TarArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new TarArchive(fileInfo, readerOptions ?? new ReaderOptions());
@@ -42,7 +42,7 @@ namespace SharpCompress.Archives.Tar
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(Stream stream, ReaderOptions? readerOptions = null)
public static TarArchive Open(Stream stream, ReaderOptions readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
return new TarArchive(stream, readerOptions ?? new ReaderOptions());
@@ -79,7 +79,7 @@ namespace SharpCompress.Archives.Tar
}
return false;
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -118,8 +118,8 @@ namespace SharpCompress.Archives.Tar
protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
{
Stream stream = volumes.Single().Stream;
TarHeader? previousHeader = null;
foreach (TarHeader? header in TarHeaderFactory.ReadHeader(StreamingMode.Seekable, stream, ReaderOptions.ArchiveEncoding))
TarHeader previousHeader = null;
foreach (TarHeader header in TarHeaderFactory.ReadHeader(StreamingMode.Seekable, stream, ReaderOptions.ArchiveEncoding))
{
if (header != null)
{
@@ -138,7 +138,7 @@ namespace SharpCompress.Archives.Tar
using (var entryStream = entry.OpenEntryStream())
{
using (var memoryStream = new MemoryStream())
using (var memoryStream = Utility.RECYCLABLE_MEMORY_STREAM_MANAGER.GetStream())
{
entryStream.TransferTo(memoryStream);
memoryStream.Position = 0;
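
For orientation, a minimal usage sketch of the TarArchive API touched above; it assumes the usual Entries collection exposed by the archive classes, uses hypothetical file names, and omits error handling.

using System.IO;
using SharpCompress.Archives.Tar;

internal static class TarUsageSketch
{
    public static void ExtractAll(string tarPath, string outputDir)
    {
        using (var archive = TarArchive.Open(tarPath))
        {
            foreach (var entry in archive.Entries)
            {
                if (entry.IsDirectory)
                {
                    continue;
                }
                // Entry paths are flattened here for brevity.
                string target = Path.Combine(outputDir, Path.GetFileName(entry.Key));
                using (var entryStream = entry.OpenEntryStream())
                using (var output = File.Create(target))
                {
                    entryStream.CopyTo(output);
                }
            }
        }
    }
}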

View File

@@ -1,6 +1,4 @@
#nullable disable
using System;
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
@@ -8,7 +6,7 @@ using SharpCompress.IO;
namespace SharpCompress.Archives.Tar
{
internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiveEntry
internal class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiveEntry
{
private readonly bool closeStream;
private readonly Stream stream;

View File

@@ -15,22 +15,20 @@ namespace SharpCompress.Archives.Zip
{
public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
{
#nullable disable
private readonly SeekableZipHeaderFactory headerFactory;
#nullable enable
/// <summary>
/// Gets or sets the compression level applied to files added to the archive,
/// if the compression method is set to deflate
/// </summary>
public CompressionLevel DeflateCompressionLevel { get; set; }
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
public static ZipArchive Open(string filePath, ReaderOptions readerOptions = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
@@ -41,7 +39,7 @@ namespace SharpCompress.Archives.Zip
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new ZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
@@ -52,18 +50,18 @@ namespace SharpCompress.Archives.Zip
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
public static ZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
return new ZipArchive(stream, readerOptions ?? new ReaderOptions());
}
public static bool IsZipFile(string filePath, string? password = null)
public static bool IsZipFile(string filePath, string password = null)
{
return IsZipFile(new FileInfo(filePath), password);
}
public static bool IsZipFile(FileInfo fileInfo, string? password = null)
public static bool IsZipFile(FileInfo fileInfo, string password = null)
{
if (!fileInfo.Exists)
{
@@ -75,13 +73,14 @@ namespace SharpCompress.Archives.Zip
}
}
public static bool IsZipFile(Stream stream, string? password = null)
public static bool IsZipFile(Stream stream, string password = null)
{
StreamingZipHeaderFactory headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding());
try
{
ZipHeader? header = headerFactory.ReadStreamHeader(stream).FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
ZipHeader header =
headerFactory.ReadStreamHeader(stream).FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header == null)
{
return false;
}
@@ -96,7 +95,7 @@ namespace SharpCompress.Archives.Zip
return false;
}
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -148,13 +147,13 @@ namespace SharpCompress.Archives.Zip
{
yield return new ZipArchiveEntry(this,
new SeekableZipFilePart(headerFactory,
(DirectoryEntryHeader)h,
h as DirectoryEntryHeader,
stream));
}
break;
case ZipHeaderType.DirectoryEnd:
{
byte[] bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
byte[] bytes = (h as DirectoryEndHeader).Comment;
volume.Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
yield break;
}
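
Similarly, a minimal sketch of the IsZipFile guard and Open factory shown above, listing entry metadata rather than extracting (hypothetical path; assumes the typed Entries collection):

using System;
using SharpCompress.Archives.Zip;

internal static class ZipUsageSketch
{
    public static void ListEntries(string zipPath)
    {
        if (!ZipArchive.IsZipFile(zipPath))
        {
            Console.WriteLine("Not a zip file: " + zipPath);
            return;
        }
        using (var archive = ZipArchive.Open(zipPath))
        {
            foreach (var entry in archive.Entries)
            {
                // Comment comes from the central directory, as read via SeekableZipFilePart above.
                Console.WriteLine($"{entry.Key} ({entry.Size} bytes) {entry.Comment}");
            }
        }
    }
}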

View File

@@ -6,7 +6,7 @@ namespace SharpCompress.Archives.Zip
{
public class ZipArchiveEntry : ZipEntry, IArchiveEntry
{
internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart? part)
internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart part)
: base(part)
{
Archive = archive;
@@ -25,6 +25,6 @@ namespace SharpCompress.Archives.Zip
#endregion
public string? Comment => ((SeekableZipFilePart)Parts.Single()).Comment;
public string Comment => (Parts.Single() as SeekableZipFilePart).Comment;
}
}

View File

@@ -5,6 +5,7 @@ using System.Runtime.CompilerServices;
[assembly: AssemblyTitle("SharpCompress")]
[assembly: AssemblyProduct("SharpCompress")]
[assembly: InternalsVisibleTo("SharpCompress.Test" + SharpCompress.AssemblyInfo.PublicKeySuffix)]
[assembly: InternalsVisibleTo("SharpCompress.Test.Portable" + SharpCompress.AssemblyInfo.PublicKeySuffix)]
[assembly: CLSCompliant(true)]
namespace SharpCompress

View File

@@ -18,30 +18,24 @@ namespace SharpCompress.Common
/// <summary>
/// Set this encoding when you want to force it for all encoding operations.
/// </summary>
public Encoding? Forced { get; set; }
public Encoding Forced { get; set; }
/// <summary>
/// Set this when you want to use a custom method for all decoding operations.
/// </summary>
/// <returns>string Func(bytes, index, length)</returns>
public Func<byte[], int, int, string>? CustomDecoder { get; set; }
public Func<byte[], int, int, string> CustomDecoder { get; set; }
public ArchiveEncoding()
: this(Encoding.Default, Encoding.Default)
{
}
public ArchiveEncoding(Encoding def, Encoding password)
{
Default = def;
Password = password;
Default = Encoding.GetEncoding(437);
Password = Encoding.GetEncoding(437);
}
#if !NET461
static ArchiveEncoding()
{
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
}
#endif
public string Decode(byte[] bytes)
{
@@ -73,4 +67,4 @@ namespace SharpCompress.Common
return CustomDecoder ?? ((bytes, index, count) => GetEncoding().GetString(bytes, index, count));
}
}
}
}
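
These hooks are consumed through ReaderOptions (see the ReaderOptions.ArchiveEncoding argument passed to TarHeaderFactory.ReadHeader earlier in this diff). A minimal sketch of forcing UTF-8 for all decoding, assuming ReaderOptions exposes a settable ArchiveEncoding property:

using System.Text;
using SharpCompress.Common;
using SharpCompress.Readers;

internal static class EncodingOptionsSketch
{
    public static ReaderOptions Utf8Options()
    {
        return new ReaderOptions
        {
            // Forced wins over the default/password encodings for every decode.
            ArchiveEncoding = new ArchiveEncoding { Forced = Encoding.UTF8 }
        };
    }
}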

View File

@@ -8,10 +8,5 @@ namespace SharpCompress.Common
: base(message)
{
}
public ArchiveException(string message, Exception inner)
: base(message, inner)
{
}
}
}

View File

@@ -6,7 +6,6 @@
Zip,
Tar,
SevenZip,
GZip,
Dmg
GZip
}
}

View File

@@ -2,22 +2,16 @@
namespace SharpCompress.Common
{
public sealed class CompressedBytesReadEventArgs : EventArgs
public class CompressedBytesReadEventArgs : EventArgs
{
public CompressedBytesReadEventArgs(long compressedBytesRead, long currentFilePartCompressedBytesRead)
{
CompressedBytesRead = compressedBytesRead;
CurrentFilePartCompressedBytesRead = currentFilePartCompressedBytesRead;
}
/// <summary>
/// Compressed bytes read for the current entry
/// </summary>
public long CompressedBytesRead { get; }
public long CompressedBytesRead { get; internal set; }
/// <summary>
/// Current file part read for Multipart files (e.g. Rar)
/// </summary>
public long CurrentFilePartCompressedBytesRead { get; }
public long CurrentFilePartCompressedBytesRead { get; internal set; }
}
}

View File

@@ -1,323 +0,0 @@
using SharpCompress.Common.Dmg.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.ADC;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.Deflate;
using SharpCompress.IO;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
namespace SharpCompress.Common.Dmg
{
internal sealed class DmgBlockDataStream : Stream
{
private readonly Stream _baseStream;
private readonly DmgHeader _header;
private readonly BlkxTable _table;
private long _position;
private bool _isEnded;
private int _chunkIndex;
private Stream? _chunkStream;
private long _chunkPos;
public override bool CanRead => true;
public override bool CanWrite => false;
public override bool CanSeek => true;
public override long Length { get; }
public override long Position
{
get => _position;
set
{
if ((value < 0) || (value > Length)) throw new ArgumentOutOfRangeException(nameof(value));
if (value == Length)
{
// End of the stream
_position = Length;
_isEnded = true;
_chunkIndex = -1;
_chunkStream = null;
}
else if (value != _position)
{
_position = value;
// We can only seek over entire chunks at a time because some chunks may be compressed.
// So we first find the chunk that we are now in, then we read to the exact position inside that chunk.
for (int i = 0; i < _table.Chunks.Count; i++)
{
var chunk = _table.Chunks[i];
if (IsChunkValid(chunk) && (chunk.UncompressedOffset <= (ulong)_position)
&& ((chunk.UncompressedOffset + chunk.UncompressedLength) > (ulong)_position))
{
if (i == _chunkIndex)
{
// We are still in the same chunk, so if the new position is
// at or past the previous one we can just skip forward to it.
long offset = (long)chunk.UncompressedOffset + _chunkPos;
if (offset <= _position)
{
long skip = _position - offset;
_chunkStream!.Skip(skip);
_chunkPos += skip;
break;
}
}
_chunkIndex = i;
_chunkStream = GetChunkStream();
_chunkPos = 0;
// If the chunk happens to not be compressed this read will still result in a fast seek
if ((ulong)_position != chunk.UncompressedOffset)
{
long skip = _position - (long)chunk.UncompressedOffset;
_chunkStream.Skip(skip);
_chunkPos = skip;
}
break;
}
}
}
}
}
public DmgBlockDataStream(Stream baseStream, DmgHeader header, BlkxTable table)
{
if (!baseStream.CanRead) throw new ArgumentException("Requires a readable stream", nameof(baseStream));
if (!baseStream.CanSeek) throw new ArgumentException("Requires a seekable stream", nameof(baseStream));
_baseStream = baseStream;
_header = header;
_table = table;
Length = 0;
foreach (var chunk in table.Chunks)
{
if (IsChunkValid(chunk))
Length += (long)chunk.UncompressedLength;
}
_position = 0;
_chunkIndex = -1;
_chunkIndex = GetNextChunk();
_isEnded = _chunkIndex < 0;
if (!_isEnded) _chunkStream = GetChunkStream();
_chunkPos = 0;
}
private static bool IsChunkValid(BlkxChunk chunk)
{
return chunk.Type switch
{
BlkxChunkType.Zero => true,
BlkxChunkType.Uncompressed => true,
BlkxChunkType.Ignore => true,
BlkxChunkType.AdcCompressed => true,
BlkxChunkType.ZlibCompressed => true,
BlkxChunkType.Bz2Compressed => true,
_ => false
};
}
private int GetNextChunk()
{
int index = _chunkIndex;
bool isValid = false;
while (!isValid)
{
index++;
if (index >= _table.Chunks.Count) return -1;
var chunk = _table.Chunks[index];
if (chunk.Type == BlkxChunkType.Last) return -1;
isValid = IsChunkValid(chunk);
}
return index;
}
private Stream GetChunkStream()
{
if (_chunkIndex < 0)
throw new InvalidOperationException("Invalid chunk index");
var chunk = _table.Chunks[_chunkIndex];
// For our purposes, ignore behaves the same as zero
if ((chunk.Type == BlkxChunkType.Zero) || (chunk.Type == BlkxChunkType.Ignore))
return new ConstantStream(0, (long)chunk.UncompressedLength);
// We first create a sub-stream on the region of the base stream where the
// (possibly compressed) data is physically located at.
var subStream = new SeekableSubStream(_baseStream,
(long)(_header.DataForkOffset + _table.DataOffset + chunk.CompressedOffset),
(long)chunk.CompressedLength);
// Then we nest that sub-stream into the appropriate compressed stream.
return chunk.Type switch
{
BlkxChunkType.Uncompressed => subStream,
BlkxChunkType.AdcCompressed => new ADCStream(subStream, CompressionMode.Decompress),
BlkxChunkType.ZlibCompressed => new ZlibStream(subStream, CompressionMode.Decompress),
BlkxChunkType.Bz2Compressed => new BZip2Stream(subStream, CompressionMode.Decompress, false),
_ => throw new InvalidOperationException("Invalid chunk type")
};
}
// Decompresses the entire stream in memory for faster extraction.
// This is about two orders of magnitude faster than decompressing
// on-the-fly while extracting, but also eats RAM for breakfast.
public Stream Decompress()
{
// We have to load all the chunks into separate memory streams first
// because otherwise the decompression threads would block each other
// and actually be slower than just a single decompression thread.
var rawStreams = new Stream?[_table.Chunks.Count];
for (int i = 0; i < rawStreams.Length; i++)
{
var chunk = _table.Chunks[i];
if (IsChunkValid(chunk))
{
if ((chunk.Type == BlkxChunkType.Zero) || (chunk.Type == BlkxChunkType.Ignore))
{
rawStreams[i] = new ConstantStream(0, (long)chunk.UncompressedLength);
}
else
{
var subStream = new SeekableSubStream(_baseStream,
(long)(_header.DataForkOffset + _table.DataOffset + chunk.CompressedOffset),
(long)chunk.CompressedLength);
var memStream = new MemoryStream();
subStream.CopyTo(memStream);
memStream.Position = 0;
rawStreams[i] = memStream;
}
}
else
{
rawStreams[i] = null;
}
}
// Now we can decompress the chunks multithreaded
var streams = new Stream?[_table.Chunks.Count];
Parallel.For(0, streams.Length, i =>
{
var rawStream = rawStreams[i];
if (rawStream is not null)
{
var chunk = _table.Chunks[i];
if ((chunk.Type == BlkxChunkType.Zero)
|| (chunk.Type == BlkxChunkType.Ignore)
|| (chunk.Type == BlkxChunkType.Uncompressed))
{
streams[i] = rawStream;
}
else
{
Stream compStream = chunk.Type switch
{
BlkxChunkType.AdcCompressed => new ADCStream(rawStream, CompressionMode.Decompress),
BlkxChunkType.ZlibCompressed => new ZlibStream(rawStream, CompressionMode.Decompress),
BlkxChunkType.Bz2Compressed => new BZip2Stream(rawStream, CompressionMode.Decompress, false),
_ => throw new InvalidOperationException("Invalid chunk type")
};
var memStream = new MemoryStream();
compStream.CopyTo(memStream);
compStream.Dispose();
memStream.Position = 0;
streams[i] = memStream;
}
rawStream.Dispose();
rawStreams[i] = null;
}
else
{
streams[i] = null;
}
});
return new CompositeStream((IEnumerable<Stream>)streams.Where(s => s is not null));
}
public override int Read(byte[] buffer, int offset, int count)
{
if (_isEnded) return 0;
int readCount = _chunkStream!.Read(buffer, offset, count);
_chunkPos += readCount;
while (readCount < count)
{
// Current chunk has ended, so we have to continue reading from the next chunk.
_chunkIndex = GetNextChunk();
if (_chunkIndex < 0)
{
// We have reached the last chunk
_isEnded = true;
_chunkPos = 0;
_position += readCount;
return readCount;
}
_chunkStream = GetChunkStream();
int rc = _chunkStream.Read(buffer, offset + readCount, count - readCount);
_chunkPos = rc;
readCount += rc;
}
_position += readCount;
return readCount;
}
public override void Flush()
{ }
public override long Seek(long offset, SeekOrigin origin)
{
switch (origin)
{
case SeekOrigin.Begin:
Position = offset;
break;
case SeekOrigin.Current:
Position += offset;
break;
case SeekOrigin.End:
Position = Length - offset;
break;
}
return Position;
}
public override void SetLength(long value)
=> throw new NotSupportedException();
public override void Write(byte[] buffer, int offset, int count)
=> throw new NotSupportedException();
protected override void Dispose(bool disposing)
{ }
}
}
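
The Position setter above reduces to locating the chunk that contains an absolute offset and then skipping forward inside that (possibly compressed) chunk. A stripped-down sketch of just that mapping, over plain (offset, length) pairs rather than BLKX chunks:

using System.Collections.Generic;

internal static class ChunkSeekSketch
{
    // Returns the chunk index containing position and the remaining offset inside it,
    // or (-1, 0) when position lies past the last chunk.
    public static (int Index, long OffsetInChunk) Locate(
        IReadOnlyList<(long Offset, long Length)> chunks, long position)
    {
        for (int i = 0; i < chunks.Count; i++)
        {
            var (offset, length) = chunks[i];
            if (offset <= position && position < offset + length)
            {
                return (i, position - offset);
            }
        }
        return (-1, 0);
    }
}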

View File

@@ -1,52 +0,0 @@
using SharpCompress.Common.Dmg.HFS;
using System;
using System.Collections.Generic;
namespace SharpCompress.Common.Dmg
{
public abstract class DmgEntry : Entry
{
public override string Key { get; }
public override bool IsDirectory { get; }
public override long Size { get; }
public override long CompressedSize { get; }
public override CompressionType CompressionType { get; }
public override DateTime? LastModifiedTime { get; }
public override DateTime? CreatedTime { get; }
public override DateTime? LastAccessedTime { get; }
public override DateTime? ArchivedTime { get; }
public override long Crc { get; } = 0; // Not stored
public override string? LinkTarget { get; } = null;
public override bool IsEncrypted { get; } = false;
public override bool IsSplitAfter { get; } = false;
internal override IEnumerable<FilePart> Parts { get; }
internal DmgEntry(HFSCatalogRecord record, string path, long size, DmgFilePart part)
{
Key = path;
IsDirectory = record.Type == HFSCatalogRecordType.Folder;
Size = CompressedSize = size; // There is no way to get the actual compressed size or the compression type of
CompressionType = CompressionType.Unknown; // a file in a DMG archive since the files are nested inside the HFS partition.
Parts = part.AsEnumerable();
if (IsDirectory)
{
var folder = (HFSCatalogFolder)record;
LastModifiedTime = (folder.AttributeModDate > folder.ContentModDate) ? folder.AttributeModDate : folder.ContentModDate;
CreatedTime = folder.CreateDate;
LastAccessedTime = folder.AccessDate;
ArchivedTime = folder.BackupDate;
}
else
{
var file = (HFSCatalogFile)record;
LastModifiedTime = (file.AttributeModDate > file.ContentModDate) ? file.AttributeModDate : file.ContentModDate;
CreatedTime = file.CreateDate;
LastAccessedTime = file.AccessDate;
ArchivedTime = file.BackupDate;
}
}
}
}

View File

@@ -1,21 +0,0 @@
using System.IO;
namespace SharpCompress.Common.Dmg
{
internal sealed class DmgFilePart : FilePart
{
private readonly Stream _stream;
internal override string FilePartName { get; }
public DmgFilePart(Stream stream, string fileName)
: base(new ArchiveEncoding())
{
_stream = stream;
FilePartName = fileName;
}
internal override Stream GetCompressedStream() => _stream;
internal override Stream? GetRawStream() => null;
}
}

View File

@@ -1,183 +0,0 @@
using SharpCompress.Common.Dmg.Headers;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Text;
using System.Xml.Linq;
namespace SharpCompress.Common.Dmg
{
internal static class DmgUtil
{
private const string MalformedXmlMessage = "Malformed XML block";
private static T[] ParseArray<T>(in XElement parent, in Func<XElement, T> parseElement)
{
var list = new List<T>();
foreach (var node in parent.Elements())
list.Add(parseElement(node));
return list.ToArray();
}
private static Dictionary<string, T> ParseDict<T>(in XElement parent, in Func<XElement, T> parseValue)
{
var dict = new Dictionary<string, T>();
string? key = null;
foreach (var node in parent.Elements())
{
if (string.Equals(node.Name.LocalName, "key", StringComparison.Ordinal))
{
key = node.Value;
}
else if (key is not null)
{
var value = parseValue(node);
dict.Add(key, value);
key = null;
}
}
return dict;
}
private static Dictionary<string, Dictionary<string, Dictionary<string, string>[]>> ParsePList(in XDocument doc)
{
var dictNode = doc.Root?.Element("dict");
if (dictNode is null) throw new InvalidFormatException(MalformedXmlMessage);
static Dictionary<string, string> ParseObject(XElement parent)
=> ParseDict(parent, node => node.Value);
static Dictionary<string, string>[] ParseObjectArray(XElement parent)
=> ParseArray(parent, ParseObject);
static Dictionary<string, Dictionary<string, string>[]> ParseSubDict(XElement parent)
=> ParseDict(parent, ParseObjectArray);
return ParseDict(dictNode, ParseSubDict);
}
private static BlkxData CreateDataFromDict(in Dictionary<string, string> dict)
{
static bool TryParseHex(string? s, out uint value)
{
value = 0;
if (string.IsNullOrEmpty(s)) return false;
if (s!.StartsWith("0x", StringComparison.OrdinalIgnoreCase))
s = s.Substring(2);
return uint.TryParse(s, NumberStyles.HexNumber, CultureInfo.InvariantCulture, out value);
}
if (!dict.TryGetValue("ID", out string? idStr) || !int.TryParse(idStr, out int id))
throw new InvalidFormatException(MalformedXmlMessage);
if (!dict.TryGetValue("Name", out string? name))
throw new InvalidFormatException(MalformedXmlMessage);
if (!dict.TryGetValue("Attributes", out string? attribStr) || !TryParseHex(attribStr, out uint attribs))
throw new InvalidFormatException(MalformedXmlMessage);
if (!dict.TryGetValue("Data", out string? base64Data) || string.IsNullOrEmpty(base64Data))
throw new InvalidFormatException(MalformedXmlMessage);
try
{
var data = Convert.FromBase64String(base64Data);
if (!BlkxTable.TryRead(data, out var table))
throw new InvalidFormatException("Invalid BLKX table");
return new BlkxData(id, name, attribs, table!);
}
catch (FormatException ex)
{
throw new InvalidFormatException(MalformedXmlMessage, ex);
}
}
public static DmgBlockDataStream? LoadHFSPartitionStream(Stream baseStream, DmgHeader header)
{
if ((header.XMLOffset + header.XMLLength) >= (ulong)baseStream.Length)
throw new IncompleteArchiveException("XML block incomplete");
if ((header.DataForkOffset + header.DataForkLength) >= (ulong)baseStream.Length)
throw new IncompleteArchiveException("Data block incomplete");
baseStream.Position = (long)header.XMLOffset;
var xmlBuffer = new byte[header.XMLLength];
baseStream.Read(xmlBuffer, 0, (int)header.XMLLength);
var xml = Encoding.ASCII.GetString(xmlBuffer);
var doc = XDocument.Parse(xml);
var pList = ParsePList(doc);
if (!pList.TryGetValue("resource-fork", out var resDict) || !resDict.TryGetValue("blkx", out var blkxDicts))
throw new InvalidFormatException(MalformedXmlMessage);
var objs = new BlkxData[blkxDicts.Length];
for (int i = 0; i < objs.Length; i++)
objs[i] = CreateDataFromDict(blkxDicts[i]);
// Index 0 is the protective MBR partition
// Index 1 is the GPT header
// Index 2 is the GPT partition table
try
{
var headerData = objs[1];
using var headerStream = new DmgBlockDataStream(baseStream, header, headerData.Table);
if (!GptHeader.TryRead(headerStream, out var gptHeader))
throw new InvalidFormatException("Invalid GPT header");
var tableData = objs[2];
using var tableStream = new DmgBlockDataStream(baseStream, header, tableData.Table);
var gptTable = new GptPartitionEntry[gptHeader!.EntriesCount];
for (int i = 0; i < gptHeader.EntriesCount; i++)
gptTable[i] = GptPartitionEntry.Read(tableStream);
foreach (var entry in gptTable)
{
if (entry.TypeGuid == PartitionFormat.AppleHFS)
{
BlkxData? partitionData = null;
for (int i = 3; i < objs.Length; i++)
{
if (objs[i].Name.StartsWith(entry.Name, StringComparison.Ordinal))
{
partitionData = objs[i];
break;
}
}
if (partitionData is null)
throw new InvalidFormatException($"Missing partition {entry.Name}");
return new DmgBlockDataStream(baseStream, header, partitionData.Table);
}
}
return null;
}
catch (EndOfStreamException ex)
{
throw new IncompleteArchiveException("Partition incomplete", ex);
}
}
private sealed class BlkxData
{
public int Id { get; }
public string Name { get; }
public uint Attributes { get; }
public BlkxTable Table { get; }
public BlkxData(int id, string name, uint attributes, BlkxTable table)
{
Id = id;
Name = name;
Attributes = attributes;
Table = table;
}
}
}
}

View File

@@ -1,38 +0,0 @@
using SharpCompress.Archives.Dmg;
using SharpCompress.Common.Dmg.Headers;
using SharpCompress.Common.Dmg.HFS;
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Common.Dmg
{
public class DmgVolume : Volume
{
private readonly DmgArchive _archive;
private readonly string _fileName;
internal DmgHeader Header { get; }
public DmgVolume(DmgArchive archive, Stream stream, string fileName, Readers.ReaderOptions readerOptions)
: base(stream, readerOptions)
{
_archive = archive;
_fileName = fileName;
long pos = stream.Length - DmgHeader.HeaderSize;
if (pos < 0) throw new InvalidFormatException("Invalid DMG volume");
stream.Position = pos;
if (DmgHeader.TryRead(stream, out var header)) Header = header!;
else throw new InvalidFormatException("Invalid DMG volume");
}
internal IEnumerable<DmgArchiveEntry> LoadEntries()
{
var partitionStream = DmgUtil.LoadHFSPartitionStream(Stream, Header);
if (partitionStream is null) return Array.Empty<DmgArchiveEntry>();
else return HFSUtil.LoadEntriesFromPartition(partitionStream, _fileName, _archive);
}
}
}

View File

@@ -1,336 +0,0 @@
using System;
namespace SharpCompress.Common.Dmg.HFS
{
internal sealed class HFSCatalogKey : HFSStructBase, IEquatable<HFSCatalogKey>, IComparable<HFSCatalogKey>, IComparable
{
private readonly StringComparer _comparer;
public uint ParentId { get; }
public string Name { get; }
private static StringComparer GetComparer(HFSKeyCompareType compareType, bool isHFSX)
{
if (isHFSX)
{
return compareType switch
{
HFSKeyCompareType.CaseFolding => StringComparer.InvariantCultureIgnoreCase,
HFSKeyCompareType.BinaryCompare => StringComparer.Ordinal,
_ => StringComparer.InvariantCultureIgnoreCase
};
}
else
{
return StringComparer.InvariantCultureIgnoreCase;
}
}
public HFSCatalogKey(uint parentId, string name, HFSKeyCompareType compareType, bool isHFSX)
{
ParentId = parentId;
Name = name;
_comparer = GetComparer(compareType, isHFSX);
}
public HFSCatalogKey(byte[] key, HFSKeyCompareType compareType, bool isHFSX)
{
ReadOnlySpan<byte> data = key.AsSpan();
ParentId = ReadUInt32(ref data);
Name = ReadString(ref data, true);
_comparer = GetComparer(compareType, isHFSX);
}
public bool Equals(HFSCatalogKey? other)
{
if (other is null) return false;
else return (ParentId == other.ParentId) && _comparer.Equals(Name, other.Name);
}
public override bool Equals(object? obj)
{
if (obj is HFSCatalogKey other) return Equals(other);
else return false;
}
public int CompareTo(HFSCatalogKey? other)
{
if (other is null) return 1;
int result = ParentId.CompareTo(other.ParentId);
if (result == 0) result = _comparer.Compare(Name, other.Name);
return result;
}
public int CompareTo(object? obj)
{
if (obj is null) return 1;
else if (obj is HFSCatalogKey other) return CompareTo(other);
else throw new ArgumentException("Object is not of type CatalogKey", nameof(obj));
}
public override int GetHashCode()
=> ParentId.GetHashCode() ^ _comparer.GetHashCode(Name);
public static bool operator ==(HFSCatalogKey? left, HFSCatalogKey? right)
{
if (left is null) return right is null;
else return left.Equals(right);
}
public static bool operator !=(HFSCatalogKey? left, HFSCatalogKey? right)
{
if (left is null) return right is not null;
else return !left.Equals(right);
}
public static bool operator <(HFSCatalogKey? left, HFSCatalogKey? right)
{
if (left is null) return right is not null;
else return left.CompareTo(right) < 0;
}
public static bool operator >(HFSCatalogKey? left, HFSCatalogKey? right)
{
if (left is null) return false;
else return left.CompareTo(right) > 0;
}
public static bool operator <=(HFSCatalogKey? left, HFSCatalogKey? right)
{
if (left is null) return true;
else return left.CompareTo(right) <= 0;
}
public static bool operator >=(HFSCatalogKey? left, HFSCatalogKey? right)
{
if (left is null) return right is null;
else return left.CompareTo(right) >= 0;
}
}
internal enum HFSCatalogRecordType : ushort
{
Folder = 0x0001,
File = 0x0002,
FolderThread = 0x0003,
FileThread = 0x0004
}
internal abstract class HFSCatalogRecord : HFSStructBase
{
public HFSCatalogRecordType Type { get; }
protected HFSCatalogRecord(HFSCatalogRecordType type)
=> Type = type;
public static bool TryRead(ref ReadOnlySpan<byte> data, HFSKeyCompareType compareType, bool isHFSX, out HFSCatalogRecord? record)
{
record = null;
ushort rawType = ReadUInt16(ref data);
if (!Enum.IsDefined(typeof(HFSCatalogRecordType), rawType)) return false;
var type = (HFSCatalogRecordType)rawType;
switch (type)
{
case HFSCatalogRecordType.Folder:
record = HFSCatalogFolder.Read(ref data);
return true;
case HFSCatalogRecordType.File:
record = HFSCatalogFile.Read(ref data);
return true;
case HFSCatalogRecordType.FolderThread:
record = HFSCatalogThread.Read(ref data, false, compareType, isHFSX);
return true;
case HFSCatalogRecordType.FileThread:
record = HFSCatalogThread.Read(ref data, true, compareType, isHFSX);
return true;
}
return false;
}
}
internal sealed class HFSCatalogFolder : HFSCatalogRecord
{
public uint Valence { get; }
public uint FolderId { get; }
public DateTime CreateDate { get; }
public DateTime ContentModDate { get; }
public DateTime AttributeModDate { get; }
public DateTime AccessDate { get; }
public DateTime BackupDate { get; }
public HFSPermissions Permissions { get; }
public HFSFolderInfo Info { get; }
public uint TextEncoding { get; }
private HFSCatalogFolder(
uint valence,
uint folderId,
DateTime createDate,
DateTime contentModDate,
DateTime attributeModDate,
DateTime accessDate,
DateTime backupDate,
HFSPermissions permissions,
HFSFolderInfo info,
uint textEncoding)
: base(HFSCatalogRecordType.Folder)
{
Valence = valence;
FolderId = folderId;
CreateDate = createDate;
ContentModDate = contentModDate;
AttributeModDate = attributeModDate;
AccessDate = accessDate;
BackupDate = backupDate;
Permissions = permissions;
Info = info;
TextEncoding = textEncoding;
}
public static HFSCatalogFolder Read(ref ReadOnlySpan<byte> data)
{
_ = ReadUInt16(ref data); // reserved
uint valence = ReadUInt32(ref data);
uint folderId = ReadUInt32(ref data);
var createDate = ReadDate(ref data);
var contentModDate = ReadDate(ref data);
var attributeModDate = ReadDate(ref data);
var accessDate = ReadDate(ref data);
var backupDate = ReadDate(ref data);
var permissions = HFSPermissions.Read(ref data);
var info = HFSFolderInfo.Read(ref data);
uint textEncoding = ReadUInt32(ref data);
_ = ReadUInt32(ref data); // reserved
return new HFSCatalogFolder(
valence,
folderId,
createDate,
contentModDate,
attributeModDate,
accessDate,
backupDate,
permissions,
info,
textEncoding);
}
}
internal enum HFSFileFlags : ushort
{
LockedBit = 0x0000,
LockedMask = 0x0001,
ThreadExistsBit = 0x0001,
ThreadExistsMask = 0x0002
}
internal sealed class HFSCatalogFile : HFSCatalogRecord
{
public HFSFileFlags Flags { get; }
public uint FileId { get; }
public DateTime CreateDate { get; }
public DateTime ContentModDate { get; }
public DateTime AttributeModDate { get; }
public DateTime AccessDate { get; }
public DateTime BackupDate { get; }
public HFSPermissions Permissions { get; }
public HFSFileInfo Info { get; }
public uint TextEncoding { get; }
public HFSForkData DataFork { get; }
public HFSForkData ResourceFork { get; }
private HFSCatalogFile(
HFSFileFlags flags,
uint fileId,
DateTime createDate,
DateTime contentModDate,
DateTime attributeModDate,
DateTime accessDate,
DateTime backupDate,
HFSPermissions permissions,
HFSFileInfo info,
uint textEncoding,
HFSForkData dataFork,
HFSForkData resourceFork)
:base(HFSCatalogRecordType.File)
{
Flags = flags;
FileId = fileId;
CreateDate = createDate;
ContentModDate = contentModDate;
AttributeModDate = attributeModDate;
AccessDate = accessDate;
BackupDate = backupDate;
Permissions = permissions;
Info = info;
TextEncoding = textEncoding;
DataFork = dataFork;
ResourceFork = resourceFork;
}
public static HFSCatalogFile Read(ref ReadOnlySpan<byte> data)
{
var flags = (HFSFileFlags)ReadUInt16(ref data);
_ = ReadUInt32(ref data); // reserved
uint fileId = ReadUInt32(ref data);
var createDate = ReadDate(ref data);
var contentModDate = ReadDate(ref data);
var attributeModDate = ReadDate(ref data);
var accessDate = ReadDate(ref data);
var backupDate = ReadDate(ref data);
var permissions = HFSPermissions.Read(ref data);
var info = HFSFileInfo.Read(ref data);
uint textEncoding = ReadUInt32(ref data);
_ = ReadUInt32(ref data); // reserved
var dataFork = HFSForkData.Read(ref data);
var resourceFork = HFSForkData.Read(ref data);
return new HFSCatalogFile(
flags,
fileId,
createDate,
contentModDate,
attributeModDate,
accessDate,
backupDate,
permissions,
info,
textEncoding,
dataFork,
resourceFork);
}
}
internal sealed class HFSCatalogThread : HFSCatalogRecord
{
public uint ParentId { get; }
public string NodeName { get; }
public HFSCatalogKey CatalogKey { get; }
private HFSCatalogThread(uint parentId, string nodeName, bool isFile, HFSKeyCompareType compareType, bool isHFSX)
: base(isFile ? HFSCatalogRecordType.FileThread : HFSCatalogRecordType.FolderThread)
{
ParentId = parentId;
NodeName = nodeName;
CatalogKey = new HFSCatalogKey(ParentId, NodeName, compareType, isHFSX);
}
public static HFSCatalogThread Read(ref ReadOnlySpan<byte> data, bool isFile, HFSKeyCompareType compareType, bool isHFSX)
{
_ = ReadInt16(ref data); // reserved
uint parentId = ReadUInt32(ref data);
string nodeName = ReadString(ref data, true);
return new HFSCatalogThread(parentId, nodeName, isFile, compareType, isHFSX);
}
}
}

View File

@@ -1,31 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Common.Dmg.HFS
{
internal sealed class HFSExtentDescriptor : HFSStructBase
{
public uint StartBlock { get; }
public uint BlockCount { get; }
private HFSExtentDescriptor(uint startBlock, uint blockCount)
{
StartBlock = startBlock;
BlockCount = blockCount;
}
public static HFSExtentDescriptor Read(Stream stream)
{
return new HFSExtentDescriptor(
ReadUInt32(stream),
ReadUInt32(stream));
}
public static HFSExtentDescriptor Read(ref ReadOnlySpan<byte> data)
{
return new HFSExtentDescriptor(
ReadUInt32(ref data),
ReadUInt32(ref data));
}
}
}

View File

@@ -1,115 +0,0 @@
using System;
using System.Collections.Generic;
namespace SharpCompress.Common.Dmg.HFS
{
internal sealed class HFSExtentKey : HFSStructBase, IEquatable<HFSExtentKey>, IComparable<HFSExtentKey>, IComparable
{
public byte ForkType { get; }
public uint FileId { get; }
public uint StartBlock { get; }
public HFSExtentKey(byte forkType, uint fileId, uint startBlock)
{
ForkType = forkType;
FileId = fileId;
StartBlock = startBlock;
}
public HFSExtentKey(byte[] key)
{
ReadOnlySpan<byte> data = key.AsSpan();
ForkType = ReadUInt8(ref data);
_ = ReadUInt8(ref data); // padding
FileId = ReadUInt32(ref data);
StartBlock = ReadUInt32(ref data);
}
public bool Equals(HFSExtentKey? other)
{
if (other is null) return false;
else return (ForkType == other.ForkType) && (FileId == other.FileId) && (StartBlock == other.StartBlock);
}
public override bool Equals(object? obj)
{
if (obj is HFSExtentKey other) return Equals(other);
else return false;
}
public int CompareTo(HFSExtentKey? other)
{
if (other is null) return 1;
int result = FileId.CompareTo(other.FileId);
if (result == 0) result = ForkType.CompareTo(other.ForkType);
if (result == 0) result = StartBlock.CompareTo(other.StartBlock);
return result;
}
public int CompareTo(object? obj)
{
if (obj is null) return 1;
else if (obj is HFSExtentKey other) return CompareTo(other);
else throw new ArgumentException("Object is not of type ExtentKey", nameof(obj));
}
public override int GetHashCode()
=> ForkType.GetHashCode() ^ FileId.GetHashCode() ^ StartBlock.GetHashCode();
public static bool operator ==(HFSExtentKey? left, HFSExtentKey? right)
{
if (left is null) return right is null;
else return left.Equals(right);
}
public static bool operator !=(HFSExtentKey? left, HFSExtentKey? right)
{
if (left is null) return right is not null;
else return !left.Equals(right);
}
public static bool operator <(HFSExtentKey? left, HFSExtentKey? right)
{
if (left is null) return right is not null;
else return left.CompareTo(right) < 0;
}
public static bool operator >(HFSExtentKey? left, HFSExtentKey? right)
{
if (left is null) return false;
else return left.CompareTo(right) > 0;
}
public static bool operator <=(HFSExtentKey? left, HFSExtentKey? right)
{
if (left is null) return true;
else return left.CompareTo(right) <= 0;
}
public static bool operator >=(HFSExtentKey? left, HFSExtentKey? right)
{
if (left is null) return right is null;
else return left.CompareTo(right) >= 0;
}
}
internal sealed class HFSExtentRecord : HFSStructBase
{
private const int ExtentCount = 8;
public IReadOnlyList<HFSExtentDescriptor> Extents { get; }
private HFSExtentRecord(IReadOnlyList<HFSExtentDescriptor> extents)
=> Extents = extents;
public static HFSExtentRecord Read(ref ReadOnlySpan<byte> data)
{
var extents = new HFSExtentDescriptor[ExtentCount];
for (int i = 0; i < ExtentCount; i++)
extents[i] = HFSExtentDescriptor.Read(ref data);
return new HFSExtentRecord(extents);
}
}
}

View File

@@ -1,145 +0,0 @@
using System;
namespace SharpCompress.Common.Dmg.HFS
{
internal struct HFSPoint
{
public short V;
public short H;
}
internal struct HFSRect
{
public short Top;
public short Left;
public short Bottom;
public short Right;
}
[Flags]
internal enum HFSFinderFlags : ushort
{
None = 0x0000,
IsOnDesk = 0x0001, /* Files and folders (System 6) */
Color = 0x000E, /* Files and folders */
IsShared = 0x0040, /* Files only (Applications only) If */
/* clear, the application needs */
/* to write to its resource fork, */
/* and therefore cannot be shared */
/* on a server */
HasNoINITs = 0x0080, /* Files only (Extensions/Control */
/* Panels only) */
/* This file contains no INIT resource */
HasBeenInited = 0x0100, /* Files only. Clear if the file */
/* contains desktop database resources */
/* ('BNDL', 'FREF', 'open', 'kind'...) */
/* that have not been added yet. Set */
/* only by the Finder. */
/* Reserved for folders */
HasCustomIcon = 0x0400, /* Files and folders */
IsStationery = 0x0800, /* Files only */
NameLocked = 0x1000, /* Files and folders */
HasBundle = 0x2000, /* Files only */
IsInvisible = 0x4000, /* Files and folders */
IsAlias = 0x8000 /* Files only */
}
[Flags]
internal enum HFSExtendedFinderFlags : ushort
{
None = 0x0000,
ExtendedFlagsAreInvalid = 0x8000, /* The other extended flags */
/* should be ignored */
HasCustomBadge = 0x0100, /* The file or folder has a */
/* badge resource */
HasRoutingInfo = 0x0004 /* The file contains routing */
/* info resource */
}
internal sealed class HFSFileInfo : HFSStructBase
{
public string FileType { get; } /* The type of the file */
public string FileCreator { get; } /* The file's creator */
public HFSFinderFlags FinderFlags { get; }
public HFSPoint Location { get; } /* File's location in the folder. */
public HFSExtendedFinderFlags ExtendedFinderFlags { get; }
public int PutAwayFolderId { get; }
private HFSFileInfo(
string fileType,
string fileCreator,
HFSFinderFlags finderFlags,
HFSPoint location,
HFSExtendedFinderFlags extendedFinderFlags,
int putAwayFolderId)
{
FileType = fileType;
FileCreator = fileCreator;
FinderFlags = finderFlags;
Location = location;
ExtendedFinderFlags = extendedFinderFlags;
PutAwayFolderId = putAwayFolderId;
}
public static HFSFileInfo Read(ref ReadOnlySpan<byte> data)
{
string fileType = ReadOSType(ref data);
string fileCreator = ReadOSType(ref data);
var finderFlags = (HFSFinderFlags)ReadUInt16(ref data);
var location = ReadPoint(ref data);
_ = ReadUInt16(ref data); // reserved
data = data.Slice(4 * sizeof(short)); // reserved
var extendedFinderFlags = (HFSExtendedFinderFlags)ReadUInt16(ref data);
_ = ReadInt16(ref data); // reserved
int putAwayFolderId = ReadInt32(ref data);
return new HFSFileInfo(fileType, fileCreator, finderFlags, location, extendedFinderFlags, putAwayFolderId);
}
}
internal sealed class HFSFolderInfo : HFSStructBase
{
public HFSRect WindowBounds { get; } /* The position and dimension of the */
/* folder's window */
public HFSFinderFlags FinderFlags { get; }
public HFSPoint Location { get; } /* Folder's location in the parent */
/* folder. If set to {0, 0}, the Finder */
/* will place the item automatically */
public HFSPoint ScrollPosition { get; } /* Scroll position (for icon views) */
public HFSExtendedFinderFlags ExtendedFinderFlags { get; }
public int PutAwayFolderId { get; }
private HFSFolderInfo(
HFSRect windowBounds,
HFSFinderFlags finderFlags,
HFSPoint location,
HFSPoint scrollPosition,
HFSExtendedFinderFlags extendedFinderFlags,
int putAwayFolderId)
{
WindowBounds = windowBounds;
FinderFlags = finderFlags;
Location = location;
ScrollPosition = scrollPosition;
ExtendedFinderFlags = extendedFinderFlags;
PutAwayFolderId = putAwayFolderId;
}
public static HFSFolderInfo Read(ref ReadOnlySpan<byte> data)
{
var windowBounds = ReadRect(ref data);
var finderFlags = (HFSFinderFlags)ReadUInt16(ref data);
var location = ReadPoint(ref data);
_ = ReadUInt16(ref data); // reserved
var scrollPosition = ReadPoint(ref data);
_ = ReadInt32(ref data); // reserved
var extendedFinderFlags = (HFSExtendedFinderFlags)ReadUInt16(ref data);
_ = ReadInt16(ref data); // reserved
int putAwayFolderId = ReadInt32(ref data);
return new HFSFolderInfo(windowBounds, finderFlags, location, scrollPosition, extendedFinderFlags, putAwayFolderId);
}
}
}

View File

@@ -1,50 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Common.Dmg.HFS
{
internal sealed class HFSForkData : HFSStructBase
{
private const int ExtentCount = 8;
public ulong LogicalSize { get; }
public uint ClumpSize { get; }
public uint TotalBlocks { get; }
public IReadOnlyList<HFSExtentDescriptor> Extents { get; }
private HFSForkData(ulong logicalSize, uint clumpSize, uint totalBlocks, IReadOnlyList<HFSExtentDescriptor> extents)
{
LogicalSize = logicalSize;
ClumpSize = clumpSize;
TotalBlocks = totalBlocks;
Extents = extents;
}
public static HFSForkData Read(Stream stream)
{
ulong logicalSize = ReadUInt64(stream);
uint clumpSize = ReadUInt32(stream);
uint totalBlocks = ReadUInt32(stream);
var extents = new HFSExtentDescriptor[ExtentCount];
for (int i = 0; i < ExtentCount; i++)
extents[i] = HFSExtentDescriptor.Read(stream);
return new HFSForkData(logicalSize, clumpSize, totalBlocks, extents);
}
public static HFSForkData Read(ref ReadOnlySpan<byte> data)
{
ulong logicalSize = ReadUInt64(ref data);
uint clumpSize = ReadUInt32(ref data);
uint totalBlocks = ReadUInt32(ref data);
var extents = new HFSExtentDescriptor[ExtentCount];
for (int i = 0; i < ExtentCount; i++)
extents[i] = HFSExtentDescriptor.Read(ref data);
return new HFSForkData(logicalSize, clumpSize, totalBlocks, extents);
}
}
}

View File

@@ -1,196 +0,0 @@
using SharpCompress.IO;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace SharpCompress.Common.Dmg.HFS
{
internal sealed class HFSForkStream : Stream
{
private readonly Stream _baseStream;
private readonly HFSVolumeHeader _volumeHeader;
private readonly IReadOnlyList<HFSExtentDescriptor> _extents;
private long _position;
private bool _isEnded;
private int _extentIndex;
private Stream? _extentStream;
public override bool CanRead => true;
public override bool CanWrite => false;
public override bool CanSeek => true;
public override long Length { get; }
public override long Position
{
get => _position;
set
{
if ((value < 0) || (value > Length)) throw new ArgumentOutOfRangeException(nameof(value));
if (value == Length)
{
// End of the stream
_position = Length;
_isEnded = true;
_extentIndex = -1;
_extentStream = null;
}
else if (value != _position)
{
_position = value;
// We first have to determine in which extent we are now, then we seek to the exact position in that extent.
long offsetInExtent = _position;
for (int i = 0; i < _extents.Count; i++)
{
var extent = _extents[i];
long extentSize = extent.BlockCount * _volumeHeader.BlockSize;
if (offsetInExtent < extentSize)
{
if (i == _extentIndex)
{
// We are in the same extent so just seek to the correct position
_extentStream!.Position = offsetInExtent;
}
else
{
_extentIndex = i;
_extentStream = GetExtentStream();
_extentStream.Position = offsetInExtent;
}
break;
}
else
{
offsetInExtent -= extentSize;
}
}
}
}
}
public HFSForkStream(Stream baseStream, HFSVolumeHeader volumeHeader, HFSForkData forkData)
{
_baseStream = baseStream;
_volumeHeader = volumeHeader;
_extents = forkData.Extents;
Length = (long)forkData.LogicalSize;
_position = 0;
_extentIndex = -1;
_extentIndex = GetNextExtent();
_isEnded = _extentIndex < 0;
if (!_isEnded) _extentStream = GetExtentStream();
}
public HFSForkStream(
Stream baseStream, HFSVolumeHeader volumeHeader, HFSForkData forkData, uint fileId,
IReadOnlyDictionary<HFSExtentKey, HFSExtentRecord> extents)
{
_baseStream = baseStream;
_volumeHeader = volumeHeader;
Length = (long)forkData.LogicalSize;
uint blocks = (uint)forkData.Extents.Sum(e => e.BlockCount);
var totalExtents = new List<HFSExtentDescriptor>(forkData.Extents);
_extents = totalExtents;
var nextKey = new HFSExtentKey(0, fileId, blocks);
while (extents.TryGetValue(nextKey, out var record))
{
blocks += (uint)record.Extents.Sum(e => e.BlockCount);
totalExtents.AddRange(record.Extents);
nextKey = new HFSExtentKey(0, fileId, blocks);
}
_position = 0;
_extentIndex = -1;
_extentIndex = GetNextExtent();
_isEnded = _extentIndex < 0;
if (!_isEnded) _extentStream = GetExtentStream();
}
private int GetNextExtent()
{
int index = _extentIndex + 1;
if (index >= _extents.Count) return -1;
var extent = _extents[index];
if ((extent.StartBlock == 0) && (extent.BlockCount == 0)) return -1;
return index;
}
private Stream GetExtentStream()
{
if (_extentIndex < 0)
throw new InvalidOperationException("Invalid extent index");
var extent = _extents[_extentIndex];
return new HFSExtentStream(_baseStream, _volumeHeader, extent);
}
public override void Flush()
{ }
public override int Read(byte[] buffer, int offset, int count)
{
if (_isEnded) return 0;
count = (int)Math.Min(count, Length - Position);
int readCount = _extentStream!.Read(buffer, offset, count);
while (readCount < count)
{
_extentIndex = GetNextExtent();
if (_extentIndex < 0)
{
_isEnded = true;
return readCount;
}
_extentStream = GetExtentStream();
readCount += _extentStream.Read(buffer, offset + readCount, count - readCount);
}
_position += readCount;
return readCount;
}
public override long Seek(long offset, SeekOrigin origin)
{
switch (origin)
{
case SeekOrigin.Begin:
Position = offset;
break;
case SeekOrigin.Current:
Position += offset;
break;
case SeekOrigin.End:
Position = Length - offset;
break;
}
return Position;
}
public override void SetLength(long value)
=> throw new NotSupportedException();
public override void Write(byte[] buffer, int offset, int count)
=> throw new NotSupportedException();
private sealed class HFSExtentStream : SeekableSubStream
{
public HFSExtentStream(Stream stream, HFSVolumeHeader volumeHeader, HFSExtentDescriptor extent)
: base(stream, (long)extent.StartBlock * volumeHeader.BlockSize, (long)extent.BlockCount * volumeHeader.BlockSize)
{ }
}
}
}

View File

@@ -1,91 +0,0 @@
using System;
namespace SharpCompress.Common.Dmg.HFS
{
internal abstract class HFSKeyedRecord : HFSStructBase
{
private readonly HFSKeyCompareType _compareType;
private readonly bool _isHFSX;
private HFSCatalogKey? _catalogKey;
private HFSExtentKey? _extentKey;
public byte[] Key { get; }
public HFSCatalogKey GetCatalogKey() => _catalogKey ??= new HFSCatalogKey(Key, _compareType, _isHFSX);
public HFSExtentKey GetExtentKey() => _extentKey ??= new HFSExtentKey(Key);
protected HFSKeyedRecord(byte[] key, HFSKeyCompareType compareType, bool isHFSX)
{
Key = key;
_compareType = compareType;
_isHFSX = isHFSX;
}
}
internal sealed class HFSPointerRecord : HFSKeyedRecord
{
public uint NodeNumber { get; }
private HFSPointerRecord(byte[] key, uint nodeNumber, HFSKeyCompareType compareType, bool isHFSX)
: base(key, compareType, isHFSX)
{
NodeNumber = nodeNumber;
}
public static HFSPointerRecord Read(ref ReadOnlySpan<byte> data, HFSTreeHeaderRecord headerRecord, bool isHFSX)
{
bool isBigKey = headerRecord.Attributes.HasFlag(HFSTreeAttributes.BigKeys);
ushort keyLength = isBigKey ? ReadUInt16(ref data) : ReadUInt8(ref data);
if (!headerRecord.Attributes.HasFlag(HFSTreeAttributes.VariableIndexKeys)) keyLength = headerRecord.MaxKeyLength;
int keySize = (isBigKey ? 2 : 1) + keyLength;
var key = new byte[keyLength];
data.Slice(0, keyLength).CopyTo(key);
data = data.Slice(keyLength);
// data is always aligned to 2 bytes
if (keySize % 2 == 1) data = data.Slice(1);
uint nodeNumber = ReadUInt32(ref data);
return new HFSPointerRecord(key, nodeNumber, headerRecord.KeyCompareType, isHFSX);
}
}
internal sealed class HFSDataRecord : HFSKeyedRecord
{
public byte[] Data { get; }
private HFSDataRecord(byte[] key, byte[] data, HFSKeyCompareType compareType, bool isHFSX)
: base(key, compareType, isHFSX)
{
Data = data;
}
public static HFSDataRecord Read(ref ReadOnlySpan<byte> data, int size, HFSTreeHeaderRecord headerRecord, bool isHFSX)
{
bool isBigKey = headerRecord.Attributes.HasFlag(HFSTreeAttributes.BigKeys);
ushort keyLength = isBigKey ? ReadUInt16(ref data) : ReadUInt8(ref data);
int keySize = (isBigKey ? 2 : 1) + keyLength;
size -= keySize;
var key = new byte[keyLength];
data.Slice(0, keyLength).CopyTo(key);
data = data.Slice(keyLength);
// data is always aligned to 2 bytes
if (keySize % 2 == 1)
{
data = data.Slice(1);
size--;
}
var structData = new byte[size];
data.Slice(0, size).CopyTo(structData);
data = data.Slice(size);
return new HFSDataRecord(key, structData, headerRecord.KeyCompareType, isHFSX);
}
}
}

View File

@@ -1,35 +0,0 @@
using System;
namespace SharpCompress.Common.Dmg.HFS
{
internal sealed class HFSPermissions : HFSStructBase
{
public uint OwnerID { get; }
public uint GroupID { get; }
public byte AdminFlags { get; }
public byte OwnerFlags { get; }
public ushort FileMode { get; }
public uint Special { get; }
private HFSPermissions(uint ownerID, uint groupID, byte adminFlags, byte ownerFlags, ushort fileMode, uint special)
{
OwnerID = ownerID;
GroupID = groupID;
AdminFlags = adminFlags;
OwnerFlags = ownerFlags;
FileMode = fileMode;
Special = special;
}
public static HFSPermissions Read(ref ReadOnlySpan<byte> data)
{
return new HFSPermissions(
ReadUInt32(ref data),
ReadUInt32(ref data),
ReadUInt8(ref data),
ReadUInt8(ref data),
ReadUInt16(ref data),
ReadUInt32(ref data));
}
}
}

View File

@@ -1,187 +0,0 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Text;
namespace SharpCompress.Common.Dmg.HFS
{
internal abstract class HFSStructBase
{
private const int StringSize = 510;
private const int OSTypeSize = 4;
private static readonly DateTime Epoch = new DateTime(1904, 1, 1, 0, 0, 0, DateTimeKind.Utc);
private static readonly byte[] _buffer = new byte[StringSize];
protected static byte ReadUInt8(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(byte)) != sizeof(byte))
throw new EndOfStreamException();
return _buffer[0];
}
protected static ushort ReadUInt16(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(ushort)) != sizeof(ushort))
throw new EndOfStreamException();
return BinaryPrimitives.ReadUInt16BigEndian(_buffer);
}
protected static short ReadInt16(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(short)) != sizeof(short))
throw new EndOfStreamException();
return BinaryPrimitives.ReadInt16BigEndian(_buffer);
}
protected static uint ReadUInt32(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(uint)) != sizeof(uint))
throw new EndOfStreamException();
return BinaryPrimitives.ReadUInt32BigEndian(_buffer);
}
protected static int ReadInt32(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(int)) != sizeof(int))
throw new EndOfStreamException();
return BinaryPrimitives.ReadInt32BigEndian(_buffer);
}
protected static ulong ReadUInt64(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(ulong)) != sizeof(ulong))
throw new EndOfStreamException();
return BinaryPrimitives.ReadUInt64BigEndian(_buffer);
}
protected static long ReadInt64(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(long)) != sizeof(long))
throw new EndOfStreamException();
return BinaryPrimitives.ReadInt64BigEndian(_buffer);
}
protected static string ReadString(Stream stream)
{
ushort length = ReadUInt16(stream);
if (stream.Read(_buffer, 0, StringSize) != StringSize)
throw new EndOfStreamException();
return Encoding.Unicode.GetString(_buffer, 0, Math.Min(length * 2, StringSize));
}
protected static DateTime ReadDate(Stream stream)
{
uint seconds = ReadUInt32(stream);
var span = TimeSpan.FromSeconds(seconds);
return Epoch + span;
}
protected static byte ReadUInt8(ref ReadOnlySpan<byte> data)
{
byte val = data[0];
data = data.Slice(sizeof(byte));
return val;
}
protected static ushort ReadUInt16(ref ReadOnlySpan<byte> data)
{
ushort val = BinaryPrimitives.ReadUInt16BigEndian(data);
data = data.Slice(sizeof(ushort));
return val;
}
protected static short ReadInt16(ref ReadOnlySpan<byte> data)
{
short val = BinaryPrimitives.ReadInt16BigEndian(data);
data = data.Slice(sizeof(short));
return val;
}
protected static uint ReadUInt32(ref ReadOnlySpan<byte> data)
{
uint val = BinaryPrimitives.ReadUInt32BigEndian(data);
data = data.Slice(sizeof(uint));
return val;
}
protected static int ReadInt32(ref ReadOnlySpan<byte> data)
{
int val = BinaryPrimitives.ReadInt32BigEndian(data);
data = data.Slice(sizeof(int));
return val;
}
protected static ulong ReadUInt64(ref ReadOnlySpan<byte> data)
{
ulong val = BinaryPrimitives.ReadUInt64BigEndian(data);
data = data.Slice(sizeof(ulong));
return val;
}
protected static long ReadInt64(ref ReadOnlySpan<byte> data)
{
long val = BinaryPrimitives.ReadInt64BigEndian(data);
data = data.Slice(sizeof(long));
return val;
}
protected static string ReadString(ref ReadOnlySpan<byte> data, bool truncate)
{
int length = ReadUInt16(ref data);
if (truncate)
{
length = Math.Min(length * 2, StringSize);
data.Slice(0, length).CopyTo(_buffer);
data = data.Slice(length);
return Encoding.BigEndianUnicode.GetString(_buffer, 0, length);
}
else
{
data.Slice(0, StringSize).CopyTo(_buffer);
data = data.Slice(StringSize);
return Encoding.BigEndianUnicode.GetString(_buffer, 0, Math.Min(length * 2, StringSize));
}
}
protected static DateTime ReadDate(ref ReadOnlySpan<byte> data)
{
uint seconds = ReadUInt32(ref data);
var span = TimeSpan.FromSeconds(seconds);
return Epoch + span;
}
protected static string ReadOSType(ref ReadOnlySpan<byte> data)
{
data.Slice(0, OSTypeSize).CopyTo(_buffer);
data = data.Slice(OSTypeSize);
return Encoding.ASCII.GetString(_buffer, 0, OSTypeSize).NullTerminate();
}
protected static HFSPoint ReadPoint(ref ReadOnlySpan<byte> data)
{
return new HFSPoint()
{
V = ReadInt16(ref data),
H = ReadInt16(ref data)
};
}
protected static HFSRect ReadRect(ref ReadOnlySpan<byte> data)
{
return new HFSRect()
{
Top = ReadInt16(ref data),
Left = ReadInt16(ref data),
Bottom = ReadInt16(ref data),
Right = ReadInt16(ref data)
};
}
}
}
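A minimal usage sketch for the ref-advancing readers above (ExampleRecord is hypothetical and only illustrates the calling pattern; the real record types that follow use the same shape):
internal sealed class ExampleRecord : HFSStructBase
{
    public uint Id { get; }
    public DateTime Created { get; }
    private ExampleRecord(uint id, DateTime created)
    {
        Id = id;
        Created = created;
    }
    public static ExampleRecord Read(ReadOnlySpan<byte> buffer)
    {
        // each ReadXxx(ref data) returns one big-endian value and slices the span past it,
        // so successive calls walk the structure field by field
        var data = buffer;
        uint id = ReadUInt32(ref data);        // 4 bytes, big-endian
        DateTime created = ReadDate(ref data); // 4 bytes, seconds since the 1904-01-01 UTC epoch
        return new ExampleRecord(id, created);
    }
}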

View File

@@ -1,108 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Common.Dmg.HFS
{
internal enum HFSTreeType : byte
{
HFS = 0, // control file
User = 128, // user btree type starts from 128
Reserved = 255
}
internal enum HFSKeyCompareType : byte
{
CaseFolding = 0xCF, // case-insensitive
BinaryCompare = 0xBC // case-sensitive
}
[Flags]
internal enum HFSTreeAttributes : uint
{
None = 0x00000000,
BadClose = 0x00000001,
BigKeys = 0x00000002,
VariableIndexKeys = 0x00000004
}
internal sealed class HFSTreeHeaderRecord : HFSStructBase
{
public ushort TreeDepth;
public uint RootNode;
public uint LeafRecords;
public uint FirstLeafNode;
public uint LastLeafNode;
public ushort NodeSize;
public ushort MaxKeyLength;
public uint TotalNodes;
public uint FreeNodes;
public uint ClumpSize;
public HFSTreeType TreeType;
public HFSKeyCompareType KeyCompareType;
public HFSTreeAttributes Attributes;
private HFSTreeHeaderRecord(
ushort treeDepth,
uint rootNode,
uint leafRecords,
uint firstLeafNode,
uint lastLeafNode,
ushort nodeSize,
ushort maxKeyLength,
uint totalNodes,
uint freeNodes,
uint clumpSize,
HFSTreeType treeType,
HFSKeyCompareType keyCompareType,
HFSTreeAttributes attributes)
{
TreeDepth = treeDepth;
RootNode = rootNode;
LeafRecords = leafRecords;
FirstLeafNode = firstLeafNode;
LastLeafNode = lastLeafNode;
NodeSize = nodeSize;
MaxKeyLength = maxKeyLength;
TotalNodes = totalNodes;
FreeNodes = freeNodes;
ClumpSize = clumpSize;
TreeType = treeType;
KeyCompareType = keyCompareType;
Attributes = attributes;
}
public static HFSTreeHeaderRecord Read(Stream stream)
{
ushort treeDepth = ReadUInt16(stream);
uint rootNode = ReadUInt32(stream);
uint leafRecords = ReadUInt32(stream);
uint firstLeafNode = ReadUInt32(stream);
uint lastLeafNode = ReadUInt32(stream);
ushort nodeSize = ReadUInt16(stream);
ushort maxKeyLength = ReadUInt16(stream);
uint totalNodes = ReadUInt32(stream);
uint freeNodes = ReadUInt32(stream);
_ = ReadUInt16(stream); // reserved
uint clumpSize = ReadUInt32(stream);
var treeType = (HFSTreeType)ReadUInt8(stream);
var keyCompareType = (HFSKeyCompareType)ReadUInt8(stream);
var attributes = (HFSTreeAttributes)ReadUInt32(stream);
for (int i = 0; i < 16; i++) _ = ReadUInt32(stream); // reserved
return new HFSTreeHeaderRecord(
treeDepth,
rootNode,
leafRecords,
firstLeafNode,
lastLeafNode,
nodeSize,
maxKeyLength,
totalNodes,
freeNodes,
clumpSize,
treeType,
keyCompareType,
attributes);
}
}
}

View File

@@ -1,167 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Common.Dmg.HFS
{
internal abstract class HFSTreeNode : HFSStructBase
{
private static byte[]? _buffer = null;
public HFSTreeNodeDescriptor Descriptor { get; }
protected HFSTreeNode(HFSTreeNodeDescriptor descriptor)
=> Descriptor = descriptor;
public static bool TryRead(Stream stream, HFSTreeHeaderRecord headerRecord, bool isHFSX, out HFSTreeNode? node)
{
node = null;
if (!HFSTreeNodeDescriptor.TryRead(stream, out var descriptor)) return false;
int size = (int)headerRecord.NodeSize - HFSTreeNodeDescriptor.Size;
if ((_buffer is null) || (_buffer.Length < size))
_buffer = new byte[size * 2];
if (stream.Read(_buffer, 0, size) != size)
throw new EndOfStreamException();
ReadOnlySpan<byte> data = _buffer.AsSpan(0, size);
switch (descriptor!.Kind)
{
case HFSTreeNodeKind.Leaf:
node = HFSLeafTreeNode.Read(descriptor, data, headerRecord, isHFSX);
return true;
case HFSTreeNodeKind.Index:
node = HFSIndexTreeNode.Read(descriptor, data, headerRecord, isHFSX);
return true;
case HFSTreeNodeKind.Map:
node = HFSMapTreeNode.Read(descriptor, data);
return true;
}
return false;
}
}
internal sealed class HFSHeaderTreeNode : HFSTreeNode
{
private const int UserDataSize = 128;
public HFSTreeHeaderRecord HeaderRecord { get; }
public IReadOnlyList<byte> UserData { get; }
public IReadOnlyList<byte> Map { get; }
private HFSHeaderTreeNode(
HFSTreeNodeDescriptor descriptor,
HFSTreeHeaderRecord headerRecord,
IReadOnlyList<byte> userData,
IReadOnlyList<byte> map)
: base(descriptor)
{
HeaderRecord = headerRecord;
UserData = userData;
Map = map;
}
public static HFSHeaderTreeNode Read(HFSTreeNodeDescriptor descriptor, Stream stream)
{
if (descriptor.Kind != HFSTreeNodeKind.Header)
throw new ArgumentException("Descriptor does not define a header node");
var headerRecord = HFSTreeHeaderRecord.Read(stream);
var userData = new byte[UserDataSize];
if (stream.Read(userData, 0, UserDataSize) != UserDataSize)
throw new EndOfStreamException();
int mapSize = (int)(headerRecord.NodeSize - 256);
var map = new byte[mapSize];
if (stream.Read(map, 0, mapSize) != mapSize)
throw new EndOfStreamException();
// offset values (not required for header node)
_ = ReadUInt16(stream);
_ = ReadUInt16(stream);
_ = ReadUInt16(stream);
_ = ReadUInt16(stream);
return new HFSHeaderTreeNode(descriptor, headerRecord, userData, map);
}
}
internal sealed class HFSMapTreeNode : HFSTreeNode
{
public IReadOnlyList<byte> Map { get; }
private HFSMapTreeNode(HFSTreeNodeDescriptor descriptor, IReadOnlyList<byte> map)
: base(descriptor)
{
Map = map;
}
public static HFSMapTreeNode Read(HFSTreeNodeDescriptor descriptor, ReadOnlySpan<byte> data)
{
int mapSize = data.Length - 6;
var map = new byte[mapSize];
data.Slice(0, mapSize).CopyTo(map);
return new HFSMapTreeNode(descriptor, map);
}
}
internal sealed class HFSIndexTreeNode : HFSTreeNode
{
public IReadOnlyList<HFSPointerRecord> Records { get; }
private HFSIndexTreeNode(HFSTreeNodeDescriptor descriptor, IReadOnlyList<HFSPointerRecord> records)
: base(descriptor)
{
Records = records;
}
public static HFSIndexTreeNode Read(HFSTreeNodeDescriptor descriptor, ReadOnlySpan<byte> data, HFSTreeHeaderRecord headerRecord, bool isHFSX)
{
int recordCount = descriptor.NumRecords;
var records = new HFSPointerRecord[recordCount];
for (int i = 0; i < recordCount; i++)
records[i] = HFSPointerRecord.Read(ref data, headerRecord, isHFSX);
return new HFSIndexTreeNode(descriptor, records);
}
}
internal sealed class HFSLeafTreeNode : HFSTreeNode
{
public IReadOnlyList<HFSDataRecord> Records { get; }
private HFSLeafTreeNode(HFSTreeNodeDescriptor descriptor, IReadOnlyList<HFSDataRecord> records)
: base(descriptor)
{
Records = records;
}
public static HFSLeafTreeNode Read(HFSTreeNodeDescriptor descriptor, ReadOnlySpan<byte> data, HFSTreeHeaderRecord headerRecord, bool isHFSX)
{
int recordCount = descriptor.NumRecords;
var recordOffsets = new int[recordCount + 1];
for (int i = 0; i < recordOffsets.Length; i++)
{
var offsetData = data.Slice(data.Length - (2 * i) - 2);
ushort offset = ReadUInt16(ref offsetData);
recordOffsets[i] = offset;
}
var records = new HFSDataRecord[recordCount];
for (int i = 0; i < recordCount; i++)
{
int size = recordOffsets[i + 1] - recordOffsets[i];
records[i] = HFSDataRecord.Read(ref data, size, headerRecord, isHFSX);
}
return new HFSLeafTreeNode(descriptor, records);
}
}
}
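A worked view of the offset table that HFSLeafTreeNode.Read walks (numbers are illustrative):
// With NodeSize = 4096 and the 14-byte descriptor already consumed, data.Length is 4082.
// The node stores recordCount + 1 big-endian ushort offsets at its very end, last entry first:
//   recordOffsets[0] <- data[4080..4082]   (data.Length - 2*0 - 2)
//   recordOffsets[1] <- data[4078..4080]   (data.Length - 2*1 - 2)
// In HFS+ these offsets are measured from the start of the node; Read only needs their
// differences, recordOffsets[i + 1] - recordOffsets[i], as the size of record i.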

View File

@@ -1,55 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Common.Dmg.HFS
{
internal enum HFSTreeNodeKind : sbyte
{
Leaf = -1,
Index = 0,
Header = 1,
Map = 2
}
internal sealed class HFSTreeNodeDescriptor : HFSStructBase
{
public const int Size = 14;
public uint FLink { get; }
public uint BLink { get; }
public HFSTreeNodeKind Kind { get; }
public byte Height { get; }
public ushort NumRecords { get; }
private HFSTreeNodeDescriptor(uint fLink, uint bLink, HFSTreeNodeKind kind, byte height, ushort numRecords)
{
FLink = fLink;
BLink = bLink;
Kind = kind;
Height = height;
NumRecords = numRecords;
}
public static bool TryRead(Stream stream, out HFSTreeNodeDescriptor? descriptor)
{
descriptor = null;
uint fLink = ReadUInt32(stream);
uint bLink = ReadUInt32(stream);
sbyte rawKind = (sbyte)ReadUInt8(stream);
if (!Enum.IsDefined(typeof(HFSTreeNodeKind), rawKind)) return false;
var kind = (HFSTreeNodeKind)rawKind;
byte height = ReadUInt8(stream);
if (((kind == HFSTreeNodeKind.Header) || (kind == HFSTreeNodeKind.Map)) && (height != 0)) return false;
if ((kind == HFSTreeNodeKind.Leaf) && (height != 1)) return false;
ushort numRecords = ReadUInt16(stream);
_ = ReadUInt16(stream); // reserved
descriptor = new HFSTreeNodeDescriptor(fLink, bLink, kind, height, numRecords);
return true;
}
}
}

View File

@@ -1,206 +0,0 @@
using SharpCompress.Archives.Dmg;
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace SharpCompress.Common.Dmg.HFS
{
internal static class HFSUtil
{
private const string CorruptHFSMessage = "Corrupt HFS volume";
private static (HFSHeaderTreeNode, IReadOnlyList<HFSTreeNode>) ReadTree(Stream stream, bool isHFSX)
{
if (!HFSTreeNodeDescriptor.TryRead(stream, out var headerDesc))
throw new InvalidFormatException(CorruptHFSMessage);
var header = HFSHeaderTreeNode.Read(headerDesc!, stream);
var nodes = new HFSTreeNode[header.HeaderRecord.TotalNodes];
nodes[0] = header;
for (int i = 1; i < nodes.Length; i++)
{
if (!HFSTreeNode.TryRead(stream, header.HeaderRecord, isHFSX, out var node))
throw new InvalidFormatException(CorruptHFSMessage);
nodes[i] = node!;
}
return (header, nodes);
}
private static void EnumerateExtentsTree(
IReadOnlyList<HFSTreeNode> extentsTree,
IDictionary<HFSExtentKey, HFSExtentRecord> records,
int parentIndex)
{
var parent = extentsTree[parentIndex];
if (parent is HFSLeafTreeNode leafNode)
{
foreach (var record in leafNode.Records)
{
ReadOnlySpan<byte> data = record.Data.AsSpan();
var recordData = HFSExtentRecord.Read(ref data);
var key = record.GetExtentKey();
records.Add(key, recordData);
}
}
else if (parent is HFSIndexTreeNode indexNode)
{
foreach (var record in indexNode.Records)
EnumerateExtentsTree(extentsTree, records, (int)record.NodeNumber);
}
else
{
throw new InvalidFormatException(CorruptHFSMessage);
}
}
private static IReadOnlyDictionary<HFSExtentKey, HFSExtentRecord> LoadExtents(IReadOnlyList<HFSTreeNode> extentsTree, int rootIndex)
{
var records = new Dictionary<HFSExtentKey, HFSExtentRecord>();
if (rootIndex == 0) return records;
EnumerateExtentsTree(extentsTree, records, rootIndex);
return records;
}
private static void EnumerateCatalogTree(
HFSHeaderTreeNode catalogHeader,
IReadOnlyList<HFSTreeNode> catalogTree,
IDictionary<HFSCatalogKey, HFSCatalogRecord> records,
IDictionary<uint, HFSCatalogThread> threads,
int parentIndex,
bool isHFSX)
{
var parent = catalogTree[parentIndex];
if (parent is HFSLeafTreeNode leafNode)
{
foreach (var record in leafNode.Records)
{
ReadOnlySpan<byte> data = record.Data.AsSpan();
if (HFSCatalogRecord.TryRead(ref data, catalogHeader.HeaderRecord.KeyCompareType, isHFSX, out var recordData))
{
var key = record.GetCatalogKey();
if ((recordData!.Type == HFSCatalogRecordType.FileThread) || (recordData!.Type == HFSCatalogRecordType.FolderThread))
{
threads.Add(key.ParentId, (HFSCatalogThread)recordData);
}
else
{
records.Add(key, recordData);
}
}
else
{
throw new InvalidFormatException(CorruptHFSMessage);
}
}
}
else if (parent is HFSIndexTreeNode indexNode)
{
foreach (var record in indexNode.Records)
EnumerateCatalogTree(catalogHeader, catalogTree, records, threads, (int)record.NodeNumber, isHFSX);
}
else
{
throw new InvalidFormatException(CorruptHFSMessage);
}
}
private static (HFSCatalogKey, HFSCatalogRecord) GetRecord(uint id, IDictionary<HFSCatalogKey, HFSCatalogRecord> records, IDictionary<uint, HFSCatalogThread> threads)
{
if (threads.TryGetValue(id, out var thread))
{
if (records.TryGetValue(thread.CatalogKey, out var record))
return (thread.CatalogKey, record!);
}
throw new InvalidFormatException(CorruptHFSMessage);
}
private static string SanitizePath(string path)
{
var sb = new StringBuilder(path.Length);
foreach (char c in path)
{
if (!char.IsControl(c))
sb.Append(c);
}
return sb.ToString();
}
private static string GetPath(HFSCatalogKey key, IDictionary<HFSCatalogKey, HFSCatalogRecord> records, IDictionary<uint, HFSCatalogThread> threads)
{
if (key.ParentId == 1)
{
return key.Name;
}
else
{
var (parentKey, _) = GetRecord(key.ParentId, records, threads);
var path = Path.Combine(GetPath(parentKey, records, threads), key.Name);
return SanitizePath(path);
}
}
private static IEnumerable<DmgArchiveEntry> LoadEntriesFromCatalogTree(
Stream partitionStream,
DmgFilePart filePart,
HFSVolumeHeader volumeHeader,
HFSHeaderTreeNode catalogHeader,
IReadOnlyList<HFSTreeNode> catalogTree,
IReadOnlyDictionary<HFSExtentKey, HFSExtentRecord> extents,
DmgArchive archive,
int rootIndex)
{
if (rootIndex == 0) return Array.Empty<DmgArchiveEntry>();
var records = new Dictionary<HFSCatalogKey, HFSCatalogRecord>();
var threads = new Dictionary<uint, HFSCatalogThread>();
EnumerateCatalogTree(catalogHeader, catalogTree, records, threads, rootIndex, volumeHeader.IsHFSX);
var entries = new List<DmgArchiveEntry>();
foreach (var kvp in records)
{
var key = kvp.Key;
var record = kvp.Value;
string path = GetPath(key, records, threads);
var stream = (record is HFSCatalogFile file) ? new HFSForkStream(partitionStream, volumeHeader, file.DataFork, file.FileId, extents) : null;
var entry = new DmgArchiveEntry(stream, archive, record, path, filePart);
entries.Add(entry);
}
return entries;
}
public static IEnumerable<DmgArchiveEntry> LoadEntriesFromPartition(Stream partitionStream, string fileName, DmgArchive archive)
{
if (!HFSVolumeHeader.TryRead(partitionStream, out var volumeHeader))
throw new InvalidFormatException(CorruptHFSMessage);
var filePart = new DmgFilePart(partitionStream, fileName);
var extentsFile = volumeHeader!.ExtentsFile;
var extentsStream = new HFSForkStream(partitionStream, volumeHeader, extentsFile);
var (extentsHeader, extentsTree) = ReadTree(extentsStream, volumeHeader.IsHFSX);
var extents = LoadExtents(extentsTree, (int)extentsHeader.HeaderRecord.RootNode);
var catalogFile = volumeHeader!.CatalogFile;
var catalogStream = new HFSForkStream(partitionStream, volumeHeader, catalogFile);
var (catalogHeader, catalogTree) = ReadTree(catalogStream, volumeHeader.IsHFSX);
return LoadEntriesFromCatalogTree(
partitionStream,
filePart,
volumeHeader,
catalogHeader,
catalogTree,
extents,
archive,
(int)catalogHeader.HeaderRecord.RootNode);
}
}
}

View File

@@ -1,179 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Common.Dmg.HFS
{
internal sealed class HFSVolumeHeader : HFSStructBase
{
private const ushort SignaturePlus = 0x482B;
private const ushort SignatureX = 0x4858;
private const int FinderInfoCount = 8;
public bool IsHFSX { get; }
public ushort Version { get; }
public uint Attributes { get; }
public uint LastMountedVersion { get; }
public uint JournalInfoBlock { get; }
public DateTime CreateDate { get; }
public DateTime ModifyDate { get; }
public DateTime BackupDate { get; }
public DateTime CheckedDate { get; }
public uint FileCount { get; }
public uint FolderCount { get; }
public uint BlockSize { get; }
public uint TotalBlocks { get; }
public uint FreeBlocks { get; }
public uint NextAllocation { get; }
public uint RsrcClumpSize { get; }
public uint DataClumpSize { get; }
public uint NextCatalogID { get; }
public uint WriteCount { get; }
public ulong EncodingsBitmap { get; }
public IReadOnlyList<uint> FinderInfo { get; }
public HFSForkData AllocationFile { get; }
public HFSForkData ExtentsFile { get; }
public HFSForkData CatalogFile { get; }
public HFSForkData AttributesFile { get; }
public HFSForkData StartupFile { get; }
public HFSVolumeHeader(
bool isHFSX,
ushort version,
uint attributes,
uint lastMountedVersion,
uint journalInfoBlock,
DateTime createDate,
DateTime modifyDate,
DateTime backupDate,
DateTime checkedDate,
uint fileCount,
uint folderCount,
uint blockSize,
uint totalBlocks,
uint freeBlocks,
uint nextAllocation,
uint rsrcClumpSize,
uint dataClumpSize,
uint nextCatalogID,
uint writeCount,
ulong encodingsBitmap,
IReadOnlyList<uint> finderInfo,
HFSForkData allocationFile,
HFSForkData extentsFile,
HFSForkData catalogFile,
HFSForkData attributesFile,
HFSForkData startupFile)
{
IsHFSX = isHFSX;
Version = version;
Attributes = attributes;
LastMountedVersion = lastMountedVersion;
JournalInfoBlock = journalInfoBlock;
CreateDate = createDate;
ModifyDate = modifyDate;
BackupDate = backupDate;
CheckedDate = checkedDate;
FileCount = fileCount;
FolderCount = folderCount;
BlockSize = blockSize;
TotalBlocks = totalBlocks;
FreeBlocks = freeBlocks;
NextAllocation = nextAllocation;
RsrcClumpSize = rsrcClumpSize;
DataClumpSize = dataClumpSize;
NextCatalogID = nextCatalogID;
WriteCount = writeCount;
EncodingsBitmap = encodingsBitmap;
FinderInfo = finderInfo;
AllocationFile = allocationFile;
ExtentsFile = extentsFile;
CatalogFile = catalogFile;
AttributesFile = attributesFile;
StartupFile = startupFile;
}
private static IReadOnlyList<uint> ReadFinderInfo(Stream stream)
{
var finderInfo = new uint[FinderInfoCount];
for (int i = 0; i < FinderInfoCount; i++)
finderInfo[i] = ReadUInt32(stream);
return finderInfo;
}
public static bool TryRead(Stream stream, out HFSVolumeHeader? header)
{
header = null;
stream.Skip(1024); // reserved bytes
bool isHFSX;
ushort sig = ReadUInt16(stream);
if (sig == SignaturePlus) isHFSX = false;
else if (sig == SignatureX) isHFSX = true;
else return false;
ushort version = ReadUInt16(stream);
uint attributes = ReadUInt32(stream);
uint lastMountedVersion = ReadUInt32(stream);
uint journalInfoBlock = ReadUInt32(stream);
DateTime createDate = ReadDate(stream);
DateTime modifyDate = ReadDate(stream);
DateTime backupDate = ReadDate(stream);
DateTime checkedDate = ReadDate(stream);
uint fileCount = ReadUInt32(stream);
uint folderCount = ReadUInt32(stream);
uint blockSize = ReadUInt32(stream);
uint totalBlocks = ReadUInt32(stream);
uint freeBlocks = ReadUInt32(stream);
uint nextAllocation = ReadUInt32(stream);
uint rsrcClumpSize = ReadUInt32(stream);
uint dataClumpSize = ReadUInt32(stream);
uint nextCatalogID = ReadUInt32(stream);
uint writeCount = ReadUInt32(stream);
ulong encodingsBitmap = ReadUInt64(stream);
IReadOnlyList<uint> finderInfo = ReadFinderInfo(stream);
HFSForkData allocationFile = HFSForkData.Read(stream);
HFSForkData extentsFile = HFSForkData.Read(stream);
HFSForkData catalogFile = HFSForkData.Read(stream);
HFSForkData attributesFile = HFSForkData.Read(stream);
HFSForkData startupFile = HFSForkData.Read(stream);
header = new HFSVolumeHeader(
isHFSX,
version,
attributes,
lastMountedVersion,
journalInfoBlock,
createDate,
modifyDate,
backupDate,
checkedDate,
fileCount,
folderCount,
blockSize,
totalBlocks,
freeBlocks,
nextAllocation,
rsrcClumpSize,
dataClumpSize,
nextCatalogID,
writeCount,
encodingsBitmap,
finderInfo,
allocationFile,
extentsFile,
catalogFile,
attributesFile,
startupFile);
return true;
}
}
}
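The signature check above decodes as follows (informational):
// 0x482B == 'H','+'  -> HFS Plus volume header
// 0x4858 == 'H','X'  -> HFSX, the case-sensitive variant
// The 1024 bytes skipped beforehand are the reserved boot blocks that precede the volume
// header inside the partition.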

View File

@@ -1,49 +0,0 @@
using System;
namespace SharpCompress.Common.Dmg.Headers
{
internal enum BlkxChunkType : uint
{
Zero = 0x00000000u,
Uncompressed = 0x00000001u,
Ignore = 0x00000002u,
AdcCompressed = 0x80000004u,
ZlibCompressed = 0x80000005u,
Bz2Compressed = 0x80000006u,
Comment = 0x7FFFFFFEu,
Last = 0xFFFFFFFFu,
}
internal sealed class BlkxChunk : DmgStructBase
{
private const int SectorSize = 512;
public BlkxChunkType Type { get; } // Compression type used or chunk type
public uint Comment { get; } // "+beg" or "+end", if EntryType is comment (0x7FFFFFFE). Else reserved.
public ulong UncompressedOffset { get; } // Start sector of this chunk
public ulong UncompressedLength { get; } // Number of sectors in this chunk
public ulong CompressedOffset { get; } // Start of chunk in data fork
public ulong CompressedLength { get; } // Count of bytes of chunk, in data fork
private BlkxChunk(BlkxChunkType type, uint comment, ulong sectorNumber, ulong sectorCount, ulong compressedOffset, ulong compressedLength)
{
Type = type;
Comment = comment;
UncompressedOffset = sectorNumber * SectorSize;
UncompressedLength = sectorCount * SectorSize;
CompressedOffset = compressedOffset;
CompressedLength = compressedLength;
}
public static bool TryRead(ref ReadOnlySpan<byte> data, out BlkxChunk? chunk)
{
chunk = null;
var type = (BlkxChunkType)ReadUInt32(ref data);
if (!Enum.IsDefined(typeof(BlkxChunkType), type)) return false;
chunk = new BlkxChunk(type, ReadUInt32(ref data), ReadUInt64(ref data), ReadUInt64(ref data), ReadUInt64(ref data), ReadUInt64(ref data));
return true;
}
}
}
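A worked reading of the sector arithmetic in the constructor above (values are illustrative):
// type = ZlibCompressed, sectorNumber = 16, sectorCount = 8,
// compressedOffset = 0x1200, compressedLength = 0x0800
//   UncompressedOffset = 16 * 512 = 8192
//   UncompressedLength =  8 * 512 = 4096
// i.e. bytes 8192..12288 of the expanded partition come from 0x0800 zlib-compressed bytes
// starting at offset 0x1200 in the data fork. Zero and Ignore chunks typically carry no
// data-fork bytes, and Last (0xFFFFFFFF) marks the end of a blkx chunk list.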

View File

@@ -1,75 +0,0 @@
using System;
using System.Collections.Generic;
namespace SharpCompress.Common.Dmg.Headers
{
internal sealed class BlkxTable : DmgStructBase
{
private const uint Signature = 0x6d697368u;
public uint Version { get; } // Current version is 1
public ulong SectorNumber { get; } // Starting disk sector in this blkx descriptor
public ulong SectorCount { get; } // Number of disk sectors in this blkx descriptor
public ulong DataOffset { get; }
public uint BuffersNeeded { get; }
public uint BlockDescriptors { get; } // Number of descriptors
public UdifChecksum Checksum { get; }
public IReadOnlyList<BlkxChunk> Chunks { get; }
private BlkxTable(
uint version,
ulong sectorNumber,
ulong sectorCount,
ulong dataOffset,
uint buffersNeeded,
uint blockDescriptors,
UdifChecksum checksum,
IReadOnlyList<BlkxChunk> chunks)
{
Version = version;
SectorNumber = sectorNumber;
SectorCount = sectorCount;
DataOffset = dataOffset;
BuffersNeeded = buffersNeeded;
BlockDescriptors = blockDescriptors;
Checksum = checksum;
Chunks = chunks;
}
public static bool TryRead(in byte[] buffer, out BlkxTable? header)
{
header = null;
ReadOnlySpan<byte> data = buffer.AsSpan();
uint sig = ReadUInt32(ref data);
if (sig != Signature) return false;
uint version = ReadUInt32(ref data);
ulong sectorNumber = ReadUInt64(ref data);
ulong sectorCount = ReadUInt64(ref data);
ulong dataOffset = ReadUInt64(ref data);
uint buffersNeeded = ReadUInt32(ref data);
uint blockDescriptors = ReadUInt32(ref data);
data = data.Slice(6 * sizeof(uint)); // reserved
var checksum = UdifChecksum.Read(ref data);
uint chunkCount = ReadUInt32(ref data);
var chunks = new BlkxChunk[chunkCount];
for (int i = 0; i < chunkCount; i++)
{
if (!BlkxChunk.TryRead(ref data, out var chunk)) return false;
chunks[i] = chunk!;
}
header = new BlkxTable(version, sectorNumber, sectorCount, dataOffset, buffersNeeded, blockDescriptors, checksum, chunks);
return true;
}
}
}

View File

@@ -1,138 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Common.Dmg.Headers
{
internal sealed class DmgHeader : DmgStructBase
{
public const int HeaderSize = 512;
private const uint Signature = 0x6B6F6C79u;
private const int UuidSize = 16; // 128 bit
public uint Version { get; } // Current version is 4
public uint Flags { get; } // Flags
public ulong RunningDataForkOffset { get; } //
public ulong DataForkOffset { get; } // Data fork offset (usually 0, beginning of file)
public ulong DataForkLength { get; } // Size of data fork (usually up to the XMLOffset, below)
public ulong RsrcForkOffset { get; } // Resource fork offset, if any
public ulong RsrcForkLength { get; } // Resource fork length, if any
public uint SegmentNumber { get; } // Usually 1, may be 0
public uint SegmentCount { get; } // Usually 1, may be 0
public IReadOnlyList<byte> SegmentID { get; } // 128-bit GUID identifier of segment (if SegmentNumber !=0)
public UdifChecksum DataChecksum { get; }
public ulong XMLOffset { get; } // Offset of property list in DMG, from beginning
public ulong XMLLength { get; } // Length of property list
public UdifChecksum Checksum { get; }
public uint ImageVariant { get; } // Commonly 1
public ulong SectorCount { get; } // Size of DMG when expanded, in sectors
private DmgHeader(
uint version,
uint flags,
ulong runningDataForkOffset,
ulong dataForkOffset,
ulong dataForkLength,
ulong rsrcForkOffset,
ulong rsrcForkLength,
uint segmentNumber,
uint segmentCount,
IReadOnlyList<byte> segmentID,
UdifChecksum dataChecksum,
ulong xMLOffset,
ulong xMLLength,
UdifChecksum checksum,
uint imageVariant,
ulong sectorCount)
{
Version = version;
Flags = flags;
RunningDataForkOffset = runningDataForkOffset;
DataForkOffset = dataForkOffset;
DataForkLength = dataForkLength;
RsrcForkOffset = rsrcForkOffset;
RsrcForkLength = rsrcForkLength;
SegmentNumber = segmentNumber;
SegmentCount = segmentCount;
SegmentID = segmentID;
DataChecksum = dataChecksum;
XMLOffset = xMLOffset;
XMLLength = xMLLength;
Checksum = checksum;
ImageVariant = imageVariant;
SectorCount = sectorCount;
}
private static void ReadUuid(ref ReadOnlySpan<byte> data, byte[] buffer)
{
data.Slice(0, UuidSize).CopyTo(buffer);
data = data.Slice(UuidSize);
}
internal static bool TryRead(Stream input, out DmgHeader? header)
{
header = null;
var buffer = new byte[HeaderSize];
int count = input.Read(buffer, 0, HeaderSize);
if (count != HeaderSize) return false;
ReadOnlySpan<byte> data = buffer.AsSpan();
uint sig = ReadUInt32(ref data);
if (sig != Signature) return false;
uint version = ReadUInt32(ref data);
uint size = ReadUInt32(ref data);
if (size != (uint)HeaderSize) return false;
uint flags = ReadUInt32(ref data);
ulong runningDataForkOffset = ReadUInt64(ref data);
ulong dataForkOffset = ReadUInt64(ref data);
ulong dataForkLength = ReadUInt64(ref data);
ulong rsrcForkOffset = ReadUInt64(ref data);
ulong rsrcForkLength = ReadUInt64(ref data);
uint segmentNumber = ReadUInt32(ref data);
uint segmentCount = ReadUInt32(ref data);
var segmentID = new byte[UuidSize];
ReadUuid(ref data, segmentID);
var dataChecksum = UdifChecksum.Read(ref data);
ulong xmlOffset = ReadUInt64(ref data);
ulong xmlLength = ReadUInt64(ref data);
data = data.Slice(120); // Reserved bytes
var checksum = UdifChecksum.Read(ref data);
uint imageVariant = ReadUInt32(ref data);
ulong sectorCount = ReadUInt64(ref data);
header = new DmgHeader(
version,
flags,
runningDataForkOffset,
dataForkOffset,
dataForkLength,
rsrcForkOffset,
rsrcForkLength,
segmentNumber,
segmentCount,
segmentID,
dataChecksum,
xmlOffset,
xmlLength,
checksum,
imageVariant,
sectorCount);
return true;
}
}
}
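The magic and offsets above decode as follows (informational):
// 0x6B6F6C79 == 'k','o','l','y' -- the UDIF header block, normally stored as the last
// 512 bytes of a .dmg file. XMLOffset/XMLLength locate the XML property list whose blkx
// entries are the BlkxTable structures parsed above.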

View File

@@ -1,22 +0,0 @@
using System;
using System.Buffers.Binary;
namespace SharpCompress.Common.Dmg.Headers
{
internal abstract class DmgStructBase
{
protected static uint ReadUInt32(ref ReadOnlySpan<byte> data)
{
uint val = BinaryPrimitives.ReadUInt32BigEndian(data);
data = data.Slice(sizeof(uint));
return val;
}
protected static ulong ReadUInt64(ref ReadOnlySpan<byte> data)
{
ulong val = BinaryPrimitives.ReadUInt64BigEndian(data);
data = data.Slice(sizeof(ulong));
return val;
}
}
}

View File

@@ -1,90 +0,0 @@
using System;
using System.Buffers.Binary;
using System.IO;
namespace SharpCompress.Common.Dmg.Headers
{
internal sealed class GptHeader : GptStructBase
{
private const int HeaderSize = 92;
private static readonly ulong Signature = BinaryPrimitives.ReadUInt64LittleEndian(new byte[] { 69, 70, 73, 32, 80, 65, 82, 84 });
public uint Revision { get; }
public uint Crc32Header { get; }
public ulong CurrentLba { get; }
public ulong BackupLba { get; }
public ulong FirstUsableLba { get; }
public ulong LastUsableLba { get; }
public Guid DiskGuid { get; }
public ulong EntriesStart { get; }
public uint EntriesCount { get; }
public uint EntriesSize { get; }
public uint Crc32Array { get; }
private GptHeader(
uint revision,
uint crc32Header,
ulong currentLba,
ulong backupLba,
ulong firstUsableLba,
ulong lastUsableLba,
Guid diskGuid,
ulong entriesStart,
uint entriesCount,
uint entriesSize,
uint crc32Array)
{
Revision = revision;
Crc32Header = crc32Header;
CurrentLba = currentLba;
BackupLba = backupLba;
FirstUsableLba = firstUsableLba;
LastUsableLba = lastUsableLba;
DiskGuid = diskGuid;
EntriesStart = entriesStart;
EntriesCount = entriesCount;
EntriesSize = entriesSize;
Crc32Array = crc32Array;
}
public static bool TryRead(Stream stream, out GptHeader? header)
{
header = null;
ulong sig = ReadUInt64(stream);
if (sig != Signature) return false;
uint revision = ReadUInt32(stream);
uint headerSize = ReadUInt32(stream);
if (headerSize != HeaderSize) return false;
uint crc32Header = ReadUInt32(stream);
_ = ReadUInt32(stream); // reserved
ulong currentLba = ReadUInt64(stream);
ulong backupLba = ReadUInt64(stream);
ulong firstUsableLba = ReadUInt64(stream);
ulong lastUsableLba = ReadUInt64(stream);
Guid diskGuid = ReadGuid(stream);
ulong entriesStart = ReadUInt64(stream);
uint entriesCount = ReadUInt32(stream);
uint entriesSize = ReadUInt32(stream);
uint crc32Array = ReadUInt32(stream);
header = new GptHeader(
revision,
crc32Header,
currentLba,
backupLba,
firstUsableLba,
lastUsableLba,
diskGuid,
entriesStart,
entriesCount,
entriesSize,
crc32Array);
return true;
}
}
}

View File

@@ -1,36 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Common.Dmg.Headers
{
internal sealed class GptPartitionEntry : GptStructBase
{
public Guid TypeGuid { get; }
public Guid Guid { get; }
public ulong FirstLba { get; }
public ulong LastLba { get; }
public ulong Attributes { get; }
public string Name { get; }
private GptPartitionEntry(Guid typeGuid, Guid guid, ulong firstLba, ulong lastLba, ulong attributes, string name)
{
TypeGuid = typeGuid;
Guid = guid;
FirstLba = firstLba;
LastLba = lastLba;
Attributes = attributes;
Name = name;
}
public static GptPartitionEntry Read(Stream stream)
{
return new GptPartitionEntry(
ReadGuid(stream),
ReadGuid(stream),
ReadUInt64(stream),
ReadUInt64(stream),
ReadUInt64(stream),
ReadString(stream, 72));
}
}
}

View File

@@ -1,56 +0,0 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Text;
namespace SharpCompress.Common.Dmg.Headers
{
internal abstract class GptStructBase
{
private static readonly byte[] _buffer = new byte[8];
protected static ushort ReadUInt16(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(ushort)) != sizeof(ushort))
throw new EndOfStreamException();
return BinaryPrimitives.ReadUInt16LittleEndian(_buffer);
}
protected static uint ReadUInt32(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(uint)) != sizeof(uint))
throw new EndOfStreamException();
return BinaryPrimitives.ReadUInt32LittleEndian(_buffer);
}
protected static ulong ReadUInt64(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(ulong)) != sizeof(ulong))
throw new EndOfStreamException();
return BinaryPrimitives.ReadUInt64LittleEndian(_buffer);
}
protected static Guid ReadGuid(Stream stream)
{
int a = (int)ReadUInt32(stream);
short b = (short)ReadUInt16(stream);
short c = (short)ReadUInt16(stream);
if (stream.Read(_buffer, 0, 8) != 8)
throw new EndOfStreamException();
return new Guid(a, b, c, _buffer);
}
protected static string ReadString(Stream stream, int byteSize)
{
var buffer = new byte[byteSize];
if (stream.Read(buffer, 0, byteSize) != byteSize)
throw new EndOfStreamException();
return Encoding.Unicode.GetString(buffer).NullTerminate();
}
}
}
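Illustration of the mixed-endian GUID handling in ReadGuid: the EFI spec stores the first three GUID fields little-endian and the final eight bytes verbatim, which is exactly what the Guid(int, short, short, byte[]) constructor expects. For the Apple HFS partition type defined further below, the on-disk bytes
// 00 53 46 48 00 00 AA 11 AA 11 00 30 65 43 EC AC
come back from ReadGuid as 48465300-0000-11AA-AA11-00306543ECAC (the first field, read big-endian, spells "HFS\0").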

View File

@@ -1,33 +0,0 @@
using System;
using System.Collections.Generic;
namespace SharpCompress.Common.Dmg.Headers
{
internal sealed class UdifChecksum : DmgStructBase
{
private const int MaxSize = 32; // * 4 to get byte size
public uint Type { get; }
public uint Size { get; } // in bits
public IReadOnlyList<uint> Bits { get; }
private UdifChecksum(uint type, uint size, IReadOnlyList<uint> bits)
{
Type = type;
Size = size;
Bits = bits;
}
public static UdifChecksum Read(ref ReadOnlySpan<byte> data)
{
uint type = ReadUInt32(ref data);
uint size = ReadUInt32(ref data);
var bits = new uint[MaxSize];
for (int i = 0; i < MaxSize; i++)
bits[i] = ReadUInt32(ref data);
return new UdifChecksum(type, size, bits);
}
}
}

View File

@@ -1,14 +0,0 @@
using System;
namespace SharpCompress.Common.Dmg
{
internal static class PartitionFormat
{
public static readonly Guid AppleHFS = new Guid("48465300-0000-11AA-AA11-00306543ECAC");
public static readonly Guid AppleUFS = new Guid("55465300-0000-11AA-AA11-00306543ECAC");
public static readonly Guid AppleBoot = new Guid("426F6F74-0000-11AA-AA11-00306543ECAC");
public static readonly Guid AppleRaid = new Guid("52414944-0000-11AA-AA11-00306543ECAC");
public static readonly Guid AppleRaidOffline = new Guid("52414944-5F4F-11AA-AA11-00306543ECAC");
public static readonly Guid AppleLabel = new Guid("4C616265-6C00-11AA-AA11-00306543ECAC");
}
}

View File

@@ -18,7 +18,7 @@ namespace SharpCompress.Common
/// <summary>
/// The target of a symlink entry internal to the Archive. Will be null if not a symlink.
/// </summary>
public abstract string? LinkTarget { get; }
public abstract string LinkTarget { get; }
/// <summary>
/// The compressed file size
@@ -71,10 +71,12 @@ namespace SharpCompress.Common
public abstract bool IsSplitAfter { get; }
/// <inheritdoc/>
public override string ToString() => Key;
public override string ToString()
{
return Key;
}
internal abstract IEnumerable<FilePart> Parts { get; }
internal bool IsSolid { get; set; }
internal virtual void Close()

View File

@@ -47,8 +47,7 @@ namespace SharpCompress.Common
public override bool CanWrite => false;
public override void Flush()
{
public override void Flush() {
}
public override long Length => _stream.Length;

View File

@@ -8,28 +8,28 @@ namespace SharpCompress.Common
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteEntryToDirectory(IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Action<string, ExtractionOptions?> write)
public static void WriteEntryToDirectory(IEntry entry, string destinationDirectory,
ExtractionOptions options, Action<string, ExtractionOptions> write)
{
string destinationFileName;
string file = Path.GetFileName(entry.Key);
string fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
options ??= new ExtractionOptions()
{
Overwrite = true
};
options = options ?? new ExtractionOptions()
{
Overwrite = true
};
if (options.ExtractFullPath)
{
string folder = Path.GetDirectoryName(entry.Key)!;
string destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));
string folder = Path.GetDirectoryName(entry.Key);
string destdir = Path.GetFullPath(
Path.Combine(fullDestinationDirectoryPath, folder)
);
if (!Directory.Exists(destdir))
{
if (!destdir.StartsWith(fullDestinationDirectoryPath, StringComparison.Ordinal))
if (!destdir.StartsWith(fullDestinationDirectoryPath))
{
throw new ExtractionException("Entry is trying to create a directory outside of the destination directory.");
}
@@ -39,7 +39,7 @@ namespace SharpCompress.Common
destinationFileName = Path.Combine(destdir, file);
}
else
{
{
destinationFileName = Path.Combine(fullDestinationDirectoryPath, file);
}
@@ -48,7 +48,7 @@ namespace SharpCompress.Common
{
destinationFileName = Path.GetFullPath(destinationFileName);
if (!destinationFileName.StartsWith(fullDestinationDirectoryPath, StringComparison.Ordinal))
if (!destinationFileName.StartsWith(fullDestinationDirectoryPath))
{
throw new ExtractionException("Entry is trying to write a file outside of the destination directory.");
}
@@ -59,14 +59,14 @@ namespace SharpCompress.Common
Directory.CreateDirectory(destinationFileName);
}
}
public static void WriteEntryToFile(IEntry entry, string destinationFileName,
ExtractionOptions? options,
ExtractionOptions options,
Action<string, FileMode> openAndWrite)
{
if (entry.LinkTarget != null)
{
if (options?.WriteSymbolicLink is null)
if (null == options.WriteSymbolicLink)
{
throw new ExtractionException("Entry is a symbolic link but ExtractionOptions.WriteSymbolicLink delegate is null");
}
@@ -75,10 +75,10 @@ namespace SharpCompress.Common
else
{
FileMode fm = FileMode.Create;
options ??= new ExtractionOptions()
{
Overwrite = true
};
options = options ?? new ExtractionOptions()
{
Overwrite = true
};
if (!options.Overwrite)
{

View File

@@ -1,13 +1,11 @@
using System;
namespace SharpCompress.Common
namespace SharpCompress.Common
{
public class ExtractionOptions
{
/// <summary>
/// overwrite target if it exists
/// </summary>
public bool Overwrite { get; set; }
public bool Overwrite {get; set; }
/// <summary>
/// extract with internal directory structure
@@ -31,10 +29,6 @@ namespace SharpCompress.Common
/// </summary>
public delegate void SymbolicLinkWriterDelegate(string sourcePath, string targetPath);
public SymbolicLinkWriterDelegate WriteSymbolicLink =
(sourcePath, targetPath) =>
{
Console.WriteLine($"Could not write symlink {sourcePath} -> {targetPath}, for more information please see https://github.com/dotnet/runtime/issues/24271");
};
public SymbolicLinkWriterDelegate WriteSymbolicLink;
}
}

View File

@@ -14,7 +14,7 @@ namespace SharpCompress.Common
internal abstract string FilePartName { get; }
internal abstract Stream GetCompressedStream();
internal abstract Stream? GetRawStream();
internal abstract Stream GetRawStream();
internal bool Skipped { get; set; }
}
}

View File

@@ -2,28 +2,21 @@
namespace SharpCompress.Common
{
public sealed class FilePartExtractionBeginEventArgs : EventArgs
public class FilePartExtractionBeginEventArgs : EventArgs
{
public FilePartExtractionBeginEventArgs(string name, long size, long compressedSize)
{
Name = name;
Size = size;
CompressedSize = compressedSize;
}
/// <summary>
/// File name for the part for the current entry
/// </summary>
public string Name { get; }
public string Name { get; internal set; }
/// <summary>
/// Uncompressed size of the current entry in the part
/// </summary>
public long Size { get; }
public long Size { get; internal set; }
/// <summary>
/// Compressed size of the current entry in the part
/// </summary>
public long CompressedSize { get; }
public long CompressedSize { get; internal set; }
}
}

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace SharpCompress.Common.GZip
{
@@ -15,15 +16,15 @@ namespace SharpCompress.Common.GZip
public override CompressionType CompressionType => CompressionType.GZip;
public override long Crc => _filePart.Crc ?? 0;
public override long Crc => 0;
public override string Key => _filePart.FilePartName;
public override string? LinkTarget => null;
public override string LinkTarget => null;
public override long CompressedSize => 0;
public override long Size => _filePart.UncompressedSize ?? 0;
public override long Size => 0;
public override DateTime? LastModifiedTime => _filePart.DateModified;

View File

@@ -1,40 +1,32 @@
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Converters;
using System.Text;
namespace SharpCompress.Common.GZip
{
internal sealed class GZipFilePart : FilePart
internal class GZipFilePart : FilePart
{
private string? _name;
private string _name;
private readonly Stream _stream;
internal GZipFilePart(Stream stream, ArchiveEncoding archiveEncoding)
: base(archiveEncoding)
: base(archiveEncoding)
{
_stream = stream;
ReadAndValidateGzipHeader();
if (stream.CanSeek)
{
long position = stream.Position;
stream.Position = stream.Length - 8;
ReadTrailer();
stream.Position = position;
}
ReadAndValidateGzipHeader(stream);
EntryStartPosition = stream.Position;
_stream = stream;
}
internal long EntryStartPosition { get; }
internal DateTime? DateModified { get; private set; }
internal int? Crc { get; private set; }
internal int? UncompressedSize { get; private set; }
internal override string FilePartName => _name!;
internal override string FilePartName => _name;
internal override Stream GetCompressedStream()
{
@@ -46,21 +38,11 @@ namespace SharpCompress.Common.GZip
return _stream;
}
private void ReadTrailer()
{
// Read and potentially verify the GZIP trailer: CRC32 and size mod 2^32
Span<byte> trailer = stackalloc byte[8];
int n = _stream.Read(trailer);
Crc = BinaryPrimitives.ReadInt32LittleEndian(trailer);
UncompressedSize = BinaryPrimitives.ReadInt32LittleEndian(trailer.Slice(4));
}
private void ReadAndValidateGzipHeader()
private void ReadAndValidateGzipHeader(Stream stream)
{
// read the header on the first read
Span<byte> header = stackalloc byte[10];
int n = _stream.Read(header);
byte[] header = new byte[10];
int n = stream.Read(header, 0, header.Length);
// workitem 8501: handle edge case (decompress empty stream)
if (n == 0)
@@ -78,17 +60,17 @@ namespace SharpCompress.Common.GZip
throw new ZlibException("Bad GZIP header.");
}
int timet = BinaryPrimitives.ReadInt32LittleEndian(header.Slice(4));
Int32 timet = DataConverter.LittleEndian.GetInt32(header, 4);
DateModified = TarHeader.EPOCH.AddSeconds(timet);
if ((header[3] & 0x04) == 0x04)
{
// read and discard extra field
n = _stream.Read(header.Slice(0, 2)); // 2-byte length field
n = stream.Read(header, 0, 2); // 2-byte length field
short extraLength = (short)(header[0] + header[1] * 256);
Int16 extraLength = (Int16)(header[0] + header[1] * 256);
byte[] extra = new byte[extraLength];
if (!_stream.ReadFully(extra))
if (!stream.ReadFully(extra))
{
throw new ZlibException("Unexpected end-of-file reading GZIP header.");
}
@@ -96,27 +78,27 @@ namespace SharpCompress.Common.GZip
}
if ((header[3] & 0x08) == 0x08)
{
_name = ReadZeroTerminatedString(_stream);
_name = ReadZeroTerminatedString(stream);
}
if ((header[3] & 0x10) == 0x010)
{
ReadZeroTerminatedString(_stream);
ReadZeroTerminatedString(stream);
}
if ((header[3] & 0x02) == 0x02)
{
_stream.ReadByte(); // CRC16, ignore
stream.ReadByte(); // CRC16, ignore
}
}
private string ReadZeroTerminatedString(Stream stream)
{
Span<byte> buf1 = stackalloc byte[1];
byte[] buf1 = new byte[1];
var list = new List<byte>();
bool done = false;
do
{
// workitem 7740
int n = stream.Read(buf1);
int n = stream.Read(buf1, 0, 1);
if (n != 1)
{
throw new ZlibException("Unexpected EOF reading GZIP header.");
@@ -135,4 +117,4 @@ namespace SharpCompress.Common.GZip
return ArchiveEncoding.Decode(buffer);
}
}
}
}
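A quick reference for the flag bits tested in ReadAndValidateGzipHeader (header[3] is the gzip FLG byte from RFC 1952):
// 0x01 FTEXT, 0x02 FHCRC (2-byte header CRC), 0x04 FEXTRA (length-prefixed extra field),
// 0x08 FNAME (zero-terminated original file name), 0x10 FCOMMENT (zero-terminated comment).
// A member starting 1f 8b 08 08 ... therefore carries an original file name, which is what
// populates _name and, through FilePartName, GZipEntry.Key.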

View File

@@ -1,4 +1,5 @@
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common
{

View File

@@ -10,7 +10,7 @@ namespace SharpCompress.Common
long Crc { get; }
DateTime? CreatedTime { get; }
string Key { get; }
string? LinkTarget { get; }
string LinkTarget { get; }
bool IsDirectory { get; }
bool IsEncrypted { get; }
bool IsSplitAfter { get; }

View File

@@ -1,6 +1,4 @@
using System;
namespace SharpCompress.Common
namespace SharpCompress.Common
{
public class IncompleteArchiveException : ArchiveException
{
@@ -8,10 +6,5 @@ namespace SharpCompress.Common
: base(message)
{
}
public IncompleteArchiveException(string message, Exception inner)
: base(message, inner)
{
}
}
}

View File

@@ -4,13 +4,11 @@ namespace SharpCompress.Common.Rar.Headers
{
internal class AvHeader : RarHeader
{
public AvHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Av)
public AvHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Av)
{
if (IsRar5)
{
if (IsRar5)
throw new InvalidFormatException("unexpected rar5 record");
}
}
protected override void ReadFinish(MarkingBinaryReader reader)

View File

@@ -1,12 +1,10 @@
#nullable disable
using SharpCompress.IO;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
internal class ArchiveCryptHeader : RarHeader
{
private const int CRYPT_VERSION = 0; // Supported encryption version.
private const int SIZE_SALT50 = 16;
private const int SIZE_SALT30 = 8;
@@ -15,14 +13,14 @@ namespace SharpCompress.Common.Rar.Headers
private const int SIZE_PSWCHECK_CSUM = 4;
private const int CRYPT5_KDF_LG2_COUNT = 15; // LOG2 of PBKDF2 iteration count.
private const int CRYPT5_KDF_LG2_COUNT_MAX = 24; // LOG2 of maximum accepted iteration count.
private bool _usePswCheck;
private uint _lg2Count; // Log2 of PBKDF2 repetition count.
private byte[] _salt;
private byte[] _pswCheck;
private byte[] _pswCheckCsm;
public ArchiveCryptHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Crypt)
{
@@ -35,12 +33,12 @@ namespace SharpCompress.Common.Rar.Headers
{
//error?
return;
}
}
var encryptionFlags = reader.ReadRarVIntUInt32();
_usePswCheck = FlagUtility.HasFlag(encryptionFlags, EncryptionFlagsV5.CHFL_CRYPT_PSWCHECK);
_lg2Count = reader.ReadRarVIntByte(1);
//UsePswCheck = HasHeaderFlag(EncryptionFlagsV5.CHFL_CRYPT_PSWCHECK);
if (_lg2Count > CRYPT5_KDF_LG2_COUNT_MAX)
{

View File

@@ -2,16 +2,16 @@ using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
internal sealed class ArchiveHeader : RarHeader
internal class ArchiveHeader : RarHeader
{
public ArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Archive)
public ArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Archive)
{
}
protected override void ReadFinish(MarkingBinaryReader reader)
{
if (IsRar5)
if (IsRar5)
{
Flags = reader.ReadRarVIntUInt16();
if (HasFlag(ArchiveFlagsV5.HAS_VOLUME_NUMBER))
@@ -22,8 +22,8 @@ namespace SharpCompress.Common.Rar.Headers
//if (ExtraSize != 0) {
// ReadLocator(reader);
//}
}
else
}
else
{
Flags = HeaderFlags;
HighPosAv = reader.ReadInt16();
@@ -35,33 +35,26 @@ namespace SharpCompress.Common.Rar.Headers
}
}
private void ReadLocator(MarkingBinaryReader reader)
{
private void ReadLocator(MarkingBinaryReader reader) {
var size = reader.ReadRarVIntUInt16();
var type = reader.ReadRarVIntUInt16();
if (type != 1)
{
throw new InvalidFormatException("expected locator record");
}
if (type != 1) throw new InvalidFormatException("expected locator record");
var flags = reader.ReadRarVIntUInt16();
const ushort hasQuickOpenOffset = 0x01;
const ushort hasRecoveryOffset = 0x02;
ulong quickOpenOffset = 0;
if ((flags & hasQuickOpenOffset) == hasQuickOpenOffset)
{
if ((flags & hasQuickOpenOffset) == hasQuickOpenOffset) {
quickOpenOffset = reader.ReadRarVInt();
}
ulong recoveryOffset = 0;
if ((flags & hasRecoveryOffset) == hasRecoveryOffset)
{
if ((flags & hasRecoveryOffset) == hasRecoveryOffset) {
recoveryOffset = reader.ReadRarVInt();
}
}
private ushort Flags { get; set; }
private ushort Flags { get; set; }
private bool HasFlag(ushort flag)
private bool HasFlag(ushort flag)
{
return (Flags & flag) == flag;
}
@@ -81,7 +74,7 @@ namespace SharpCompress.Common.Rar.Headers
public bool IsVolume => HasFlag(IsRar5 ? ArchiveFlagsV5.VOLUME : ArchiveFlagsV4.VOLUME);
// RAR5: Volume number field is present. True for all volumes except first.
public bool IsFirstVolume => IsRar5 ? VolumeNumber is null : HasFlag(ArchiveFlagsV4.FIRST_VOLUME);
public bool IsFirstVolume => IsRar5 ? VolumeNumber == null : HasFlag(ArchiveFlagsV4.FIRST_VOLUME);
public bool IsSolid => HasFlag(IsRar5 ? ArchiveFlagsV5.SOLID : ArchiveFlagsV4.SOLID);
}

View File

@@ -5,12 +5,9 @@ namespace SharpCompress.Common.Rar.Headers
internal class CommentHeader : RarHeader
{
protected CommentHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Comment)
{
if (IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");
}
: base(header, reader, HeaderType.Comment)
{
if (IsRar5) throw new InvalidFormatException("unexpected rar5 record");
}
protected override void ReadFinish(MarkingBinaryReader reader)

View File

@@ -4,14 +4,14 @@ namespace SharpCompress.Common.Rar.Headers
{
internal class EndArchiveHeader : RarHeader
{
public EndArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.EndArchive)
public EndArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.EndArchive)
{
}
protected override void ReadFinish(MarkingBinaryReader reader)
{
if (IsRar5)
if (IsRar5)
{
Flags = reader.ReadRarVIntUInt16();
}
@@ -31,7 +31,7 @@ namespace SharpCompress.Common.Rar.Headers
private ushort Flags { get; set; }
private bool HasFlag(ushort flag)
private bool HasFlag(ushort flag)
{
return (Flags & flag) == flag;
}

View File

@@ -1,5 +1,3 @@
#nullable disable
#if !Rar2017_64bit
using nint = System.Int32;
using nuint = System.UInt32;
@@ -21,18 +19,18 @@ namespace SharpCompress.Common.Rar.Headers
{
private uint _fileCrc;
public FileHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType)
: base(header, reader, headerType)
public FileHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType)
: base(header, reader, headerType)
{
}
protected override void ReadFinish(MarkingBinaryReader reader)
protected override void ReadFinish(MarkingBinaryReader reader)
{
if (IsRar5)
if (IsRar5)
{
ReadFromReaderV5(reader);
}
else
}
else
{
ReadFromReaderV4(reader);
}
@@ -49,13 +47,11 @@ namespace SharpCompress.Common.Rar.Headers
FileAttributes = reader.ReadRarVIntUInt32();
if (HasFlag(FileFlagsV5.HAS_MOD_TIME))
{
if (HasFlag(FileFlagsV5.HAS_MOD_TIME)) {
FileLastModifiedTime = Utility.UnixTimeToDateTime(reader.ReadUInt32());
}
if (HasFlag(FileFlagsV5.HAS_CRC32))
{
if (HasFlag(FileFlagsV5.HAS_CRC32)) {
FileCrc = reader.ReadUInt32();
}
@@ -67,7 +63,7 @@ namespace SharpCompress.Common.Rar.Headers
// but it was already used in RAR 1.5 and Unpack needs to distinguish
// them.
CompressionAlgorithm = (byte)((compressionInfo & 0x3f) + 50);
// 7th bit (0x0040) defines the solid flag. If it is set, RAR continues to use the compression dictionary left after processing preceding files.
// It can be set only for file headers and is never set for service headers.
IsSolid = (compressionInfo & 0x40) == 0x40;
@@ -76,7 +72,7 @@ namespace SharpCompress.Common.Rar.Headers
CompressionMethod = (byte)((compressionInfo >> 7) & 0x7);
// Bits 11 - 14 (0x3c00) define the minimum dictionary size required to extract data. Value 0 means 128 KB, 1 - 256 KB, ..., 14 - 2048 MB, 15 - 4096 MB.
WindowSize = IsDirectory ? 0 : ((size_t)0x20000) << ((compressionInfo >> 10) & 0xf);
WindowSize = IsDirectory ? 0 : ((size_t)0x20000) << ((compressionInfo>>10) & 0xf);
HostOs = reader.ReadRarVIntByte();
@@ -103,20 +99,18 @@ namespace SharpCompress.Common.Rar.Headers
FileName = ConvertPathV5(Encoding.UTF8.GetString(b, 0, b.Length));
// extra size seems to be redundant since we know the total header size
if (ExtraSize != RemainingHeaderBytes(reader))
if (ExtraSize != RemainingHeaderBytes(reader))
{
throw new InvalidFormatException("rar5 header size / extra size inconsistency");
}
isEncryptedRar5 = false;
while (RemainingHeaderBytes(reader) > 0)
{
while (RemainingHeaderBytes(reader) > 0) {
var size = reader.ReadRarVIntUInt16();
int n = RemainingHeaderBytes(reader);
var type = reader.ReadRarVIntUInt16();
switch (type)
{
switch (type) {
//TODO
case 1: // file encryption
{
@@ -124,7 +118,7 @@ namespace SharpCompress.Common.Rar.Headers
//var version = reader.ReadRarVIntByte();
//if (version != 0) throw new InvalidFormatException("unknown encryption algorithm " + version);
}
}
break;
// case 2: // file hash
// {
@@ -135,41 +129,38 @@ namespace SharpCompress.Common.Rar.Headers
{
ushort flags = reader.ReadRarVIntUInt16();
var isWindowsTime = (flags & 1) == 0;
if ((flags & 0x2) == 0x2)
{
if ((flags & 0x2) == 0x2) {
FileLastModifiedTime = ReadExtendedTimeV5(reader, isWindowsTime);
}
if ((flags & 0x4) == 0x4)
{
if ((flags & 0x4) == 0x4) {
FileCreatedTime = ReadExtendedTimeV5(reader, isWindowsTime);
}
if ((flags & 0x8) == 0x8)
{
if ((flags & 0x8) == 0x8) {
FileLastAccessedTime = ReadExtendedTimeV5(reader, isWindowsTime);
}
}
break;
//TODO
// case 4: // file version
// {
//
// }
// break;
// case 5: // file system redirection
// {
//
// }
// break;
// case 6: // unix owner
// {
//
// }
// break;
// case 7: // service data
// {
//
// }
// break;
//TODO
// case 4: // file version
// {
//
// }
// break;
// case 5: // file system redirection
// {
//
// }
// break;
// case 6: // unix owner
// {
//
// }
// break;
// case 7: // service data
// {
//
// }
// break;
default:
// skip unknown record types to allow new record types to be added in the future
@@ -178,26 +169,25 @@ namespace SharpCompress.Common.Rar.Headers
// drain any trailing bytes of extra record
int did = n - RemainingHeaderBytes(reader);
int drain = size - did;
if (drain > 0)
if (drain > 0)
{
reader.ReadBytes(drain);
}
}
if (AdditionalDataSize != 0)
{
if (AdditionalDataSize != 0) {
CompressedSize = AdditionalDataSize;
}
}
private static DateTime ReadExtendedTimeV5(MarkingBinaryReader reader, bool isWindowsTime)
private static DateTime ReadExtendedTimeV5(MarkingBinaryReader reader, bool isWindowsTime)
{
if (isWindowsTime)
if (isWindowsTime)
{
return DateTime.FromFileTime(reader.ReadInt64());
}
else
}
else
{
return Utility.UnixTimeToDateTime(reader.ReadUInt32());
}
@@ -209,7 +199,7 @@ namespace SharpCompress.Common.Rar.Headers
{
// replace embedded \\ with valid filename char
return path.Replace('\\', '-').Replace('/', '\\');
}
}
return path;
}
@@ -384,22 +374,20 @@ namespace SharpCompress.Common.Rar.Headers
private ushort Flags { get; set; }
private bool HasFlag(ushort flag)
private bool HasFlag(ushort flag)
{
return (Flags & flag) == flag;
}
internal uint FileCrc
{
get
{
if (IsRar5 && !HasFlag(FileFlagsV5.HAS_CRC32))
{
//!!! rar5:
internal uint FileCrc
{
get {
if (IsRar5 && !HasFlag(FileFlagsV5.HAS_CRC32)) {
//!!! rar5:
throw new InvalidOperationException("TODO rar5");
}
return _fileCrc;
}
return _fileCrc;
}
private set => _fileCrc = value;
}
@@ -419,7 +407,7 @@ namespace SharpCompress.Common.Rar.Headers
//case 29: // rar 3.x compression
//case 50: // RAR 5.0 compression algorithm.
internal byte CompressionAlgorithm { get; private set; }
public bool IsSolid { get; private set; }
// unused for UnpackV1 implementation (limitation)
@@ -437,14 +425,13 @@ namespace SharpCompress.Common.Rar.Headers
internal long DataStartPosition { get; set; }
public Stream PackedStream { get; set; }
public bool IsSplitBefore => IsRar5 ? HasHeaderFlag(HeaderFlagsV5.SPLIT_BEFORE) : HasFlag(FileFlagsV4.SPLIT_BEFORE);
public bool IsSplitAfter => IsRar5 ? HasHeaderFlag(HeaderFlagsV5.SPLIT_AFTER) : HasFlag(FileFlagsV4.SPLIT_AFTER);
public bool IsDirectory => HasFlag(IsRar5 ? FileFlagsV5.DIRECTORY : FileFlagsV4.DIRECTORY);
private bool isEncryptedRar5 = false;
public bool IsEncrypted => IsRar5 ? isEncryptedRar5 : HasFlag(FileFlagsV4.PASSWORD);
public bool IsEncrypted => IsRar5 ? isEncryptedRar5: HasFlag(FileFlagsV4.PASSWORD);
internal DateTime? FileLastModifiedTime { get; private set; }
internal DateTime? FileCreatedTime { get; private set; }
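A worked decode of the compressionInfo bit layout described in ReadFromReaderV5 (the value is illustrative):
// compressionInfo = 0x1580
//   (0x1580 & 0x3f) + 50     = 50     -> CompressionAlgorithm (RAR 5.0)
//   (0x1580 & 0x40) == 0x40  = false  -> IsSolid
//   (0x1580 >> 7) & 0x7      = 3      -> CompressionMethod
//   (0x1580 >> 10) & 0xf     = 5      -> WindowSize = 0x20000 << 5 = 4 MiB (128 KB * 2^5)
// For directory entries WindowSize is forced to 0, as in the code above.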

View File

@@ -42,10 +42,10 @@ namespace SharpCompress.Common.Rar.Headers
}
internal static class EncryptionFlagsV5
{
{
// RAR 5.0 archive encryption header specific flags.
public const uint CHFL_CRYPT_PSWCHECK = 0x01; // Password check data is present.
public const uint FHEXTRA_CRYPT_PSWCHECK = 0x01; // Password check data is present.
public const uint FHEXTRA_CRYPT_HASHMAC = 0x02;
}

View File

@@ -1,6 +1,6 @@
namespace SharpCompress.Common.Rar.Headers
{
internal interface IRarHeader
internal interface IRarHeader
{
HeaderType HeaderType { get; }
}

View File

@@ -11,98 +11,71 @@ namespace SharpCompress.Common.Rar.Headers
public bool IsRar5 { get; }
private MarkHeader(bool isRar5)
{
private MarkHeader(bool isRar5)
{
IsRar5 = isRar5;
}
public HeaderType HeaderType => HeaderType.Mark;
private static byte GetByte(Stream stream)
private static byte GetByte(Stream stream)
{
var b = stream.ReadByte();
if (b != -1)
if (b != -1)
{
return (byte)b;
}
throw new EndOfStreamException();
}
public static MarkHeader Read(Stream stream, bool leaveStreamOpen, bool lookForHeader)
public static MarkHeader Read(Stream stream, bool leaveStreamOpen, bool lookForHeader)
{
int maxScanIndex = lookForHeader ? MAX_SFX_SIZE : 0;
try
{
int start = -1;
var b = GetByte(stream); start++;
while (start <= maxScanIndex)
while (start <= maxScanIndex)
{
// Rar old signature: 52 45 7E 5E
// Rar4 signature: 52 61 72 21 1A 07 00
// Rar5 signature: 52 61 72 21 1A 07 01 00
if (b == 0x52)
if (b == 0x52)
{
b = GetByte(stream); start++;
if (b == 0x61)
if (b == 0x61)
{
b = GetByte(stream); start++;
if (b != 0x72)
{
continue;
}
if (b != 0x72) continue;
b = GetByte(stream); start++;
if (b != 0x21) continue;
b = GetByte(stream); start++;
if (b != 0x1a) continue;
b = GetByte(stream); start++;
if (b != 0x07) continue;
b = GetByte(stream); start++;
if (b != 0x21)
{
continue;
}
b = GetByte(stream); start++;
if (b != 0x1a)
{
continue;
}
b = GetByte(stream); start++;
if (b != 0x07)
{
continue;
}
b = GetByte(stream); start++;
if (b == 1)
if (b == 1)
{
b = GetByte(stream); start++;
if (b != 0)
{
continue;
}
if (b != 0) continue;
return new MarkHeader(true); // Rar5
}
else if (b == 0)
}
else if (b == 0)
{
return new MarkHeader(false); // Rar4
}
}
else if (b == 0x45)
}
}
else if (b == 0x45)
{
b = GetByte(stream); start++;
if (b != 0x7e)
{
continue;
}
if (b != 0x7e) continue;
b = GetByte(stream); start++;
if (b != 0x5e)
{
continue;
}
if (b != 0x5e) continue;
throw new InvalidFormatException("Rar format version pre-4 is unsupported.");
}
}
else
}
}
else
{
b = GetByte(stream); start++;
}
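
For context on the scan above: the reader walks the stream byte by byte until it matches one of the three signatures listed in the comment (pre-4 52 45 7E 5E, RAR4 52 61 72 21 1A 07 00, RAR5 52 61 72 21 1A 07 01 00). A minimal sketch of the same check against an already-buffered prefix, purely illustrative and not part of this diff, could look like:

// Signature bytes taken from the comment in MarkHeader.Read above; helper names are made up.
private static readonly byte[] Rar4Signature = { 0x52, 0x61, 0x72, 0x21, 0x1A, 0x07, 0x00 };
private static readonly byte[] Rar5Signature = { 0x52, 0x61, 0x72, 0x21, 0x1A, 0x07, 0x01, 0x00 };

private static bool StartsWith(ReadOnlySpan<byte> data, ReadOnlySpan<byte> signature) =>
    data.Length >= signature.Length && data.Slice(0, signature.Length).SequenceEqual(signature);

// Usage: bool isRar5 = StartsWith(buffer, Rar5Signature);
//        bool isRar4 = !isRar5 && StartsWith(buffer, Rar4Signature);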


@@ -2,23 +2,23 @@
namespace SharpCompress.Common.Rar.Headers
{
internal sealed class NewSubHeaderType : IEquatable<NewSubHeaderType>
internal class NewSubHeaderType : IEquatable<NewSubHeaderType>
{
internal static readonly NewSubHeaderType SUBHEAD_TYPE_CMT = new('C', 'M', 'T');
internal static readonly NewSubHeaderType SUBHEAD_TYPE_CMT = new NewSubHeaderType('C', 'M', 'T');
//internal static final NewSubHeaderType SUBHEAD_TYPE_ACL = new (new byte[]{'A','C','L'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_ACL = new NewSubHeaderType(new byte[]{'A','C','L'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_STREAM = new (new byte[]{'S','T','M'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_STREAM = new NewSubHeaderType(new byte[]{'S','T','M'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_UOWNER = new (new byte[]{'U','O','W'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_UOWNER = new NewSubHeaderType(new byte[]{'U','O','W'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_AV = new (new byte[]{'A','V'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_AV = new NewSubHeaderType(new byte[]{'A','V'});
internal static readonly NewSubHeaderType SUBHEAD_TYPE_RR = new('R', 'R');
internal static readonly NewSubHeaderType SUBHEAD_TYPE_RR = new NewSubHeaderType('R', 'R');
//internal static final NewSubHeaderType SUBHEAD_TYPE_OS2EA = new (new byte[]{'E','A','2'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_OS2EA = new NewSubHeaderType(new byte[]{'E','A','2'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_BEOSEA = new (new byte[]{'E','A','B','E'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_BEOSEA = new NewSubHeaderType(new byte[]{'E','A','B','E'});
private readonly byte[] _bytes;
@@ -37,13 +37,19 @@ namespace SharpCompress.Common.Rar.Headers
{
return false;
}
return _bytes.AsSpan().SequenceEqual(bytes);
for (int i = 0; i < bytes.Length; ++i)
{
if (_bytes[i] != bytes[i])
{
return false;
}
}
return true;
}
public bool Equals(NewSubHeaderType? other)
public bool Equals(NewSubHeaderType other)
{
return other is not null && Equals(other._bytes);
return Equals(other._bytes);
}
}
}
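
The change above replaces a manual index loop with Span<T>.SequenceEqual, which performs the same element-by-element comparison. A standalone usage sketch with made-up values:

// Requires using System; (MemoryExtensions). Values are illustrative only.
byte[] stored = { (byte)'C', (byte)'M', (byte)'T' };
byte[] candidate = { (byte)'C', (byte)'M', (byte)'T' };
bool equal = stored.AsSpan().SequenceEqual(candidate); // true; false if length or contents differ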


@@ -3,15 +3,12 @@
namespace SharpCompress.Common.Rar.Headers
{
// ProtectHeader is part of the Recovery Record feature
internal sealed class ProtectHeader : RarHeader
internal class ProtectHeader : RarHeader
{
public ProtectHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Protect)
public ProtectHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Protect)
{
if (IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");
}
if (IsRar5) throw new InvalidFormatException("unexpected rar5 record");
}
protected override void ReadFinish(MarkingBinaryReader reader)
@@ -26,6 +23,6 @@ namespace SharpCompress.Common.Rar.Headers
internal byte Version { get; private set; }
internal ushort RecSectors { get; private set; }
internal uint TotalBlocks { get; private set; }
internal byte[]? Mark { get; private set; }
internal byte[] Mark { get; private set; }
}
}


@@ -11,7 +11,7 @@ namespace SharpCompress.Common.Rar.Headers
private readonly HeaderType _headerType;
private readonly bool _isRar5;
internal static RarHeader? TryReadBase(RarCrcBinaryReader reader, bool isRar5, ArchiveEncoding archiveEncoding)
internal static RarHeader TryReadBase(RarCrcBinaryReader reader, bool isRar5, ArchiveEncoding archiveEncoding)
{
try
{
@@ -23,12 +23,12 @@ namespace SharpCompress.Common.Rar.Headers
}
}
private RarHeader(RarCrcBinaryReader reader, bool isRar5, ArchiveEncoding archiveEncoding)
private RarHeader(RarCrcBinaryReader reader, bool isRar5, ArchiveEncoding archiveEncoding)
{
_headerType = HeaderType.Null;
_isRar5 = isRar5;
ArchiveEncoding = archiveEncoding;
if (IsRar5)
if (IsRar5)
{
HeaderCrc = reader.ReadUInt32();
reader.ResetCrc();
@@ -45,9 +45,7 @@ namespace SharpCompress.Common.Rar.Headers
{
AdditionalDataSize = (long)reader.ReadRarVInt();
}
}
else
{
} else {
reader.Mark();
HeaderCrc = reader.ReadUInt16();
reader.ResetCrc();
@@ -61,8 +59,7 @@ namespace SharpCompress.Common.Rar.Headers
}
}
protected RarHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType)
{
protected RarHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType) {
_headerType = headerType;
_isRar5 = header.IsRar5;
HeaderCrc = header.HeaderCrc;
@@ -83,8 +80,7 @@ namespace SharpCompress.Common.Rar.Headers
VerifyHeaderCrc(reader.GetCrc32());
}
protected int RemainingHeaderBytes(MarkingBinaryReader reader)
{
protected int RemainingHeaderBytes(MarkingBinaryReader reader) {
return checked(HeaderSize - (int)reader.CurrentReadByteCount);
}
@@ -112,7 +108,7 @@ namespace SharpCompress.Common.Rar.Headers
protected ushort HeaderFlags { get; }
protected bool HasHeaderFlag(ushort flag)
protected bool HasHeaderFlag(ushort flag)
{
return (HeaderFlags & flag) == flag;
}
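
In the RAR5 branch above, header and extra-data sizes are read with reader.ReadRarVInt(). RAR5 stores these as variable-length integers: the low seven bits of each byte carry data (least-significant group first) and the high bit flags that another byte follows. A self-contained sketch of that decoding, assumed rather than copied from SharpCompress:

using System.IO;

static ulong ReadVInt(Stream stream)
{
    ulong value = 0;
    int shift = 0;
    while (true)
    {
        int b = stream.ReadByte();
        if (b == -1) throw new EndOfStreamException();
        value |= (ulong)(b & 0x7F) << shift; // low 7 bits carry data
        if ((b & 0x80) == 0) return value;   // high bit clear: this was the last byte
        shift += 7;
    }
}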


@@ -1,3 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.IO;
@@ -25,7 +26,7 @@ namespace SharpCompress.Common.Rar.Headers
_isRar5 = markHeader.IsRar5;
yield return markHeader;
RarHeader? header;
RarHeader header;
while ((header = TryReadNextHeader(stream)) != null)
{
yield return header;
@@ -38,16 +39,16 @@ namespace SharpCompress.Common.Rar.Headers
}
}
private RarHeader? TryReadNextHeader(Stream stream)
private RarHeader TryReadNextHeader(Stream stream)
{
RarCrcBinaryReader reader;
if (!IsEncrypted)
if (!IsEncrypted)
{
reader = new RarCrcBinaryReader(stream);
}
else
}
else
{
if (Options.Password is null)
if (Options.Password == null)
{
throw new CryptographicException("Encrypted Rar archive has no password specified.");
}
@@ -55,7 +56,7 @@ namespace SharpCompress.Common.Rar.Headers
}
var header = RarHeader.TryReadBase(reader, _isRar5, Options.ArchiveEncoding);
if (header is null)
if (header == null)
{
return null;
}
@@ -65,7 +66,7 @@ namespace SharpCompress.Common.Rar.Headers
case HeaderCodeV.RAR4_ARCHIVE_HEADER:
{
var ah = new ArchiveHeader(header, reader);
if (ah.IsEncrypted == true)
if (ah.IsEncrypted == true)
{
//!!! rar5 we don't know yet
IsEncrypted = true;
@@ -127,13 +128,13 @@ namespace SharpCompress.Common.Rar.Headers
case StreamingMode.Streaming:
{
var ms = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
if (fh.R4Salt is null)
if (fh.R4Salt == null)
{
fh.PackedStream = ms;
}
else
{
fh.PackedStream = new RarCryptoWrapper(ms, Options.Password!, fh.R4Salt);
fh.PackedStream = new RarCryptoWrapper(ms, Options.Password, fh.R4Salt);
}
}
break;
@@ -150,11 +151,11 @@ namespace SharpCompress.Common.Rar.Headers
return new EndArchiveHeader(header, reader);
}
case HeaderCodeV.RAR5_ARCHIVE_ENCRYPTION_HEADER:
{
var ch = new ArchiveCryptHeader(header, reader);
IsEncrypted = true;
return ch;
}
{
var ch = new ArchiveCryptHeader(header, reader);
IsEncrypted = true;
return ch;
}
default:
{
throw new InvalidFormatException("Unknown Rar Header: " + header.HeaderCode);
@@ -162,26 +163,21 @@ namespace SharpCompress.Common.Rar.Headers
}
}
private void SkipData(FileHeader fh, RarCrcBinaryReader reader)
{
switch (StreamingMode)
{
case StreamingMode.Seekable:
{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
private void SkipData(FileHeader fh, RarCrcBinaryReader reader) {
switch (StreamingMode) {
case StreamingMode.Seekable: {
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:
{
//skip the data because it's useless?
reader.BaseStream.Skip(fh.CompressedSize);
}
case StreamingMode.Streaming: {
//skip the data because it's useless?
reader.BaseStream.Skip(fh.CompressedSize);
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
default: {
throw new InvalidFormatException("Invalid StreamingMode");
}
}
}
}
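
SkipData above either advances Position directly (seekable streams) or calls the Skip extension (forward-only streams). On a non-seekable stream the only way to skip is to read and discard; a minimal equivalent, assuming nothing about SharpCompress's own extension, might be:

using System;
using System.IO;

static void SkipBytes(Stream stream, long count)
{
    var buffer = new byte[81920];
    while (count > 0)
    {
        int read = stream.Read(buffer, 0, (int)Math.Min(buffer.Length, count));
        if (read == 0)
        {
            throw new EndOfStreamException(); // stream ended before the packed data did
        }
        count -= read;
    }
}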


@@ -5,12 +5,9 @@ namespace SharpCompress.Common.Rar.Headers
internal class SignHeader : RarHeader
{
protected SignHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Sign)
{
if (IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");
}
: base(header, reader, HeaderType.Sign)
{
if (IsRar5) throw new InvalidFormatException("unexpected rar5 record");
}
protected override void ReadFinish(MarkingBinaryReader reader)


@@ -3,7 +3,7 @@ using System.IO;
namespace SharpCompress.Common.Rar
{
internal sealed class RarCryptoBinaryReader : RarCrcBinaryReader
internal class RarCryptoBinaryReader : RarCrcBinaryReader
{
private RarRijndael _rijndael;
private byte[] _salt;
@@ -19,9 +19,7 @@ namespace SharpCompress.Common.Rar
// coderb: not sure why this was being done at this logical point
//SkipQueue();
byte[] salt = ReadBytes(8);
_salt = salt;
_rijndael = RarRijndael.InitializeFrom(_password, salt);
InitializeAes(salt);
}
// track read count ourselves rather than using the underlying stream since we buffer
@@ -41,6 +39,12 @@ namespace SharpCompress.Common.Rar
private bool UseEncryption => _salt != null;
internal void InitializeAes(byte[] salt)
{
_salt = salt;
_rijndael = RarRijndael.InitializeFrom(_password, salt);
}
public override byte ReadByte()
{
if (UseEncryption)
@@ -77,9 +81,7 @@ namespace SharpCompress.Common.Rar
byte[] cipherText = ReadBytesNoCrc(16);
var readBytes = _rijndael.ProcessBlock(cipherText);
foreach (var readByte in readBytes)
{
_data.Enqueue(readByte);
}
}
}
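
The comment above ("track read count ourselves rather than using the underlying stream since we buffer") describes the buffering scheme: ciphertext is decrypted one 16-byte block at a time and the plaintext bytes are queued so single-byte reads can be served from the queue. A hedged sketch of that pattern, reusing the _data, _rijndael and ReadBytesNoCrc names visible in the diff (the method name is hypothetical):

private byte NextDecryptedByte()
{
    if (_data.Count == 0)
    {
        byte[] cipherText = ReadBytesNoCrc(16); // one AES block of ciphertext
        foreach (var plainByte in _rijndael.ProcessBlock(cipherText))
        {
            _data.Enqueue(plainByte); // buffer the decrypted block
        }
    }
    return _data.Dequeue();
}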


@@ -4,7 +4,7 @@ using System.IO;
namespace SharpCompress.Common.Rar
{
internal sealed class RarCryptoWrapper : Stream
internal class RarCryptoWrapper : Stream
{
private readonly Stream _actualStream;
private readonly byte[] _salt;
@@ -35,7 +35,7 @@ namespace SharpCompress.Common.Rar
public override int Read(byte[] buffer, int offset, int count)
{
if (_salt is null)
if (_salt == null)
{
return _actualStream.Read(buffer, offset, count);
}
@@ -50,23 +50,20 @@ namespace SharpCompress.Common.Rar
if (sizeToRead > 0)
{
int alignedSize = sizeToRead + ((~sizeToRead + 1) & 0xf);
Span<byte> cipherText = stackalloc byte[RarRijndael.CRYPTO_BLOCK_SIZE];
for (int i = 0; i < alignedSize / 16; i++)
{
//long ax = System.currentTimeMillis();
_actualStream.Read(cipherText);
byte[] cipherText = new byte[RarRijndael.CRYPTO_BLOCK_SIZE];
_actualStream.Read(cipherText, 0, RarRijndael.CRYPTO_BLOCK_SIZE);
var readBytes = _rijndael.ProcessBlock(cipherText);
foreach (var readByte in readBytes)
{
_data.Enqueue(readByte);
}
}
for (int i = 0; i < count; i++)
{
buffer[offset + i] = _data.Dequeue();
}
}
return count;
}
@@ -91,9 +88,9 @@ namespace SharpCompress.Common.Rar
if (_rijndael != null)
{
_rijndael.Dispose();
_rijndael = null!;
_rijndael = null;
}
base.Dispose(disposing);
}
}
}
}
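
In the Read override above, sizeToRead is padded with ((~sizeToRead + 1) & 0xf): since ~n + 1 is -n in two's complement, the expression adds exactly the bytes needed to reach the next multiple of the 16-byte AES block size, so only whole blocks are decrypted. An equivalent, more conventional spelling (illustrative only):

// Rounds a non-negative size up to the next multiple of 16 (the AES block size).
static int AlignToBlock(int size) => (size + 15) & ~15;
// AlignToBlock(1) == 16, AlignToBlock(16) == 16, AlignToBlock(17) == 32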


@@ -11,7 +11,7 @@ namespace SharpCompress.Common.Rar
/// As the V2017 port isn't complete, add this check to use the legacy Rar code.
/// </summary>
internal bool IsRarV3 => FileHeader.CompressionAlgorithm == 29 || FileHeader.CompressionAlgorithm == 36;
/// <summary>
/// The File's 32 bit CRC Hash
/// </summary>
@@ -22,7 +22,7 @@ namespace SharpCompress.Common.Rar
/// </summary>
public override string Key => FileHeader.FileName;
public override string? LinkTarget => null;
public override string LinkTarget => null;
/// <summary>
/// The entry last modified time in the archive, if recorded

Some files were not shown because too many files have changed in this diff.