Compare commits

...

49 Commits

Author SHA1 Message Date
Adam Hathcock
513e59f830 Mark for 0.17.1 2017-06-09 08:28:35 +01:00
Adam Hathcock
b10a1cf2bd Bug on Windows on .NET Core fix (#257)
* Bug on Windows on .NET Core fix: https://github.com/dotnet/corefx/issues/20676

* Add comment
2017-06-09 08:22:47 +01:00
Adam Hathcock
1656edaa29 Add some more details to nuget package 2017-06-01 12:36:01 +01:00
Adam Hathcock
cff49aacba Added explicit tar skip check. Caught skip issue. 2017-06-01 11:25:32 +01:00
Adam Hathcock
19c32aff6c README fixes 2017-06-01 10:56:11 +01:00
Adam Hathcock
db3ec8337f Mark for 0.17 2017-06-01 10:54:50 +01:00
Adam Hathcock
e7bfc40461 Fix Skipping when compressed size is unknown (fallback to decompressing) 2017-06-01 09:26:08 +01:00
Adam Hathcock
3d3ca254ba Zip64 introduced seekable behavior into ZipWriter. The position may … (#252)
* Zip64 introduced seekable behavior into ZipWriter.  The position may not be zero.

* Remove some dead code

* Update formats for zip64

* Make version created by and version needed to extract the same

* Running tests is faster than skipping
2017-05-31 16:55:49 +01:00
Adam Hathcock
b45bc859a4 XZ Format (#247)
* Started integrated XZ format from https://github.com/sambott/XZ.NET

* Add readme line as it was copy/pasted

* Tar used with XZ

* update formats
2017-05-31 16:55:26 +01:00
Adam Hathcock
912d7a8775 Lzip (#245)
* First pass.  Writing isn't implemented on stream.  Tests are busted.

* LZipReader works...no file name :(

* LZipWriter works

* Writing tests are actually correct now.  LZipStream correctly writes trailer now.  lzip command line tool likes it.

* Add recommendation blurb

* Update notes for formats

* LZip isn't an archive format

* Attempting to fix and implement crc32

* LZip writing test passes

* Had to invert crc to check uncompressed data.
2017-05-31 16:51:24 +01:00
Adam Hathcock
16885da1b5 Mark for 0.16.2 2017-05-31 14:47:51 +01:00
Adam Hathcock
26714052eb Merge pull request #249 from adamhathcock/zip_entry_compression_fix
Per entry compression was being written out incorrectly on the centra…
2017-05-31 12:55:37 +01:00
Adam Hathcock
3df763a783 Merge branch 'master' into zip_entry_compression_fix 2017-05-31 11:15:30 +01:00
Adam Hathcock
925842bc4b Merge pull request #251 from dbaumber/Issue-250
Fix for Issue #250: remove extra build flags for .NET 3.5
2017-05-31 10:54:52 +01:00
Dan Baumberger
cead62704e Fix for Issue #250: remove extra build flags for .NET 3.5 as to
enable WinZipAes for .NET 3.5.
2017-05-30 13:43:48 -07:00
Adam Hathcock
3f24a744c0 Merge branch 'master' into zip_entry_compression_fix 2017-05-30 16:10:41 +01:00
Adam Hathcock
cce97548a2 Merge pull request #212 from kenkendk/remove_unused_code
Removed the unused code to write entries in Zip Headers
2017-05-30 16:09:04 +01:00
Adam Hathcock
9270d7cabf Add cache for dotnet packages 2017-05-30 16:04:55 +01:00
Adam Hathcock
264aa6d366 Merge branch 'master' into remove_unused_code 2017-05-30 15:58:44 +01:00
Adam Hathcock
69fc74e376 Per entry compression was being written out incorrectly on the central directory. Fix for that. 2017-05-30 15:37:41 +01:00
Adam Hathcock
a361d41e68 Fix test namespaces 2017-05-30 15:14:02 +01:00
Adam Hathcock
38766dac99 Wrong logic for skipping tests 2017-05-30 12:50:03 +01:00
Adam Hathcock
c30bc65281 Don't run tests on travis either 2017-05-30 12:46:34 +01:00
Adam Hathcock
296ebd942a Shrink script a bit 2017-05-30 12:37:16 +01:00
Adam Hathcock
afa19f7ad8 Add xplat cake and travis build 2017-05-30 12:35:12 +01:00
Adam Hathcock
a193b2d3b1 Add xplat build 2017-05-29 10:35:55 +01:00
Adam Hathcock
be4a65e572 update readme 2017-05-24 08:52:12 +01:00
Adam Hathcock
6832918e71 Mark for 0.16.1 2017-05-23 16:21:07 +01:00
Adam Hathcock
fd9a3ffbcc Merge commit '18641d4f9b849daea7b6fbb7edad51369534ffa3'
* commit '18641d4f9b849daea7b6fbb7edad51369534ffa3':
  Normalize Rar keys
2017-05-23 16:15:58 +01:00
Adam Hathcock
41added690 Private setter clean up 2017-05-23 16:15:47 +01:00
Adam Hathcock
18641d4f9b Merge pull request #238 from adamhathcock/issue_201
Normalize Rar keys
2017-05-23 16:14:55 +01:00
Adam Hathcock
4d0c5099d4 Merge branch 'master' into issue_201 2017-05-23 16:13:09 +01:00
Adam Hathcock
9d9d491245 Slightly better fix for https://github.com/adamhathcock/sharpcompress/pull/235 2017-05-23 16:10:15 +01:00
Adam Hathcock
7b81d18071 Merge pull request #235 from dbaumber/Issue-230
Issue #230: preserve the compression method when getting a compressed…
2017-05-23 15:50:32 +01:00
Dan Baumberger
7d0acbc988 Merge branch 'Issue-230' of https://github.com/dbaumber/sharpcompress into Issue-230 2017-05-23 07:46:48 -07:00
Dan Baumberger
313c044c41 Added a unit test for the WinZipAes multiple OpenEntryStream() bug. 2017-05-23 07:44:45 -07:00
Dan Baumberger
f6f8adf97e Merge branch 'master' into Issue-230 2017-05-23 07:43:02 -07:00
Adam Hathcock
bc97d325ca Normalize Rar keys 2017-05-22 10:55:15 +01:00
Adam Hathcock
0f2d325f20 oh yeah, appveyor doesn't like the tests 2017-05-22 09:08:16 +01:00
Adam Hathcock
63d5503e12 forgot to actually add tests to script 2017-05-22 09:06:33 +01:00
Adam Hathcock
e53f2cac4a Mark for 0.16.0 2017-05-22 08:58:52 +01:00
Adam Hathcock
3b73464233 Merge pull request #236 from damieng/zip-min-version-of-20
Default zip ver to 20 (deflate/encyption), fixes #164
2017-05-22 08:38:18 +01:00
Damien Guard
575f10f766 Default zip ver to 20 (deflate/encyption), fixes #164 2017-05-19 16:37:20 -07:00
Dan Baumberger
8d3fc3533b Issue #230: preserve the compression method when getting a compressed stream for encrypted ZIP archives. 2017-05-19 08:36:11 -07:00
Adam Hathcock
60370b8539 don't run appveyor tests 2017-05-19 15:51:06 +01:00
Adam Hathcock
f6db114865 Remove console writelines 2017-05-19 15:47:53 +01:00
Adam Hathcock
1c6c344b6b Tests don't run on appveyor 2017-05-19 15:45:29 +01:00
Adam Hathcock
d0302898e0 Add back net45,net35 and cake 2017-05-19 13:33:12 +01:00
Kenneth Skovhede
ba12019bc7 Removed the unused code to write entries in Zip Headers 2017-03-11 08:05:49 +01:00
76 changed files with 1762 additions and 321 deletions

13
.travis.yml Normal file
View File

@@ -0,0 +1,13 @@
dist: trusty
language: csharp
cache:
directories:
- $HOME/.dotnet
solution: SharpCompress.sln
matrix:
include:
- dotnet: 1.0.4
mono: none
env: DOTNETCORE=1
script:
- ./build.sh

View File

@@ -1,10 +1,10 @@
# Archive Formats
# Formats
## Accessing Archives
Archive classes allow random access to a seekable stream.
Reader classes allow forward-only reading
Writer classes allow forward-only Writing
- Archive classes allow random access to a seekable stream.
- Reader classes allow forward-only reading on a stream.
- Writer classes allow forward-only Writing on a stream.
## Supported Format Table
@@ -12,18 +12,24 @@ Writer classes allow forward-only Writing
| --- | --- | --- | --- | --- | --- |
| Rar | Rar | Decompress (1) | RarArchive | RarReader | N/A |
| Zip (2) | None, DEFLATE, BZip2, LZMA/LZMA2, PPMd | Both | ZipArchive | ZipReader | ZipWriter |
| Tar | None, BZip2, GZip, LZip | Both | TarArchive | TarReader | TarWriter (3) |
| GZip (single file) | GZip | Both | GZipArchive | GZipReader | GZipWriter |
| Tar | None | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.GZip | DEFLATE | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.BZip2 | BZip2 | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.LZip | LZMA | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.XZ | LZMA2 | Decompress | TarArchive | TarReader | TarWriter (3) |
| GZip (single file) | DEFLATE | Both | GZipArchive | GZipReader | GZipWriter |
| 7Zip (4) | LZMA, LZMA2, BZip2, PPMd, BCJ, BCJ2, Deflate | Decompress | SevenZipArchive | N/A | N/A |
| LZip (single file) (5) | LZip (LZMA) | Both | LZipArchive | LZipReader | LZipWriter |
1. SOLID Rars are only supported in the RarReader API.
2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading is supported.
2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading/writing is supported but only with seekable streams as the Zip spec doesn't support Zip64 data in post data descriptors.
3. The Tar format requires a file size in the header. If no size is specified to the TarWriter and the stream is not seekable, then an exception will be thrown.
4. The 7Zip format doesn't allow for reading as a forward-only stream so 7Zip is only supported through the Archive API
5. LZip has no support for extra data like the file name or timestamp. There is a default filename used when looking at the entry Key on the archive.
## Compressors
## Compression Streams
For those who want to directly compress/decompress bits
For those who want to directly compress/decompress bits. The single file formats are represented here as well. However, BZip2, LZip and XZ have no metadata (GZip has a little) so using them without something like a Tar file makes little sense.
| Compressor | Compress/Decompress |
| --- | --- |
@@ -33,4 +39,22 @@ For those who want to directly compress/decompress bits
| LZMAStream | Both |
| PPMdStream | Both |
| ADCStream | Decompress |
| LZipStream | Decompress |
| LZipStream | Both |
| XZStream | Decompress |
## Archive Formats vs Compression
Sometimes the terminology gets mixed.
### Compression
DEFLATE, LZMA are pure compression algorithms
### Formats
Formats like Zip, 7Zip, Rar are archive formats only. They use other compression methods (e.g. DEFLATE, LZMA, etc.) or proprietary ones (e.g. RAR)
### Overlap
GZip, BZip2 and LZip are single file archival formats. The overlap in the API happens because Tar uses the single file formats as "compression" methods and the API tries to hide this a bit.

View File

@@ -1,16 +1,30 @@
# SharpCompress
SharpCompress is a compression library for .NET/Mono/Silverlight/WP7 that can unrar, un7zip, unzip, untar unbzip2 and ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip are implemented.
SharpCompress is a compression library in pure C# for .NET 3.5, 4.5, .NET Standard 1.0, 1.3 that can unrar, un7zip, unzip, untar unbzip2 and ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip are implemented.
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
AppVeyor Build -
[![Build status](https://ci.appveyor.com/api/projects/status/voxg971oemmvxh1e/branch/master?svg=true)](https://ci.appveyor.com/project/adamhathcock/sharpcompress/branch/master)
Travis CI Build -
[![Build Status](https://travis-ci.org/adamhathcock/sharpcompress.svg?branch=master)](https://travis-ci.org/adamhathcock/sharpcompress)
## Need Help?
Post Issues on Github!
Check the [Supported Formats](FORMATS.md) and [Basic Usage.](USAGE.md)
## Recommended Formats
In general, I recommend GZip (Deflate)/BZip2 (BZip)/LZip (LZMA) as the simplicity of the formats lend to better long term archival as well as the streamability. Tar is often used in conjunction for multiple files in a single archive (e.g. `.tar.gz`)
Zip is okay, but it's a very hap-hazard format and the variation in headers and implementations makes it hard to get correct. Uses Deflate by default but supports a lot of compression methods.
RAR is not recommended as it's a proprietary format and the compression is closed source. Use Tar/LZip for LZMA
7Zip and XZ both are overly complicated. 7Zip does not support streamable formats. XZ has known holes explained here: (http://www.nongnu.org/lzip/xz_inadequate.html) Use Tar/LZip for LZMA compression instead.
## A Simple Request
Hi everyone. I hope you're using SharpCompress and finding it useful. Please give me feedback on what you'd like to see changed especially as far as usability goes. New feature suggestions are always welcome as well. I would also like to know what projects SharpCompress is being used in. I like seeing how it is used to give me ideas for future versions. Thanks!
@@ -27,10 +41,39 @@ I'm always looking for help or ideas. Please submit code or email with ideas. Un
* 7Zip writing
* Zip64 (Need writing and extend Reading)
* Multi-volume Zip support.
* RAR5 support
## Version Log
### Version 0.17.1
* Fix - [Bug Fix for .NET Core on Windows](https://github.com/adamhathcock/sharpcompress/pull/257)
### Version 0.17.0
* New - Full LZip support! Can read and write LZip files and Tars inside LZip files. [Make LZip a first class citizen. #241](https://github.com/adamhathcock/sharpcompress/issues/241)
* New - XZ read support! Can read XZ files and Tars inside XZ files. [XZ in SharpCompress #94](https://github.com/adamhathcock/sharpcompress/issues/94)
* Fix - [Regression - zip file writing on seekable streams always assumed stream start was 0. Introduced with Zip64 writing.](https://github.com/adamhathcock/sharpcompress/issues/244)
* Fix - [Zip files with post-data descriptors can be properly skipped via decompression](https://github.com/adamhathcock/sharpcompress/issues/162)
### Version 0.16.2
* Fix [.NET 3.5 should support files and cryptography (was a regression from 0.16.0)](https://github.com/adamhathcock/sharpcompress/pull/251)
* Fix [Zip per entry compression customization wrote the wrong method into the zip archive](https://github.com/adamhathcock/sharpcompress/pull/249)
### Version 0.16.1
* Fix [Preserve compression method when getting a compressed stream](https://github.com/adamhathcock/sharpcompress/pull/235)
* Fix [RAR entry key normalization fix](https://github.com/adamhathcock/sharpcompress/issues/201)
### Version 0.16.0
* Breaking - [Progress Event Tracking rethink](https://github.com/adamhathcock/sharpcompress/pull/226)
* Update to VS2017 - [VS2017](https://github.com/adamhathcock/sharpcompress/pull/231) - Framework targets have been changed.
* New - [Add Zip64 writing](https://github.com/adamhathcock/sharpcompress/pull/211)
* [Fix invalid/mismatching Zip version flags.](https://github.com/adamhathcock/sharpcompress/issues/164) - This allows nuget/System.IO.Packaging to read zip files generated by SharpCompress
* [Fix 7Zip directory hiding](https://github.com/adamhathcock/sharpcompress/pull/215/files)
* [Verify RAR CRC headers](https://github.com/adamhathcock/sharpcompress/pull/220)
### Version 0.15.2
* [Fix invalid headers](https://github.com/adamhathcock/sharpcompress/pull/210) - fixes an issue creating large-ish zip archives that was introduced with zip64 reading.
@@ -135,6 +178,8 @@ I'm always looking for help or ideas. Please submit code or email with ideas. Un
* Embedded some BouncyCastle crypto classes to allow RAR Decryption and Winzip AES Decryption in Portable and Windows Store DLLs
* Built in Release (I think)
XZ implementation based on: https://github.com/sambott/XZ.NET by @sambott
7Zip implementation based on: https://code.google.com/p/managed-lzma/
LICENSE

View File

@@ -11,18 +11,10 @@ branches:
nuget:
disable_publish_on_pr: true
before_build:
- cmd: dotnet restore
build_script:
- ps: .\build.ps1
build:
parallel: true
verbosity: minimal
after_build:
- dotnet pack "src\SharpCompress\SharpCompress.csproj" -c Release
test_script:
- dotnet test --no-build .\tests\SharpCompress.Test\SharpCompress.Test.csproj
test: off
artifacts:
- path: src\SharpCompress\bin\Release\*.nupkg

93
build.cake Normal file
View File

@@ -0,0 +1,93 @@
var target = Argument("target", "Default");
var tag = Argument("tag", "cake");
Task("Restore")
.Does(() =>
{
DotNetCoreRestore(".");
});
Task("Build")
.IsDependentOn("Restore")
.Does(() =>
{
if (IsRunningOnWindows())
{
MSBuild("./sharpcompress.sln", c =>
{
c.SetConfiguration("Release")
.SetVerbosity(Verbosity.Minimal)
.UseToolVersion(MSBuildToolVersion.VS2017);
});
}
else
{
var settings = new DotNetCoreBuildSettings
{
Framework = "netstandard1.0",
Configuration = "Release"
};
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
settings.Framework = "netcoreapp1.1";
DotNetCoreBuild("./tests/SharpCompress.Test/SharpCompress.Test.csproj", settings);
}
});
Task("Test")
.IsDependentOn("Build")
.Does(() =>
{
if (!bool.Parse(EnvironmentVariable("APPVEYOR") ?? "false")
&& !bool.Parse(EnvironmentVariable("TRAVIS") ?? "false"))
{
var files = GetFiles("tests/**/*.csproj");
foreach(var file in files)
{
var settings = new DotNetCoreTestSettings
{
Configuration = "Release"
};
DotNetCoreTest(file.ToString(), settings);
}
}
else
{
Information("Skipping tests as this is AppVeyor or Travis CI");
}
});
Task("Pack")
.IsDependentOn("Build")
.Does(() =>
{
if (IsRunningOnWindows())
{
MSBuild("src/SharpCompress/SharpCompress.csproj", c => c
.SetConfiguration("Release")
.SetVerbosity(Verbosity.Minimal)
.UseToolVersion(MSBuildToolVersion.VS2017)
.WithProperty("NoBuild", "true")
.WithTarget("Pack"));
}
else
{
Information("Skipping Pack as this is not Windows");
}
});
Task("Default")
.IsDependentOn("Restore")
.IsDependentOn("Build")
.IsDependentOn("Test")
.IsDependentOn("Pack");
Task("RunTests")
.IsDependentOn("Restore")
.IsDependentOn("Build")
.IsDependentOn("Test");
RunTarget(target);

228
build.ps1 Normal file
View File

@@ -0,0 +1,228 @@
##########################################################################
# This is the Cake bootstrapper script for PowerShell.
# This file was downloaded from https://github.com/cake-build/resources
# Feel free to change this file to fit your needs.
##########################################################################
<#
.SYNOPSIS
This is a Powershell script to bootstrap a Cake build.
.DESCRIPTION
This Powershell script will download NuGet if missing, restore NuGet tools (including Cake)
and execute your Cake build script with the parameters you provide.
.PARAMETER Script
The build script to execute.
.PARAMETER Target
The build script target to run.
.PARAMETER Configuration
The build configuration to use.
.PARAMETER Verbosity
Specifies the amount of information to be displayed.
.PARAMETER Experimental
Tells Cake to use the latest Roslyn release.
.PARAMETER WhatIf
Performs a dry run of the build script.
No tasks will be executed.
.PARAMETER Mono
Tells Cake to use the Mono scripting engine.
.PARAMETER SkipToolPackageRestore
Skips restoring of packages.
.PARAMETER ScriptArgs
Remaining arguments are added here.
.LINK
http://cakebuild.net
#>
[CmdletBinding()]
Param(
[string]$Script = "build.cake",
[string]$Target = "Default",
[ValidateSet("Release", "Debug")]
[string]$Configuration = "Release",
[ValidateSet("Quiet", "Minimal", "Normal", "Verbose", "Diagnostic")]
[string]$Verbosity = "Verbose",
[switch]$Experimental,
[Alias("DryRun","Noop")]
[switch]$WhatIf,
[switch]$Mono,
[switch]$SkipToolPackageRestore,
[Parameter(Position=0,Mandatory=$false,ValueFromRemainingArguments=$true)]
[string[]]$ScriptArgs
)
[Reflection.Assembly]::LoadWithPartialName("System.Security") | Out-Null
function MD5HashFile([string] $filePath)
{
if ([string]::IsNullOrEmpty($filePath) -or !(Test-Path $filePath -PathType Leaf))
{
return $null
}
[System.IO.Stream] $file = $null;
[System.Security.Cryptography.MD5] $md5 = $null;
try
{
$md5 = [System.Security.Cryptography.MD5]::Create()
$file = [System.IO.File]::OpenRead($filePath)
return [System.BitConverter]::ToString($md5.ComputeHash($file))
}
finally
{
if ($file -ne $null)
{
$file.Dispose()
}
}
}
Write-Host "Preparing to run build script..."
if(!$PSScriptRoot){
$PSScriptRoot = Split-Path $MyInvocation.MyCommand.Path -Parent
}
$TOOLS_DIR = Join-Path $PSScriptRoot "tools"
$ADDINS_DIR = Join-Path $TOOLS_DIR "addins"
$MODULES_DIR = Join-Path $TOOLS_DIR "modules"
$NUGET_EXE = Join-Path $TOOLS_DIR "nuget.exe"
$CAKE_EXE = Join-Path $TOOLS_DIR "Cake/Cake.exe"
$NUGET_URL = "https://dist.nuget.org/win-x86-commandline/latest/nuget.exe"
$PACKAGES_CONFIG = Join-Path $TOOLS_DIR "packages.config"
$PACKAGES_CONFIG_MD5 = Join-Path $TOOLS_DIR "packages.config.md5sum"
$ADDINS_PACKAGES_CONFIG = Join-Path $ADDINS_DIR "packages.config"
$MODULES_PACKAGES_CONFIG = Join-Path $MODULES_DIR "packages.config"
# Should we use mono?
$UseMono = "";
if($Mono.IsPresent) {
Write-Verbose -Message "Using the Mono based scripting engine."
$UseMono = "-mono"
}
# Should we use the new Roslyn?
$UseExperimental = "";
if($Experimental.IsPresent -and !($Mono.IsPresent)) {
Write-Verbose -Message "Using experimental version of Roslyn."
$UseExperimental = "-experimental"
}
# Is this a dry run?
$UseDryRun = "";
if($WhatIf.IsPresent) {
$UseDryRun = "-dryrun"
}
# Make sure tools folder exists
if ((Test-Path $PSScriptRoot) -and !(Test-Path $TOOLS_DIR)) {
Write-Verbose -Message "Creating tools directory..."
New-Item -Path $TOOLS_DIR -Type directory | out-null
}
# Make sure that packages.config exist.
if (!(Test-Path $PACKAGES_CONFIG)) {
Write-Verbose -Message "Downloading packages.config..."
try { (New-Object System.Net.WebClient).DownloadFile("http://cakebuild.net/download/bootstrapper/packages", $PACKAGES_CONFIG) } catch {
Throw "Could not download packages.config."
}
}
# Try find NuGet.exe in path if not exists
if (!(Test-Path $NUGET_EXE)) {
Write-Verbose -Message "Trying to find nuget.exe in PATH..."
$existingPaths = $Env:Path -Split ';' | Where-Object { (![string]::IsNullOrEmpty($_)) -and (Test-Path $_ -PathType Container) }
$NUGET_EXE_IN_PATH = Get-ChildItem -Path $existingPaths -Filter "nuget.exe" | Select -First 1
if ($NUGET_EXE_IN_PATH -ne $null -and (Test-Path $NUGET_EXE_IN_PATH.FullName)) {
Write-Verbose -Message "Found in PATH at $($NUGET_EXE_IN_PATH.FullName)."
$NUGET_EXE = $NUGET_EXE_IN_PATH.FullName
}
}
# Try download NuGet.exe if not exists
if (!(Test-Path $NUGET_EXE)) {
Write-Verbose -Message "Downloading NuGet.exe..."
try {
(New-Object System.Net.WebClient).DownloadFile($NUGET_URL, $NUGET_EXE)
} catch {
Throw "Could not download NuGet.exe."
}
}
# Save nuget.exe path to environment to be available to child processed
$ENV:NUGET_EXE = $NUGET_EXE
# Restore tools from NuGet?
if(-Not $SkipToolPackageRestore.IsPresent) {
Push-Location
Set-Location $TOOLS_DIR
# Check for changes in packages.config and remove installed tools if true.
[string] $md5Hash = MD5HashFile($PACKAGES_CONFIG)
if((!(Test-Path $PACKAGES_CONFIG_MD5)) -Or
($md5Hash -ne (Get-Content $PACKAGES_CONFIG_MD5 ))) {
Write-Verbose -Message "Missing or changed package.config hash..."
Remove-Item * -Recurse -Exclude packages.config,nuget.exe
}
Write-Verbose -Message "Restoring tools from NuGet..."
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$TOOLS_DIR`""
if ($LASTEXITCODE -ne 0) {
Throw "An error occured while restoring NuGet tools."
}
else
{
$md5Hash | Out-File $PACKAGES_CONFIG_MD5 -Encoding "ASCII"
}
Write-Verbose -Message ($NuGetOutput | out-string)
Pop-Location
}
# Restore addins from NuGet
if (Test-Path $ADDINS_PACKAGES_CONFIG) {
Push-Location
Set-Location $ADDINS_DIR
Write-Verbose -Message "Restoring addins from NuGet..."
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$ADDINS_DIR`""
if ($LASTEXITCODE -ne 0) {
Throw "An error occured while restoring NuGet addins."
}
Write-Verbose -Message ($NuGetOutput | out-string)
Pop-Location
}
# Restore modules from NuGet
if (Test-Path $MODULES_PACKAGES_CONFIG) {
Push-Location
Set-Location $MODULES_DIR
Write-Verbose -Message "Restoring modules from NuGet..."
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$MODULES_DIR`""
if ($LASTEXITCODE -ne 0) {
Throw "An error occured while restoring NuGet modules."
}
Write-Verbose -Message ($NuGetOutput | out-string)
Pop-Location
}
# Make sure that Cake has been installed.
if (!(Test-Path $CAKE_EXE)) {
Throw "Could not find Cake.exe at $CAKE_EXE"
}
# Start Cake
Write-Host "Running build script..."
Invoke-Expression "& `"$CAKE_EXE`" `"$Script`" -target=`"$Target`" -configuration=`"$Configuration`" -verbosity=`"$Verbosity`" $UseMono $UseDryRun $UseExperimental $ScriptArgs"
exit $LASTEXITCODE

42
build.sh Executable file
View File

@@ -0,0 +1,42 @@
#!/usr/bin/env bash
##########################################################################
# This is the Cake bootstrapper script for Linux and OS X.
# This file was downloaded from https://github.com/cake-build/resources
# Feel free to change this file to fit your needs.
##########################################################################
# Define directories.
SCRIPT_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
TOOLS_DIR=$SCRIPT_DIR/tools
CAKE_VERSION=0.19.1
CAKE_DLL=$TOOLS_DIR/Cake.CoreCLR.$CAKE_VERSION/Cake.dll
# Make sure the tools folder exist.
if [ ! -d "$TOOLS_DIR" ]; then
mkdir "$TOOLS_DIR"
fi
###########################################################################
# INSTALL CAKE
###########################################################################
if [ ! -f "$CAKE_DLL" ]; then
curl -Lsfo Cake.CoreCLR.zip "https://www.nuget.org/api/v2/package/Cake.CoreCLR/$CAKE_VERSION" && unzip -q Cake.CoreCLR.zip -d "$TOOLS_DIR/Cake.CoreCLR.$CAKE_VERSION" && rm -f Cake.CoreCLR.zip
if [ $? -ne 0 ]; then
echo "An error occured while installing Cake."
exit 1
fi
fi
# Make sure that Cake has been installed.
if [ ! -f "$CAKE_DLL" ]; then
echo "Could not find Cake.exe at '$CAKE_DLL'."
exit 1
fi
###########################################################################
# RUN BUILD SCRIPT
###########################################################################
# Start Cake
exec dotnet "$CAKE_DLL" "$@"

View File

@@ -6,6 +6,7 @@ using SharpCompress.Archives.SevenZip;
using SharpCompress.Archives.Tar;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Readers;
namespace SharpCompress.Archives
@@ -55,7 +56,7 @@ namespace SharpCompress.Archives
stream.Seek(0, SeekOrigin.Begin);
return TarArchive.Open(stream, readerOptions);
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip, LZip");
}
public static IWritableArchive Create(ArchiveType type)

View File

@@ -9,6 +9,6 @@ namespace SharpCompress.Common
Item = entry;
}
public T Item { get; private set; }
public T Item { get; }
}
}

View File

@@ -12,6 +12,7 @@
BCJ,
BCJ2,
LZip,
Xz,
Unknown
}
}

View File

@@ -165,25 +165,13 @@ namespace SharpCompress.Common.Rar.Headers
#if NO_FILE
return path.Replace('\\', '/');
#else
switch (os)
if (Path.DirectorySeparatorChar == '/')
{
case HostOS.MacOS:
case HostOS.Unix:
{
if (Path.DirectorySeparatorChar == '\\')
{
return path.Replace('/', '\\');
}
}
break;
default:
{
if (Path.DirectorySeparatorChar == '/')
{
return path.Replace('\\', '/');
}
}
break;
return path.Replace('\\', '/');
}
else if (Path.DirectorySeparatorChar == '\\')
{
return path.Replace('/', '\\');
}
return path;
#endif

View File

@@ -14,9 +14,9 @@ namespace SharpCompress.Common.Rar
FileHeader = fh;
}
internal MarkHeader MarkHeader { get; private set; }
internal MarkHeader MarkHeader { get; }
internal FileHeader FileHeader { get; private set; }
internal FileHeader FileHeader { get; }
internal override Stream GetRawStream()
{

View File

@@ -11,7 +11,7 @@ namespace SharpCompress.Common
ReaderProgress = readerProgress;
}
public T Item { get; private set; }
public ReaderProgress ReaderProgress { get; private set; }
public T Item { get; }
public ReaderProgress ReaderProgress { get; }
}
}

View File

@@ -21,18 +21,6 @@ namespace SharpCompress.Common.Zip.Headers
Comment = reader.ReadBytes(CommentLength);
}
internal override void Write(BinaryWriter writer)
{
writer.Write(VolumeNumber);
writer.Write(FirstVolumeWithDirectory);
writer.Write(TotalNumberOfEntriesInDisk);
writer.Write(TotalNumberOfEntries);
writer.Write(DirectorySize);
writer.Write(DirectoryStartOffsetRelativeToDisk);
writer.Write(CommentLength);
writer.Write(Comment);
}
public ushort VolumeNumber { get; private set; }
public ushort FirstVolumeWithDirectory { get; private set; }

View File

@@ -61,56 +61,6 @@ namespace SharpCompress.Common.Zip.Headers
}
}
internal override void Write(BinaryWriter writer)
{
var zip64 = CompressedSize >= uint.MaxValue || UncompressedSize >= uint.MaxValue || RelativeOffsetOfEntryHeader >= uint.MaxValue;
if (zip64)
Version = (ushort)(Version > 45 ? Version : 45);
writer.Write(Version);
writer.Write(VersionNeededToExtract);
writer.Write((ushort)Flags);
writer.Write((ushort)CompressionMethod);
writer.Write(LastModifiedTime);
writer.Write(LastModifiedDate);
writer.Write(Crc);
writer.Write(zip64 ? uint.MaxValue : CompressedSize);
writer.Write(zip64 ? uint.MaxValue : UncompressedSize);
byte[] nameBytes = EncodeString(Name);
writer.Write((ushort)nameBytes.Length);
if (zip64)
{
writer.Write((ushort)(2 + 2 + 8 + 8 + 8 + 4));
}
else
{
//writer.Write((ushort)Extra.Length);
writer.Write((ushort)0);
}
writer.Write((ushort)Comment.Length);
writer.Write(DiskNumberStart);
writer.Write(InternalFileAttributes);
writer.Write(ExternalFileAttributes);
writer.Write(zip64 ? uint.MaxValue : RelativeOffsetOfEntryHeader);
writer.Write(nameBytes);
if (zip64)
{
writer.Write((ushort)0x0001);
writer.Write((ushort)((8 + 8 + 8 + 4)));
writer.Write((ulong)UncompressedSize);
writer.Write((ulong)CompressedSize);
writer.Write((ulong)RelativeOffsetOfEntryHeader);
writer.Write((uint)0); // VolumeNumber = 0
}
writer.Write(Comment);
}
internal ushort Version { get; private set; }
public ushort VersionNeededToExtract { get; set; }

View File

@@ -13,10 +13,5 @@ namespace SharpCompress.Common.Zip.Headers
internal override void Read(BinaryReader reader)
{
}
internal override void Write(BinaryWriter writer)
{
throw new NotImplementedException();
}
}
}

View File

@@ -47,56 +47,6 @@ namespace SharpCompress.Common.Zip.Headers
}
}
internal override void Write(BinaryWriter writer)
{
if (IsZip64)
Version = (ushort)(Version > 45 ? Version : 45);
writer.Write(Version);
writer.Write((ushort)Flags);
writer.Write((ushort)CompressionMethod);
writer.Write(LastModifiedTime);
writer.Write(LastModifiedDate);
writer.Write(Crc);
if (IsZip64)
{
writer.Write(uint.MaxValue);
writer.Write(uint.MaxValue);
}
else
{
writer.Write(CompressedSize);
writer.Write(UncompressedSize);
}
byte[] nameBytes = EncodeString(Name);
writer.Write((ushort)nameBytes.Length);
if (IsZip64)
{
writer.Write((ushort)(2 + 2 + (2 * 8)));
}
else
{
writer.Write((ushort)0);
}
//if (Extra != null)
//{
// writer.Write(Extra);
//}
writer.Write(nameBytes);
if (IsZip64)
{
writer.Write((ushort)0x0001);
writer.Write((ushort)(2 * 8));
writer.Write((ulong)CompressedSize);
writer.Write((ulong)UncompressedSize);
}
}
internal ushort Version { get; private set; }
}
}

View File

@@ -14,10 +14,5 @@ namespace SharpCompress.Common.Zip.Headers
{
throw new NotImplementedException();
}
internal override void Write(BinaryWriter writer)
{
throw new NotImplementedException();
}
}
}

View File

@@ -26,11 +26,6 @@ namespace SharpCompress.Common.Zip.Headers
const int SizeOfFixedHeaderDataExceptSignatureAndSizeFields = 44;
internal override void Write(BinaryWriter writer)
{
throw new NotImplementedException();
}
public long SizeOfDirectoryEndRecord { get; private set; }
public ushort VersionMadeBy { get; private set; }

View File

@@ -16,11 +16,6 @@ namespace SharpCompress.Common.Zip.Headers
TotalNumberOfVolumes = reader.ReadUInt32();
}
internal override void Write(BinaryWriter writer)
{
throw new System.NotImplementedException();
}
public uint FirstVolumeWithDirectory { get; private set; }
public long RelativeOffsetOfTheEndOfDirectoryRecord { get; private set; }

View File

@@ -10,12 +10,10 @@ namespace SharpCompress.Common.Zip.Headers
HasData = true;
}
internal ZipHeaderType ZipHeaderType { get; private set; }
internal ZipHeaderType ZipHeaderType { get; }
internal abstract void Read(BinaryReader reader);
internal abstract void Write(BinaryWriter writer);
internal bool HasData { get; set; }
}
}

View File

@@ -25,7 +25,7 @@ namespace SharpCompress.Common.Zip
{
return Stream.Null;
}
decompressionStream = CreateDecompressionStream(GetCryptoStream(CreateBaseStream()));
decompressionStream = CreateDecompressionStream(GetCryptoStream(CreateBaseStream()), Header.CompressionMethod);
if (LeaveStreamOpen)
{
return new NonDisposingStream(decompressionStream);

View File

@@ -21,7 +21,7 @@ namespace SharpCompress.Common.Zip
BaseStream = stream;
}
internal Stream BaseStream { get; private set; }
internal Stream BaseStream { get; }
internal ZipFileEntry Header { get; set; }
internal override string FilePartName => Header.Name;
@@ -32,7 +32,7 @@ namespace SharpCompress.Common.Zip
{
return Stream.Null;
}
Stream decompressionStream = CreateDecompressionStream(GetCryptoStream(CreateBaseStream()));
Stream decompressionStream = CreateDecompressionStream(GetCryptoStream(CreateBaseStream()), Header.CompressionMethod);
if (LeaveStreamOpen)
{
return new NonDisposingStream(decompressionStream);
@@ -53,9 +53,9 @@ namespace SharpCompress.Common.Zip
protected bool LeaveStreamOpen => FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor) || Header.IsZip64;
protected Stream CreateDecompressionStream(Stream stream)
protected Stream CreateDecompressionStream(Stream stream, ZipCompressionMethod method)
{
switch (Header.CompressionMethod)
switch (method)
{
case ZipCompressionMethod.None:
{
@@ -102,9 +102,9 @@ namespace SharpCompress.Common.Zip
{
throw new InvalidFormatException("Winzip data length is not 7.");
}
ushort method = DataConverter.LittleEndian.GetUInt16(data.DataBytes, 0);
ushort compressedMethod = DataConverter.LittleEndian.GetUInt16(data.DataBytes, 0);
if (method != 0x01 && method != 0x02)
if (compressedMethod != 0x01 && compressedMethod != 0x02)
{
throw new InvalidFormatException("Unexpected vendor version number for WinZip AES metadata");
}
@@ -114,8 +114,7 @@ namespace SharpCompress.Common.Zip
{
throw new InvalidFormatException("Unexpected vendor ID for WinZip AES metadata");
}
Header.CompressionMethod = (ZipCompressionMethod)DataConverter.LittleEndian.GetUInt16(data.DataBytes, 5);
return CreateDecompressionStream(stream);
return CreateDecompressionStream(stream, (ZipCompressionMethod)DataConverter.LittleEndian.GetUInt16(data.DataBytes, 5));
}
default:
{

View File

@@ -99,7 +99,7 @@ namespace SharpCompress.Compressors.Deflate
/// </summary>
/// <param name="input">The stream over which to calculate the CRC32</param>
/// <returns>the CRC32 calculation</returns>
public Int32 GetCrc32(Stream input)
public UInt32 GetCrc32(Stream input)
{
return GetCrc32AndCopy(input, null);
}
@@ -111,7 +111,7 @@ namespace SharpCompress.Compressors.Deflate
/// <param name="input">The stream over which to calculate the CRC32</param>
/// <param name="output">The stream into which to deflate the input</param>
/// <returns>the CRC32 calculation</returns>
public Int32 GetCrc32AndCopy(Stream input, Stream output)
public UInt32 GetCrc32AndCopy(Stream input, Stream output)
{
if (input == null)
{
@@ -143,7 +143,7 @@ namespace SharpCompress.Compressors.Deflate
TotalBytesRead += count;
}
return (Int32)(~runningCrc32Result);
return ~runningCrc32Result;
}
}

View File

@@ -1,5 +1,8 @@
using System;
using System.IO;
using SharpCompress.Converters;
using SharpCompress.Crypto;
using SharpCompress.IO;
namespace SharpCompress.Compressors.LZMA
{
@@ -14,29 +17,62 @@ namespace SharpCompress.Compressors.LZMA
public class LZipStream : Stream
{
private readonly Stream stream;
private readonly CountingWritableSubStream rawStream;
private bool disposed;
private readonly bool leaveOpen;
private bool finished;
public LZipStream(Stream stream, CompressionMode mode)
: this(stream, mode, false)
{
}
private long writeCount;
public LZipStream(Stream stream, CompressionMode mode, bool leaveOpen)
public LZipStream(Stream stream, CompressionMode mode, bool leaveOpen = false)
{
if (mode != CompressionMode.Decompress)
{
throw new NotImplementedException("Only LZip decompression is currently supported");
}
Mode = mode;
this.leaveOpen = leaveOpen;
int dictionarySize = ValidateAndReadSize(stream);
if (dictionarySize == 0)
if (mode == CompressionMode.Decompress)
{
throw new IOException("Not an LZip stream");
int dSize = ValidateAndReadSize(stream);
if (dSize == 0)
{
throw new IOException("Not an LZip stream");
}
byte[] properties = GetProperties(dSize);
this.stream = new LzmaStream(properties, stream);
}
else
{
//default
int dSize = 104 * 1024;
WriteHeaderSize(stream);
rawStream = new CountingWritableSubStream(stream);
this.stream = new Crc32Stream(new LzmaStream(new LzmaEncoderProperties(true, dSize), false, rawStream));
}
}
public void Finish()
{
if (!finished)
{
if (Mode == CompressionMode.Compress)
{
var crc32Stream = (Crc32Stream)stream;
crc32Stream.WrappedStream.Dispose();
crc32Stream.Dispose();
var compressedCount = rawStream.Count;
var bytes = DataConverter.LittleEndian.GetBytes(crc32Stream.Crc);
rawStream.Write(bytes, 0, bytes.Length);
bytes = DataConverter.LittleEndian.GetBytes(writeCount);
rawStream.Write(bytes, 0, bytes.Length);
//total with headers
bytes = DataConverter.LittleEndian.GetBytes(compressedCount + 6 + 20);
rawStream.Write(bytes, 0, bytes.Length);
}
finished = true;
}
byte[] properties = GetProperties(dictionarySize);
this.stream = new LzmaStream(properties, stream);
}
#region Stream methods
@@ -48,19 +84,23 @@ namespace SharpCompress.Compressors.LZMA
return;
}
disposed = true;
if (disposing && !leaveOpen)
if (disposing)
{
stream.Dispose();
Finish();
if (!leaveOpen)
{
rawStream.Dispose();
}
}
}
public CompressionMode Mode { get; }
public override bool CanRead => stream.CanRead;
public override bool CanRead => Mode == CompressionMode.Decompress;
public override bool CanSeek => false;
public override bool CanWrite => false;
public override bool CanWrite => Mode == CompressionMode.Compress;
public override void Flush()
{
@@ -75,20 +115,16 @@ namespace SharpCompress.Compressors.LZMA
public override int Read(byte[] buffer, int offset, int count) => stream.Read(buffer, offset, count);
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException();
}
public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
public override void SetLength(long value)
{
throw new NotImplementedException();
}
public override void SetLength(long value) => throw new NotImplementedException();
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotImplementedException();
stream.Write(buffer, offset, count);
writeCount += count;
}
#endregion
/// <summary>
@@ -105,7 +141,7 @@ namespace SharpCompress.Compressors.LZMA
/// couldn't be read or it isn't a validate LZIP header, or the dictionary
/// size if it *is* a valid LZIP file.
/// </summary>
private static int ValidateAndReadSize(Stream stream)
public static int ValidateAndReadSize(Stream stream)
{
if (stream == null)
{
@@ -131,6 +167,17 @@ namespace SharpCompress.Compressors.LZMA
return (1 << basePower) - subtractionNumerator * (1 << (basePower - 4));
}
public static void WriteHeaderSize(Stream stream)
{
if (stream == null)
{
throw new ArgumentNullException(nameof(stream));
}
// hard coding the dictionary size encoding
byte[] header = new byte[6] {(byte)'L', (byte)'Z', (byte)'I', (byte)'P', 1, 113};
stream.Write(header, 0, 6);
}
/// <summary>
/// Creates a byte array to communicate the parameters and dictionary size to LzmaStream.
/// </summary>

View File

@@ -141,10 +141,7 @@ namespace SharpCompress.Compressors.LZMA
{
position = encoder.Code(null, true);
}
if (inputStream != null)
{
inputStream.Dispose();
}
inputStream?.Dispose();
}
base.Dispose(disposing);
}

View File

@@ -7,7 +7,7 @@ namespace SharpCompress.Compressors.Rar.Decode
Dif = new int[11];
}
internal int[] Dif { get; private set; }
internal int[] Dif { get; }
internal int ByteCount { get; set; }
internal int D1 { get; set; }

View File

@@ -17,17 +17,17 @@ namespace SharpCompress.Compressors.Rar.Decode
/// <summary> returns the decode Length array</summary>
/// <returns> decodeLength
/// </returns>
internal int[] DecodeLen { get; private set; }
internal int[] DecodeLen { get; }
/// <summary> returns the decode num array</summary>
/// <returns> decodeNum
/// </returns>
internal int[] DecodeNum { get; private set; }
internal int[] DecodeNum { get; }
/// <summary> returns the decodePos array</summary>
/// <returns> decodePos
/// </returns>
internal int[] DecodePos { get; private set; }
internal int[] DecodePos { get; }
internal int MaxNum { get; set; }
}

View File

@@ -10,8 +10,8 @@ namespace SharpCompress.Compressors.Rar.VM
internal VMCommands OpCode { get; set; }
internal bool IsByteMode { get; set; }
internal VMPreparedOperand Op1 { get; private set; }
internal VMPreparedOperand Op1 { get; }
internal VMPreparedOperand Op2 { get; private set; }
internal VMPreparedOperand Op2 { get; }
}
}

View File

@@ -9,10 +9,10 @@ namespace SharpCompress.Compressors.Rar.VM
Type = type;
}
internal int Length { get; private set; }
internal int Length { get; }
internal uint CRC { get; private set; }
internal uint CRC { get; }
internal VMStandardFilters Type { get; private set; }
internal VMStandardFilters Type { get; }
}
}

View File

@@ -0,0 +1,52 @@
using System;
using System.IO;
namespace SharpCompress.Compressors.Xz
{
/// <summary>
/// Endianness helpers: read little-endian 32-bit integers from a
/// <see cref="BinaryReader"/> or <see cref="Stream"/>, and convert uints to
/// explicit big-/little-endian byte arrays.
/// </summary>
public static class BinaryUtils
{
    /// <summary>Reads four bytes and assembles them least-significant first.</summary>
    public static int ReadLittleEndianInt32(this BinaryReader reader)
    {
        byte[] raw = reader.ReadBytes(4);
        return raw[0] | (raw[1] << 8) | (raw[2] << 16) | (raw[3] << 24);
    }

    internal static uint ReadLittleEndianUInt32(this BinaryReader reader)
    {
        return unchecked((uint)reader.ReadLittleEndianInt32());
    }

    /// <summary>Reads four bytes from the stream; throws if fewer are available.</summary>
    public static int ReadLittleEndianInt32(this Stream stream)
    {
        byte[] raw = new byte[4];
        if (stream.Read(raw, 0, 4) != 4)
        {
            throw new EndOfStreamException();
        }
        return raw[0] | (raw[1] << 8) | (raw[2] << 16) | (raw[3] << 24);
    }

    internal static uint ReadLittleEndianUInt32(this Stream stream)
    {
        return unchecked((uint)stream.ReadLittleEndianInt32());
    }

    /// <summary>Returns the value as 4 bytes, most-significant byte first.</summary>
    internal static byte[] ToBigEndianBytes(this uint uint32)
    {
        byte[] result = BitConverter.GetBytes(uint32);
        if (BitConverter.IsLittleEndian)
        {
            Array.Reverse(result);
        }
        return result;
    }

    /// <summary>Returns the value as 4 bytes, least-significant byte first.</summary>
    internal static byte[] ToLittleEndianBytes(this uint uint32)
    {
        byte[] result = BitConverter.GetBytes(uint32);
        if (!BitConverter.IsLittleEndian)
        {
            Array.Reverse(result);
        }
        return result;
    }
}
}

View File

@@ -0,0 +1,10 @@
namespace SharpCompress.Compressors.Xz
{
/// <summary>
/// Integrity-check algorithm identifiers used by the XZ format.
/// The numeric values are the check IDs stored in the stream flags.
/// </summary>
public enum CheckType : byte
{
// No check value is stored after a block.
NONE = 0x00,
CRC32 = 0x01,
CRC64 = 0x04,
SHA256 = 0x0A
}
}

View File

@@ -0,0 +1,60 @@
using System;
using System.Collections.Generic;
namespace SharpCompress.Compressors.Xz
{
/// <summary>
/// Table-driven CRC-32 (reflected form, default polynomial 0xEDB88320) used to
/// validate XZ headers and blocks. Seed is inverted on completion per the
/// standard CRC-32 definition.
/// </summary>
internal static class Crc32
{
    public const UInt32 DefaultPolynomial = 0xedb88320u;
    public const UInt32 DefaultSeed = 0xffffffffu;

    // Lazily-built table for the default polynomial. A concurrent first use may
    // build the table twice; both results are identical, so the race is benign.
    static UInt32[] defaultTable;

    /// <summary>CRC-32 of the buffer with the default polynomial and seed.</summary>
    public static UInt32 Compute(byte[] buffer)
    {
        return Compute(DefaultSeed, buffer);
    }

    /// <summary>CRC-32 of the buffer with the default polynomial and a custom seed.</summary>
    public static UInt32 Compute(UInt32 seed, byte[] buffer)
    {
        return Compute(DefaultPolynomial, seed, buffer);
    }

    /// <summary>CRC-32 of the buffer; the running hash is inverted before returning.</summary>
    public static UInt32 Compute(UInt32 polynomial, UInt32 seed, byte[] buffer)
    {
        return ~CalculateHash(InitializeTable(polynomial), seed, buffer, 0, buffer.Length);
    }

    static UInt32[] InitializeTable(UInt32 polynomial)
    {
        if (polynomial == DefaultPolynomial && defaultTable != null)
            return defaultTable;

        var createTable = new UInt32[256];
        for (var i = 0; i < 256; i++)
        {
            var entry = (UInt32)i;
            for (var j = 0; j < 8; j++)
                if ((entry & 1) == 1)
                    entry = (entry >> 1) ^ polynomial;
                else
                    entry = entry >> 1;
            createTable[i] = entry;
        }
        if (polynomial == DefaultPolynomial)
            defaultTable = createTable;
        return createTable;
    }

    static UInt32 CalculateHash(UInt32[] table, UInt32 seed, IList<byte> buffer, int start, int size)
    {
        var crc = seed;
        // Fix: process the 'size' bytes beginning at 'start'. The previous bound
        // ('i < size - start') under-read whenever start > 0; identical for the
        // start == 0 calls made by Compute.
        for (var i = start; i < start + size; i++)
            crc = (crc >> 8) ^ table[buffer[i] ^ crc & 0xff];
        return crc;
    }
}
}

View File

@@ -0,0 +1,57 @@
using System;
using System.Collections.Generic;
namespace SharpCompress.Compressors.Xz
{
/// <summary>
/// Table-driven CRC-64 using the reflected ISO-3309 polynomial
/// (0xD800000000000000). The default seed is zero and the result is not
/// inverted.
/// </summary>
internal static class Crc64
{
    public const UInt64 DefaultSeed = 0x0;

    // Shared lookup table, built on first use by Compute.
    internal static UInt64[] Table;

    public const UInt64 Iso3309Polynomial = 0xD800000000000000;

    /// <summary>CRC-64 of the buffer with the default (zero) seed.</summary>
    public static UInt64 Compute(byte[] buffer)
    {
        return Compute(DefaultSeed, buffer);
    }

    /// <summary>CRC-64 of the buffer with a custom seed; builds the table if needed.</summary>
    public static UInt64 Compute(UInt64 seed, byte[] buffer)
    {
        if (Table == null)
            Table = CreateTable(Iso3309Polynomial);
        return CalculateHash(seed, Table, buffer, 0, buffer.Length);
    }

    /// <summary>Folds buffer[start..size) into the running CRC one byte at a time.</summary>
    public static UInt64 CalculateHash(UInt64 seed, UInt64[] table, IList<byte> buffer, int start, int size)
    {
        UInt64 hash = seed;
        unchecked
        {
            for (int pos = start; pos < size; pos++)
            {
                hash = (hash >> 8) ^ table[(buffer[pos] ^ hash) & 0xff];
            }
        }
        return hash;
    }

    /// <summary>Builds the 256-entry lookup table for the given (reflected) polynomial.</summary>
    public static ulong[] CreateTable(ulong polynomial)
    {
        var table = new UInt64[256];
        for (var n = 0; n < 256; ++n)
        {
            var value = (UInt64)n;
            for (var bit = 0; bit < 8; ++bit)
            {
                value = (value & 1) == 1 ? (value >> 1) ^ polynomial : value >> 1;
            }
            table[n] = value;
        }
        return table;
    }
}
}

View File

@@ -0,0 +1,53 @@
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Compressors.Xz.Filters
{
/// <summary>
/// Base class for XZ block filters. A filter is a read-only stream that wraps
/// the previous stream in the chain; <see cref="Read(BinaryReader)"/> parses
/// one filter entry (ID, property size, properties) from a block header.
/// </summary>
internal abstract class BlockFilter : ReadOnlyStream
{
    /// <summary>Filter IDs defined by the XZ format.</summary>
    public enum FilterTypes : ulong
    {
        DELTA = 0x03,
        ARCH_x86_FILTER = 0x04,
        ARCH_PowerPC_FILTER = 0x05,
        ARCH_IA64_FILTER = 0x06,
        ARCH_ARM_FILTER = 0x07,
        ARCH_ARMTHUMB_FILTER = 0x08,
        ARCH_SPARC_FILTER = 0x09,
        LZMA2 = 0x21,
    }

    // Maps a filter ID to its implementing type. readonly so the shared map
    // cannot be reassigned after type initialization.
    private static readonly Dictionary<FilterTypes, Type> FilterMap = new Dictionary<FilterTypes, Type>()
    {
        {FilterTypes.LZMA2, typeof(Lzma2Filter) }
    };

    // Ordering constraints: some filters may only appear last (e.g. LZMA2),
    // others only before the last position.
    public abstract bool AllowAsLast { get; }
    public abstract bool AllowAsNonLast { get; }
    public abstract bool ChangesDataSize { get; }

    public BlockFilter() { }

    /// <summary>Parses the filter's property bytes from the block header.</summary>
    public abstract void Init(byte[] properties);

    /// <summary>Hook for filter-specific validation after Init.</summary>
    public abstract void ValidateFilter();

    public FilterTypes FilterType { get; set; }

    /// <summary>
    /// Reads one filter entry from a block header and returns an initialized
    /// filter instance.
    /// </summary>
    /// <exception cref="NotImplementedException">The filter ID has no implementation.</exception>
    /// <exception cref="InvalidDataException">The property size does not fit in an int.</exception>
    public static BlockFilter Read(BinaryReader reader)
    {
        var filterType = (FilterTypes)reader.ReadXZInteger();
        // Single lookup instead of ContainsKey + indexer.
        if (!FilterMap.TryGetValue(filterType, out Type implementation))
            throw new NotImplementedException($"Filter {filterType} has not yet been implemented");
        var filter = (BlockFilter)Activator.CreateInstance(implementation);

        var sizeOfProperties = reader.ReadXZInteger();
        if (sizeOfProperties > int.MaxValue)
            throw new InvalidDataException("Block filter information too large");

        byte[] properties = reader.ReadBytes((int)sizeOfProperties);
        filter.Init(properties);
        return filter;
    }

    /// <summary>Attaches the filter to the stream it should decode from.</summary>
    public abstract void SetBaseStream(Stream stream);
}
}

View File

@@ -0,0 +1,54 @@
using System;
using System.IO;
namespace SharpCompress.Compressors.Xz.Filters
{
/// <summary>
/// LZMA2 block filter: decodes the single-byte dictionary-size property and
/// wraps the base stream in an LzmaStream for decompression.
/// </summary>
internal class Lzma2Filter : BlockFilter
{
// LZMA2 may only appear as the last filter in a chain.
public override bool AllowAsLast => true;
public override bool AllowAsNonLast => false;
public override bool ChangesDataSize => true;
// Raw 6-bit encoded dictionary size taken from the filter properties.
byte _dictionarySize;
// Decoded dictionary size in bytes: a 2- or 3-valued mantissa shifted by an
// exponent derived from the encoded byte; the special value 40 means uint.MaxValue.
public uint DictionarySize
{
get
{
if (_dictionarySize > 40)
throw new OverflowException("Dictionary size greater than UInt32.Max");
if (_dictionarySize == 40)
{
return uint.MaxValue;
}
int mantissa = 2 | (_dictionarySize & 1);
int exponent = _dictionarySize / 2 + 11;
return (uint)mantissa << exponent;
}
}
/// <summary>
/// Parses the one-byte LZMA2 property: low 6 bits are the encoded dictionary
/// size; the top two bits are reserved and must be zero.
/// </summary>
public override void Init(byte[] properties)
{
if (properties.Length != 1)
throw new InvalidDataException("LZMA properties unexpected length");
_dictionarySize = (byte)(properties[0] & 0x3F);
var reserved = properties[0] & 0xC0;
if (reserved != 0)
throw new InvalidDataException("Reserved bits used in LZMA properties");
}
// No additional validation beyond Init for LZMA2.
public override void ValidateFilter()
{
}
public override void SetBaseStream(Stream stream)
{
// LzmaStream takes the single encoded property byte directly.
BaseStream = new SharpCompress.Compressors.LZMA.LzmaStream(new[] { _dictionarySize }, stream);
}
public override int Read(byte[] buffer, int offset, int count)
{
return BaseStream.Read(buffer, offset, count);
}
}
}

View File

@@ -0,0 +1,32 @@
using System;
using System.IO;
namespace SharpCompress.Compressors.Xz
{
/// <summary>
/// Decoder for the XZ variable-length ("multibyte") integer encoding:
/// 7 data bits per byte, least-significant group first; the high bit of each
/// byte marks that another byte follows.
/// </summary>
internal static class MultiByteIntegers
{
    /// <summary>
    /// Decodes one variable-length integer from <paramref name="reader"/>.
    /// </summary>
    /// <param name="reader">Source of encoded bytes.</param>
    /// <param name="MaxBytes">Maximum encoded length to accept; clamped to 9 (63 bits).</param>
    /// <returns>The decoded unsigned value.</returns>
    /// <exception cref="InvalidDataException">
    /// The encoding runs past MaxBytes, or a continuation byte is zero
    /// (a non-canonical encoding).
    /// </exception>
    public static ulong ReadXZInteger(this BinaryReader reader, int MaxBytes = 9)
    {
        if (MaxBytes <= 0)
            throw new ArgumentOutOfRangeException(nameof(MaxBytes));
        if (MaxBytes > 9)
            MaxBytes = 9;

        byte lastByte = reader.ReadByte();
        ulong output = (ulong)lastByte & 0x7F;

        int i = 0;
        while ((lastByte & 0x80) != 0)
        {
            // Fix: advance the group index so each continuation byte lands in
            // the next 7-bit group. Previously 'i' was never incremented, so
            // every continuation byte overwrote bits 0-6 and the MaxBytes
            // guard could never trigger.
            i++;
            if (i >= MaxBytes)
                throw new InvalidDataException();
            lastByte = reader.ReadByte();
            if (lastByte == 0)
                throw new InvalidDataException();
            output |= ((ulong)(lastByte & 0x7F)) << (i * 7);
        }
        return output;
    }
}
}

View File

@@ -0,0 +1,44 @@
using System;
using System.IO;
namespace SharpCompress.Compressors.Xz
{
/// <summary>
/// Base class for forward-only, read-only streams: seeking, writing, length
/// and position queries, and flushing are all unsupported and throw.
/// </summary>
public abstract class ReadOnlyStream : Stream
{
    /// <summary>The underlying stream data is read from.</summary>
    public Stream BaseStream { get; protected set; }

    public override bool CanRead => BaseStream.CanRead;

    public override bool CanSeek => false;

    public override bool CanWrite => false;

    public override long Length => throw new NotSupportedException();

    public override long Position
    {
        get => throw new NotSupportedException();
        set => throw new NotSupportedException();
    }

    public override void Flush() => throw new NotSupportedException();

    public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();

    public override void SetLength(long value) => throw new NotSupportedException();

    public override void Write(byte[] buffer, int offset, int count) => throw new NotSupportedException();
}
}

View File

@@ -0,0 +1,165 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Compressors.Xz.Filters;
namespace SharpCompress.Compressors.Xz
{
/// <summary>
/// Reads a single XZ block: parses and CRC-validates the block header, chains
/// the declared filters over the base stream, then serves the decompressed
/// data. If the "block" turns out to be the stream index (header-size byte of
/// zero), an XZIndexMarkerReachedException is thrown instead.
/// </summary>
internal sealed class XZBlock : XZReadOnlyStream
{
    // The stored size byte encodes (real header size / 4) - 1.
    public int BlockHeaderSize => (_blockHeaderSizeByte + 1) * 4;

    // Present only when the corresponding block-flag bits were set in the header.
    public ulong? CompressedSize { get; private set; }
    public ulong? UncompressedSize { get; private set; }

    // Filters are pushed in header order and popped when connecting, so the
    // last-listed filter ends up closest to the raw stream.
    public Stack<BlockFilter> Filters { get; private set; } = new Stack<BlockFilter>();

    public bool HeaderIsLoaded { get; private set; }

    private CheckType _checkType;
    private int _checkSize;
    private bool _streamConnected;
    private int _numFilters;
    private byte _blockHeaderSizeByte;
    private Stream _decomStream;
    private bool _endOfStream;
    private bool _paddingSkipped;
    private bool _crcChecked;
    private ulong _bytesRead;

    public XZBlock(Stream stream, CheckType checkType, int checkSize) : base(stream)
    {
        _checkType = checkType;
        _checkSize = checkSize;
    }

    public override int Read(byte[] buffer, int offset, int count)
    {
        int bytesRead = 0;
        if (!HeaderIsLoaded)
            LoadHeader();
        if (!_streamConnected)
            ConnectStream();
        if (!_endOfStream)
            bytesRead = _decomStream.Read(buffer, offset, count);
        // NOTE(review): a short read is taken to mean the block is exhausted;
        // this assumes the filter chain returns the full count until EOF.
        if (bytesRead != count)
            _endOfStream = true;
        if (_endOfStream && !_paddingSkipped)
            SkipPadding();
        if (_endOfStream && !_crcChecked)
            CheckCrc();
        _bytesRead += (ulong)bytesRead;
        return bytesRead;
    }

    // Compressed block data is padded with zero bytes to a 4-byte boundary.
    private void SkipPadding()
    {
        int padding = (int)(_bytesRead % 4);
        if (padding > 0)
        {
            byte[] paddingBytes = new byte[padding];
            BaseStream.Read(paddingBytes, 0, padding);
            if (paddingBytes.Any(b => b != 0))
                throw new InvalidDataException("Padding bytes were non-null");
        }
        _paddingSkipped = true;
    }

    private void CheckCrc()
    {
        byte[] crc = new byte[_checkSize];
        BaseStream.Read(crc, 0, _checkSize);
        // TODO: actually verify the check value (would require hashing the
        // decompressed bytes throughout the stream read); currently the check
        // bytes are only consumed.
        _crcChecked = true;
    }

    // Pop the filters onto the base stream so data flows raw -> filters -> caller.
    private void ConnectStream()
    {
        _decomStream = BaseStream;
        while (Filters.Any())
        {
            var filter = Filters.Pop();
            filter.SetBaseStream(_decomStream);
            _decomStream = filter;
        }
        _streamConnected = true;
    }

    private void LoadHeader()
    {
        ReadHeaderSize();
        byte[] headerCache = CacheHeader();

        using (var cache = new MemoryStream(headerCache))
        using (var cachedReader = new BinaryReader(cache))
        {
            cachedReader.BaseStream.Position = 1; // skip the header size byte
            ReadBlockFlags(cachedReader);
            ReadFilters(cachedReader);
        }
        HeaderIsLoaded = true;
    }

    private void ReadHeaderSize()
    {
        _blockHeaderSizeByte = (byte)BaseStream.ReadByte();
        // A zero size byte marks the start of the index, not a block.
        if (_blockHeaderSizeByte == 0)
            throw new XZIndexMarkerReachedException();
    }

    // Reads the remainder of the header (without its CRC), prepends the size
    // byte already consumed, and validates the header CRC32.
    private byte[] CacheHeader()
    {
        byte[] blockHeaderWithoutCrc = new byte[BlockHeaderSize - 4];
        blockHeaderWithoutCrc[0] = _blockHeaderSizeByte;
        var read = BaseStream.Read(blockHeaderWithoutCrc, 1, BlockHeaderSize - 5);
        if (read != BlockHeaderSize - 5)
            throw new EndOfStreamException("Reached end of stream unexpectedly"); // fixed message typo ("unexectedly")

        uint crc = BaseStream.ReadLittleEndianUInt32();
        uint calcCrc = Crc32.Compute(blockHeaderWithoutCrc);
        if (crc != calcCrc)
            throw new InvalidDataException("Block header corrupt");

        return blockHeaderWithoutCrc;
    }

    private void ReadBlockFlags(BinaryReader reader)
    {
        var blockFlags = reader.ReadByte();
        // Low two bits encode (number of filters - 1).
        _numFilters = (blockFlags & 0x03) + 1;
        byte reserved = (byte)(blockFlags & 0x3C);

        if (reserved != 0)
            throw new InvalidDataException("Reserved bytes used, perhaps an unknown XZ implementation");

        bool compressedSizePresent = (blockFlags & 0x40) != 0;
        bool uncompressedSizePresent = (blockFlags & 0x80) != 0;

        if (compressedSizePresent)
            CompressedSize = reader.ReadXZInteger();
        if (uncompressedSizePresent)
            UncompressedSize = reader.ReadXZInteger();
    }

    private void ReadFilters(BinaryReader reader, long baseStreamOffset = 0)
    {
        int nonLastSizeChangers = 0;
        for (int i = 0; i < _numFilters; i++)
        {
            var filter = BlockFilter.Read(reader);
            if ((i + 1 == _numFilters && !filter.AllowAsLast)
                || (i + 1 < _numFilters && !filter.AllowAsNonLast))
                throw new InvalidDataException("Block Filters in bad order");
            if (filter.ChangesDataSize && i + 1 < _numFilters)
                nonLastSizeChangers++;
            filter.ValidateFilter();
            Filters.Push(filter);
        }
        if (nonLastSizeChangers > 2)
            throw new InvalidDataException("More than two non-last block filters cannot change stream size");

        // Everything between the filter entries and the header CRC must be zero padding.
        int blockHeaderPaddingSize = BlockHeaderSize -
                                     (4 + (int)(reader.BaseStream.Position - baseStreamOffset));
        byte[] blockHeaderPadding = reader.ReadBytes(blockHeaderPaddingSize);
        if (!blockHeaderPadding.All(b => b == 0))
            throw new InvalidDataException("Block header contains unknown fields");
    }
}
}

View File

@@ -0,0 +1,49 @@
using System.IO;
using System.Linq;
using System.Text;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Xz
{
/// <summary>
/// Parses the XZ stream footer: a CRC32 of the footer fields, the backward
/// size (size of the index), the stream flags, and the trailing "YZ" magic.
/// </summary>
public class XZFooter
{
private readonly BinaryReader _reader;
// Trailing magic bytes: ASCII "YZ".
private readonly byte[] _magicBytes = new byte[] { 0x59, 0x5A };
// Base stream position when this instance was constructed.
public long StreamStartPosition { get; private set; }
// Size of the index in bytes, decoded from the stored (size / 4) - 1 value.
public long BackwardSize { get; private set; }
public byte[] StreamFlags { get; private set; }
public XZFooter(BinaryReader reader)
{
_reader = reader;
StreamStartPosition = reader.BaseStream.Position;
}
// Factory: wraps the stream without taking ownership and parses immediately.
public static XZFooter FromStream(Stream stream)
{
var footer = new XZFooter(new BinaryReader(new NonDisposingStream(stream), Encoding.UTF8));
footer.Process();
return footer;
}
/// <summary>
/// Reads and validates the footer; throws InvalidDataException when the
/// stored CRC does not match or the magic bytes are missing.
/// </summary>
public void Process()
{
uint crc = _reader.ReadLittleEndianUInt32();
byte[] footerBytes = _reader.ReadBytes(6);
uint myCrc = Crc32.Compute(footerBytes);
if (crc != myCrc)
throw new InvalidDataException("Footer corrupt");
using (var stream = new MemoryStream(footerBytes))
using (var reader = new BinaryReader(stream))
{
// The stored value is (real index size / 4) - 1.
BackwardSize = (reader.ReadLittleEndianUInt32() + 1) * 4;
StreamFlags = reader.ReadBytes(2);
}
byte[] magBy = _reader.ReadBytes(2);
if (!Enumerable.SequenceEqual(magBy, _magicBytes))
{
throw new InvalidDataException("Magic footer missing");
}
}
}
}

View File

@@ -0,0 +1,55 @@
using System;
using System.IO;
using System.Linq;
using System.Text;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Xz
{
/// <summary>
/// Parses the XZ stream header: the six magic bytes, the two stream-flags
/// bytes (which select the block integrity-check type), and their CRC32.
/// </summary>
public class XZHeader
{
private readonly BinaryReader _reader;
// Magic: 0xFD followed by ASCII "7zXZ" and a zero byte.
private readonly byte[] MagicHeader = { 0xFD, 0x37, 0x7A, 0x58, 0x5a, 0x00 };
public CheckType BlockCheckType { get; private set; }
// Size in bytes of each block's stored check value. Correct for the types
// this decoder accepts (NONE -> 0, CRC32 -> 4, CRC64 -> 8).
// NOTE(review): for SHA256 (id 10) this yields 16, but the XZ spec stores
// 32 bytes; SHA256 is rejected with NotImplementedException upstream, so the
// discrepancy is currently unreachable — confirm before enabling SHA256.
public int BlockCheckSize => ((((int)BlockCheckType) + 2) / 3) * 4;
public XZHeader(BinaryReader reader)
{
_reader = reader;
}
// Factory: wraps the stream without taking ownership and parses immediately.
public static XZHeader FromStream(Stream stream)
{
var header = new XZHeader(new BinaryReader(new NonDisposingStream(stream), Encoding.UTF8));
header.Process();
return header;
}
public void Process()
{
CheckMagicBytes(_reader.ReadBytes(6));
ProcessStreamFlags();
}
private void ProcessStreamFlags()
{
byte[] streamFlags = _reader.ReadBytes(2);
UInt32 crc = _reader.ReadLittleEndianUInt32();
UInt32 calcCrc = Crc32.Compute(streamFlags);
if (crc != calcCrc)
throw new InvalidDataException("Stream header corrupt");
// Check type lives in the low nibble of the second flags byte; the first
// byte and the high nibble are reserved and must be zero.
BlockCheckType = (CheckType)(streamFlags[1] & 0x0F);
byte futureUse = (byte)(streamFlags[1] & 0xF0);
if (futureUse != 0 || streamFlags[0] != 0)
throw new InvalidDataException("Unknown XZ Stream Version");
}
private void CheckMagicBytes(byte[] header)
{
if (!Enumerable.SequenceEqual(header, MagicHeader))
throw new InvalidDataException("Invalid XZ Stream");
}
}
}

View File

@@ -0,0 +1,73 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Xz
{
/// <summary>
/// Parses the XZ index: one record (unpadded size, uncompressed size) per
/// block, followed by zero padding to a 4-byte boundary and a CRC32.
/// </summary>
[CLSCompliant(false)]
public class XZIndex
{
private readonly BinaryReader _reader;
public long StreamStartPosition { get; private set; }
public ulong NumberOfRecords { get; private set; }
public List<XZIndexRecord> Records { get; } = new List<XZIndexRecord>();
// True when the caller already consumed the leading zero marker byte.
private bool _indexMarkerAlreadyVerified;
public XZIndex(BinaryReader reader, bool indexMarkerAlreadyVerified)
{
_reader = reader;
_indexMarkerAlreadyVerified = indexMarkerAlreadyVerified;
StreamStartPosition = reader.BaseStream.Position;
// Account for the marker byte the caller already read, so padding math
// is relative to the true start of the index.
if (indexMarkerAlreadyVerified)
StreamStartPosition--;
}
// Factory: wraps the stream without taking ownership and parses immediately.
public static XZIndex FromStream(Stream stream, bool indexMarkerAlreadyVerified)
{
var index = new XZIndex(new BinaryReader(new NonDisposingStream(stream), Encoding.UTF8), indexMarkerAlreadyVerified);
index.Process();
return index;
}
public void Process()
{
if (!_indexMarkerAlreadyVerified)
VerifyIndexMarker();
NumberOfRecords = _reader.ReadXZInteger();
for (ulong i = 0; i < NumberOfRecords; i++)
{
Records.Add(XZIndexRecord.FromBinaryReader(_reader));
}
SkipPadding();
VerifyCrc32();
}
private void VerifyIndexMarker()
{
byte marker = _reader.ReadByte();
if (marker != 0)
throw new InvalidDataException("Not an index block");
}
private void SkipPadding()
{
// NOTE(review): this reads (consumed % 4) bytes, but padding to the next
// 4-byte boundary should be (4 - consumed % 4) % 4 bytes — confirm against
// the XZ spec / real index data.
int padding = (int)(_reader.BaseStream.Position - StreamStartPosition) % 4;
if (padding > 0)
{
byte[] paddingBytes = _reader.ReadBytes(padding);
if (paddingBytes.Any(b => b != 0))
throw new InvalidDataException("Padding bytes were non-null");
}
}
private void VerifyCrc32()
{
// The CRC is consumed but not yet compared against a computed value.
uint crc = _reader.ReadLittleEndianUInt32();
// TODO verify this matches
}
}
}

View File

@@ -0,0 +1,8 @@
using System;
namespace SharpCompress.Compressors.Xz
{
/// <summary>
/// Signals that a zero byte was found where a block header size was expected,
/// i.e. the reader has reached the stream index and there are no more blocks.
/// </summary>
public class XZIndexMarkerReachedException : Exception
{
}
}

View File

@@ -0,0 +1,22 @@
using System;
using System.IO;
namespace SharpCompress.Compressors.Xz
{
/// <summary>
/// One record of the XZ index: the unpadded (stored) size and the
/// uncompressed size of a single block.
/// </summary>
[CLSCompliant(false)]
public class XZIndexRecord
{
    public ulong UnpaddedSize { get; private set; }

    public ulong UncompressedSize { get; private set; }

    protected XZIndexRecord() { }

    /// <summary>Reads the two XZ multibyte integers that make up a record.</summary>
    public static XZIndexRecord FromBinaryReader(BinaryReader br)
    {
        // Initializer expressions evaluate in order: unpadded size first.
        return new XZIndexRecord
        {
            UnpaddedSize = br.ReadXZInteger(),
            UncompressedSize = br.ReadXZInteger()
        };
    }
}
}

View File

@@ -0,0 +1,14 @@
using System.IO;
namespace SharpCompress.Compressors.Xz
{
/// <summary>
/// Shared base for the XZ readers: a ReadOnlyStream whose underlying stream
/// must itself be readable.
/// </summary>
public abstract class XZReadOnlyStream : ReadOnlyStream
{
public XZReadOnlyStream(Stream stream)
{
BaseStream = stream;
// Reading is the only supported operation, so an unreadable source is unusable.
if (!BaseStream.CanRead)
throw new InvalidDataException("Must be able to read from stream");
}
}
}

View File

@@ -0,0 +1,116 @@
using System;
using System.IO;
namespace SharpCompress.Compressors.Xz
{
/// <summary>
/// Decompressing read-only stream over an XZ container: reads the stream
/// header, then each block in turn, and finally the index and footer.
/// </summary>
[CLSCompliant(false)]
public sealed class XZStream : XZReadOnlyStream
{
// Probes the stream by attempting to parse an XZ header; any failure means
// "not XZ". Note this consumes bytes from the stream.
public static bool IsXZStream(Stream stream)
{
try
{
return null != XZHeader.FromStream(stream);
}
catch (Exception)
{
return false;
}
}
// Rejects check types this decoder cannot handle (SHA256 and unknown IDs).
private void AssertBlockCheckTypeIsSupported()
{
switch (Header.BlockCheckType)
{
case CheckType.NONE:
break;
case CheckType.CRC32:
break;
case CheckType.CRC64:
break;
case CheckType.SHA256:
throw new NotImplementedException();
default:
throw new NotSupportedException("Check Type unknown to this version of decoder.");
}
}
public XZHeader Header { get; private set; }
public XZIndex Index { get; private set; }
public XZFooter Footer { get; private set; }
public bool HeaderIsRead { get; private set; }
private XZBlock _currentBlock;
bool _endOfStream;
public XZStream(Stream stream) : base(stream)
{
}
public override int Read(byte[] buffer, int offset, int count)
{
int bytesRead = 0;
if (_endOfStream)
return bytesRead;
if (!HeaderIsRead)
ReadHeader();
bytesRead = ReadBlocks(buffer, offset, count);
// A short read means the blocks are exhausted; consume the trailing
// index and footer so the base stream is left positioned after the container.
if (bytesRead < count)
{
_endOfStream = true;
ReadIndex();
ReadFooter();
}
return bytesRead;
}
private void ReadHeader()
{
Header = XZHeader.FromStream(BaseStream);
AssertBlockCheckTypeIsSupported();
HeaderIsRead = true;
}
private void ReadIndex()
{
// The index marker byte was already consumed by the block reader.
Index = XZIndex.FromStream(BaseStream, true);
// TODO verify Index
}
private void ReadFooter()
{
Footer = XZFooter.FromStream(BaseStream);
// TODO verify footer
}
// Fills the buffer across block boundaries, starting a new block whenever the
// current one returns short; stops when the index marker is reached.
private int ReadBlocks(byte[] buffer, int offset, int count)
{
int bytesRead = 0;
if (_currentBlock == null)
NextBlock();
for (;;)
{
try
{
if (bytesRead >= count)
break;
int remaining = count - bytesRead;
int newOffset = offset + bytesRead;
int justRead = _currentBlock.Read(buffer, newOffset, remaining);
if (justRead < remaining)
NextBlock();
bytesRead += justRead;
}
catch (XZIndexMarkerReachedException)
{
break;
}
}
return bytesRead;
}
private void NextBlock()
{
_currentBlock = new XZBlock(BaseStream, Header.BlockCheckType, Header.BlockCheckSize);
}
}
}

View File

@@ -0,0 +1,106 @@
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Crypto
{
/// <summary>
/// Write-only pass-through stream that computes a running CRC-32 (reflected
/// form, default polynomial 0xEDB88320) over everything written to it.
/// Read <see cref="Crc"/> after all writes to get the finalized checksum.
/// </summary>
internal sealed class Crc32Stream : Stream
{
    public const uint DefaultPolynomial = 0xedb88320u;
    public const uint DefaultSeed = 0xffffffffu;

    // Cached table for the default polynomial; a concurrent first use may build
    // it twice, but both results are identical so the race is benign.
    private static uint[] defaultTable;

    private readonly uint[] table;
    private uint hash;
    private readonly Stream stream;

    public Crc32Stream(Stream stream)
        : this(stream, DefaultPolynomial, DefaultSeed)
    {
    }

    public Crc32Stream(Stream stream, uint polynomial, uint seed)
    {
        this.stream = stream;
        table = InitializeTable(polynomial);
        hash = seed;
    }

    /// <summary>The stream writes are forwarded to.</summary>
    public Stream WrappedStream => stream;

    public override void Flush()
    {
        stream.Flush();
    }

    public override int Read(byte[] buffer, int offset, int count) => throw new NotSupportedException();

    public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();

    public override void SetLength(long value) => throw new NotSupportedException();

    /// <summary>Forwards the bytes to the wrapped stream and folds them into the running CRC.</summary>
    public override void Write(byte[] buffer, int offset, int count)
    {
        stream.Write(buffer, offset, count);
        hash = CalculateCrc(table, hash, buffer, offset, count);
    }

    // Fix: Read always throws NotSupportedException, so per the Stream contract
    // this stream must report itself as unreadable (previously this mirrored
    // the wrapped stream's CanRead).
    public override bool CanRead => false;

    public override bool CanSeek => false;

    public override bool CanWrite => stream.CanWrite;

    public override long Length => throw new NotSupportedException();

    public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }

    /// <summary>Finalized CRC of everything written so far (running hash inverted).</summary>
    public uint Crc => ~hash;

    /// <summary>One-shot CRC-32 of the buffer with the default polynomial and seed.</summary>
    public static uint Compute(byte[] buffer)
    {
        return Compute(DefaultSeed, buffer);
    }

    public static uint Compute(uint seed, byte[] buffer)
    {
        return Compute(DefaultPolynomial, seed, buffer);
    }

    public static uint Compute(uint polynomial, uint seed, byte[] buffer)
    {
        return ~CalculateCrc(InitializeTable(polynomial), seed, buffer, 0, buffer.Length);
    }

    private static uint[] InitializeTable(uint polynomial)
    {
        if (polynomial == DefaultPolynomial && defaultTable != null)
            return defaultTable;

        var createTable = new uint[256];
        for (var i = 0; i < 256; i++)
        {
            var entry = (uint)i;
            for (var j = 0; j < 8; j++)
                if ((entry & 1) == 1)
                    entry = (entry >> 1) ^ polynomial;
                else
                    entry = entry >> 1;
            createTable[i] = entry;
        }
        if (polynomial == DefaultPolynomial)
            defaultTable = createTable;
        return createTable;
    }

    private static uint CalculateCrc(uint[] table, uint crc, byte[] buffer, int offset, int count)
    {
        unchecked
        {
            for (int i = offset, end = offset + count; i < end; i++)
                crc = (crc >> 8) ^ table[(crc ^ buffer[i]) & 0xFF];
        }
        return crc;
    }
}
}

View File

@@ -22,6 +22,7 @@ namespace SharpCompress.IO
public override void Flush()
{
writableStream.Flush();
}
public override long Length => throw new NotSupportedException();

View File

@@ -105,6 +105,12 @@ namespace SharpCompress.IO
public override int Read(byte[] buffer, int offset, int count)
{
//don't actually read if we don't really want to read anything
//currently a network stream bug on Windows for .NET Core
if (count == 0)
{
return 0;
}
int read;
if (isRewound && bufferStream.Position != bufferStream.Length)
{

View File

@@ -143,8 +143,11 @@ namespace SharpCompress.Readers
private void Skip()
{
if (!Entry.IsSolid)
if (ArchiveType != ArchiveType.Rar
&& !Entry.IsSolid
&& Entry.CompressedSize > 0)
{
//not solid and has a known compressed size then we can skip raw bytes.
var rawStream = Entry.Parts.First().GetRawStream();
if (rawStream != null)
@@ -158,6 +161,7 @@ namespace SharpCompress.Readers
return;
}
}
//don't know the size so we have to try to decompress to skip
using (var s = OpenEntryStream())
{
while (s.Read(skipBuffer, 0, skipBuffer.Length) > 0)

View File

@@ -14,6 +14,7 @@ using SharpCompress.Readers.Rar;
using SharpCompress.Readers.Tar;
using SharpCompress.Readers.Zip;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.Xz;
namespace SharpCompress.Readers
{
@@ -76,7 +77,6 @@ namespace SharpCompress.Readers
return new TarReader(rewindableStream, options, CompressionType.LZip);
}
}
rewindableStream.Rewind(false);
if (RarArchive.IsRarFile(rewindableStream, options))
{
@@ -90,7 +90,18 @@ namespace SharpCompress.Readers
rewindableStream.Rewind(true);
return TarReader.Open(rewindableStream, options);
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Reader Formats: Zip, GZip, BZip2, Tar, Rar");
rewindableStream.Rewind(false);
if (XZStream.IsXZStream(rewindableStream))
{
rewindableStream.Rewind(true);
XZStream testStream = new XZStream(rewindableStream);
if (TarArchive.IsTarFile(testStream))
{
rewindableStream.Rewind(true);
return new TarReader(rewindableStream, options, CompressionType.Xz);
}
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Reader Formats: Zip, GZip, BZip2, Tar, Rar, LZip, XZ");
}
}
}

View File

@@ -8,8 +8,8 @@ namespace SharpCompress.Readers
public class ReaderProgress
{
private readonly IEntry _entry;
public long BytesTransferred { get; private set; }
public int Iterations { get; private set; }
public long BytesTransferred { get; }
public int Iterations { get; }
public int PercentageRead => (int)Math.Round(PercentageReadExact);
public double PercentageReadExact => (float)BytesTransferred / _entry.Size * 100;

View File

@@ -10,6 +10,7 @@ using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.Deflate;
using SharpCompress.IO;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.Xz;
namespace SharpCompress.Readers.Tar
{
@@ -43,6 +44,10 @@ namespace SharpCompress.Readers.Tar
{
return new LZipStream(stream, CompressionMode.Decompress);
}
case CompressionType.Xz:
{
return new XZStream(stream);
}
case CompressionType.None:
{
return stream;

View File

@@ -1,13 +1,13 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<AssemblyTitle>SharpCompress - Pure C# Decompression/Compression</AssemblyTitle>
<NeutralLanguage>en-US</NeutralLanguage>
<VersionPrefix>0.16.0</VersionPrefix>
<AssemblyVersion>0.16.0.0</AssemblyVersion>
<FileVersion>0.16.0.0</FileVersion>
<VersionPrefix>0.17.1</VersionPrefix>
<AssemblyVersion>0.17.1.0</AssemblyVersion>
<FileVersion>0.17.1.0</FileVersion>
<Authors>Adam Hathcock</Authors>
<TargetFrameworks>netstandard1.0;netstandard1.3</TargetFrameworks>
<TargetFrameworks Condition="'$(LibraryFrameworks)'==''">net45;net35;netstandard1.0;netstandard1.3</TargetFrameworks>
<TargetFrameworks Condition="'$(LibraryFrameworks)'!=''">$(LibraryFrameworks)</TargetFrameworks>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<AssemblyName>SharpCompress</AssemblyName>
@@ -15,16 +15,14 @@
<SignAssembly>true</SignAssembly>
<PublicSign Condition=" '$(OS)' != 'Windows_NT' ">true</PublicSign>
<PackageId>SharpCompress</PackageId>
<PackageTags>rar;unrar;zip;unzip;bzip2;gzip;tar;7zip</PackageTags>
<PackageTags>rar;unrar;zip;unzip;bzip2;gzip;tar;7zip;lzip;xz</PackageTags>
<PackageProjectUrl>https://github.com/adamhathcock/sharpcompress</PackageProjectUrl>
<PackageLicenseUrl>https://github.com/adamhathcock/sharpcompress/blob/master/LICENSE.txt</PackageLicenseUrl>
<GenerateAssemblyTitleAttribute>false</GenerateAssemblyTitleAttribute>
<GenerateAssemblyProductAttribute>false</GenerateAssemblyProductAttribute>
<Description>SharpCompress is a compression library for NET Standard 1.0 that can unrar, decompress 7zip, zip/unzip, tar/untar bzip2/unbzip2 and gzip/ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip is implemented.</Description>
<Description>SharpCompress is a compression library for NET Standard 1.0 that can unrar, decompress 7zip, decompress xz, zip/unzip, tar/untar lzip/unlzip, bzip2/unbzip2 and gzip/ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip is implemented.</Description>
</PropertyGroup>
<PropertyGroup Condition=" '$(TargetFramework)' == 'netstandard1.0' ">
<DefineConstants>$(DefineConstants);NO_FILE;NO_CRYPTO;SILVERLIGHT</DefineConstants>
</PropertyGroup>
</Project>
</Project>

View File

@@ -5,6 +5,7 @@ using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Compressors.LZMA;
namespace SharpCompress.Writers.Tar
{
@@ -23,12 +24,17 @@ namespace SharpCompress.Writers.Tar
break;
case CompressionType.BZip2:
{
destination = new BZip2Stream(destination, CompressionMode.Compress, options.LeaveStreamOpen);
destination = new BZip2Stream(destination, CompressionMode.Compress, true);
}
break;
case CompressionType.GZip:
{
destination = new GZipStream(destination, CompressionMode.Compress, options.LeaveStreamOpen);
destination = new GZipStream(destination, CompressionMode.Compress, true);
}
break;
case CompressionType.LZip:
{
destination = new LZipStream(destination, CompressionMode.Compress, true);
}
break;
default:
@@ -36,7 +42,7 @@ namespace SharpCompress.Writers.Tar
throw new InvalidFormatException("Tar does not support compression: " + options.CompressionType);
}
}
InitalizeStream(destination, !options.LeaveStreamOpen);
InitalizeStream(destination, true);
}
public override void Write(string filename, Stream source, DateTime? modificationTime)
@@ -92,7 +98,19 @@ namespace SharpCompress.Writers.Tar
{
PadTo512(0, true);
PadTo512(0, true);
(OutputStream as BZip2Stream)?.Finish(); // required when bzip2 compression is used
switch (OutputStream)
{
case BZip2Stream b:
{
b.Finish();
break;
}
case LZipStream l:
{
l.Finish();
break;
}
}
}
base.Dispose(isDisposing);
}

View File

@@ -9,18 +9,27 @@ namespace SharpCompress.Writers.Zip
{
internal class ZipCentralDirectoryEntry
{
internal string FileName { get; set; }
private readonly ZipCompressionMethod compression;
private readonly string fileName;
public ZipCentralDirectoryEntry(ZipCompressionMethod compression, string fileName, ulong headerOffset)
{
this.compression = compression;
this.fileName = fileName;
HeaderOffset = headerOffset;
}
internal DateTime? ModificationTime { get; set; }
internal string Comment { get; set; }
internal uint Crc { get; set; }
internal ulong HeaderOffset { get; set; }
internal ulong Compressed { get; set; }
internal ulong Decompressed { get; set; }
internal ushort Zip64HeaderOffset { get; set; }
internal ulong HeaderOffset { get; }
internal uint Write(Stream outputStream, ZipCompressionMethod compression)
internal uint Write(Stream outputStream)
{
byte[] encodedFilename = Encoding.UTF8.GetBytes(FileName);
byte[] encodedFilename = Encoding.UTF8.GetBytes(fileName);
byte[] encodedComment = Encoding.UTF8.GetBytes(Comment);
var zip64_stream = Compressed >= uint.MaxValue || Decompressed >= uint.MaxValue;
@@ -30,7 +39,7 @@ namespace SharpCompress.Writers.Zip
var decompressedvalue = zip64 ? uint.MaxValue : (uint)Decompressed;
var headeroffsetvalue = zip64 ? uint.MaxValue : (uint)HeaderOffset;
var extralength = zip64 ? (2 + 2 + 8 + 8 + 8 + 4) : 0;
var version = (byte)(zip64 ? 45 : 10);
var version = (byte)(zip64 ? 45 : 20); // Version 20 required for deflate/encryption
HeaderFlags flags = HeaderFlags.UTF8;
if (!outputStream.CanSeek)
@@ -50,8 +59,8 @@ namespace SharpCompress.Writers.Zip
}
}
//constant sig, then version made by, compabitility, then version to extract
outputStream.Write(new byte[] { 80, 75, 1, 2, 0x14, 0, version, 0 }, 0, 8);
//constant sig, then version made by, then version to extract
outputStream.Write(new byte[] { 80, 75, 1, 2, version, 0, version, 0 }, 0, 8);
outputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)flags), 0, 2);
outputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)compression), 0, 2); // zipping method

View File

@@ -30,6 +30,10 @@ namespace SharpCompress.Writers.Zip
{
zipComment = zipWriterOptions.ArchiveComment ?? string.Empty;
isZip64 = zipWriterOptions.UseZip64;
if (destination.CanSeek)
{
streamPosition = destination.Position;
}
compressionType = zipWriterOptions.CompressionType;
compressionLevel = zipWriterOptions.DeflateCompressionLevel;
@@ -55,7 +59,7 @@ namespace SharpCompress.Writers.Zip
ulong size = 0;
foreach (ZipCentralDirectoryEntry entry in entries)
{
size += entry.Write(OutputStream, ToZipCompressionMethod(compressionType));
size += entry.Write(OutputStream);
}
WriteEndRecord(size);
}
@@ -108,16 +112,16 @@ namespace SharpCompress.Writers.Zip
public Stream WriteToStream(string entryPath, ZipWriterEntryOptions options)
{
var compression = ToZipCompressionMethod(options.CompressionType ?? compressionType);
entryPath = NormalizeFilename(entryPath);
options.ModificationDateTime = options.ModificationDateTime ?? DateTime.Now;
options.EntryComment = options.EntryComment ?? string.Empty;
var entry = new ZipCentralDirectoryEntry
var entry = new ZipCentralDirectoryEntry(compression, entryPath, (ulong)streamPosition)
{
Comment = options.EntryComment,
FileName = entryPath,
ModificationTime = options.ModificationDateTime,
HeaderOffset = (ulong)streamPosition
};
ModificationTime = options.ModificationDateTime
};
// Use the archive default setting for zip64 and allow overrides
var useZip64 = isZip64;
@@ -126,8 +130,7 @@ namespace SharpCompress.Writers.Zip
var headersize = (uint)WriteHeader(entryPath, options, entry, useZip64);
streamPosition += headersize;
return new ZipWritingStream(this, OutputStream, entry,
ToZipCompressionMethod(options.CompressionType ?? compressionType),
return new ZipWritingStream(this, OutputStream, entry, compression,
options.DeflateCompressionLevel ?? compressionLevel);
}
@@ -208,14 +211,6 @@ namespace SharpCompress.Writers.Zip
OutputStream.Write(DataConverter.LittleEndian.GetBytes(uncompressed), 0, 4);
}
private void WritePostdataDescriptor(uint crc, ulong compressed, ulong uncompressed)
{
OutputStream.Write(DataConverter.LittleEndian.GetBytes(ZipHeaderFactory.POST_DATA_DESCRIPTOR), 0, 4);
OutputStream.Write(DataConverter.LittleEndian.GetBytes(crc), 0, 4);
OutputStream.Write(DataConverter.LittleEndian.GetBytes((uint)compressed), 0, 4);
OutputStream.Write(DataConverter.LittleEndian.GetBytes((uint)uncompressed), 0, 4);
}
private void WriteEndRecord(ulong size)
{
byte[] encodedComment = ArchiveEncoding.Default.GetBytes(zipComment);

View File

@@ -23,9 +23,12 @@
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using System.IO;
using SharpCompress.Compressors;
using SharpCompress.Compressors.ADC;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Crypto;
using Xunit;
namespace SharpCompress.Test
@@ -124,5 +127,28 @@ namespace SharpCompress.Test
}
}
}
[Fact]
public void TestCrc32Stream()
{
using (FileStream decFs = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar")))
{
var crc32 = new CRC32().GetCrc32(decFs);
decFs.Seek(0, SeekOrigin.Begin);
var memory = new MemoryStream();
var crcStream = new Crc32Stream(memory, 0xEDB88320, 0xFFFFFFFF);
decFs.CopyTo(crcStream);
decFs.Seek(0, SeekOrigin.Begin);
var crc32a = crcStream.Crc;
var crc32b = Crc32Stream.Compute(memory.ToArray());
Assert.Equal(crc32, crc32a);
Assert.Equal(crc32, crc32b);
}
}
}
}

View File

@@ -95,9 +95,9 @@ namespace SharpCompress.Test
ResetScratch();
using (var archive = ArchiveFactory.Open(path))
{
archive.EntryExtractionBegin += archive_EntryExtractionBegin;
archive.FilePartExtractionBegin += archive_FilePartExtractionBegin;
archive.CompressedBytesRead += archive_CompressedBytesRead;
//archive.EntryExtractionBegin += archive_EntryExtractionBegin;
//archive.FilePartExtractionBegin += archive_FilePartExtractionBegin;
//archive.CompressedBytesRead += archive_CompressedBytesRead;
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
@@ -157,9 +157,9 @@ namespace SharpCompress.Test
using (var archive = ArchiveFactory.Open(path))
{
totalSize = archive.TotalUncompressSize;
archive.EntryExtractionBegin += Archive_EntryExtractionBeginEx;
archive.EntryExtractionEnd += Archive_EntryExtractionEndEx;
archive.CompressedBytesRead += Archive_CompressedBytesReadEx;
//archive.EntryExtractionBegin += Archive_EntryExtractionBeginEx;
//archive.EntryExtractionEnd += Archive_EntryExtractionEndEx;
//archive.CompressedBytesRead += Archive_CompressedBytesReadEx;
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{

View File

@@ -5,7 +5,7 @@ using SharpCompress.Archives;
using SharpCompress.Archives.GZip;
using Xunit;
namespace SharpCompress.Test
namespace SharpCompress.Test.GZip
{
public class GZipArchiveTests : ArchiveTests
{

View File

@@ -4,7 +4,7 @@ using SharpCompress.Writers;
using SharpCompress.Writers.GZip;
using Xunit;
namespace SharpCompress.Test
namespace SharpCompress.Test.GZip
{
public class GZipWriterTests : WriterTests
{

View File

@@ -6,7 +6,7 @@ using SharpCompress.Common;
using SharpCompress.Readers;
using Xunit;
namespace SharpCompress.Test
namespace SharpCompress.Test.Rar
{
public class RarArchiveTests : ArchiveTests
{

View File

@@ -5,7 +5,7 @@ using SharpCompress.Readers;
using SharpCompress.Readers.Rar;
using Xunit;
namespace SharpCompress.Test
namespace SharpCompress.Test.Rar
{
public class RarReaderTests : ReaderTests
{

View File

@@ -1,9 +1,8 @@
using System;
using System;
using SharpCompress.Common;
using Xunit;
namespace SharpCompress.Test
namespace SharpCompress.Test.SevenZip
{
public class SevenZipArchiveTests : ArchiveTests
{

View File

@@ -1,5 +1,4 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>netcoreapp1.1</TargetFramework>
<AssemblyName>SharpCompress.Test</AssemblyName>
@@ -10,11 +9,9 @@
<GenerateRuntimeConfigurationFiles>true</GenerateRuntimeConfigurationFiles>
<RuntimeFrameworkVersion>1.1.2</RuntimeFrameworkVersion>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\..\src\SharpCompress\SharpCompress.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="15.0.0" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.3.0-beta2-build1317" />
@@ -22,9 +19,7 @@
<PackageReference Include="xunit" Version="2.3.0-beta2-build3683" />
<DotNetCliToolReference Include="dotnet-xunit" Version="2.3.0-beta2-build3683" />
</ItemGroup>
<ItemGroup>
<Service Include="{82a7f48d-3b50-4b1e-b82e-3ada8210c358}" />
</ItemGroup>
</Project>
</Project>

View File

@@ -1,5 +1,4 @@
using System.IO;
using System.IO;
using System.Linq;
using SharpCompress.Archives;
using SharpCompress.Archives.Tar;
@@ -7,7 +6,7 @@ using SharpCompress.Common;
using SharpCompress.Writers;
using Xunit;
namespace SharpCompress.Test
namespace SharpCompress.Test.Tar
{
public class TarArchiveTests : ArchiveTests
{

View File

@@ -1,12 +1,11 @@
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using Xunit;
using System.Linq;
using SharpCompress.Readers;
using SharpCompress.Readers.Tar;
using Xunit;
namespace SharpCompress.Test
namespace SharpCompress.Test.Tar
{
public class TarReaderTests : ReaderTests
{
@@ -21,6 +20,33 @@ namespace SharpCompress.Test
Read("Tar.tar", CompressionType.None);
}
[Fact]
public void Tar_Skip()
{
using (Stream stream = new ForwardOnlyStream(File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar"))))
using (IReader reader = ReaderFactory.Open(stream))
{
ResetScratch();
int x = 0;
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
x++;
if (x % 2 == 0)
{
reader.WriteEntryToDirectory(SCRATCH_FILES_PATH,
new ExtractionOptions()
{
ExtractFullPath = true,
Overwrite = true
});
}
}
}
}
}
[Fact]
public void Tar_BZip2_Reader()
{
@@ -39,6 +65,12 @@ namespace SharpCompress.Test
Read("Tar.tar.lz", CompressionType.LZip);
}
[Fact]
public void Tar_Xz_Reader()
{
Read("Tar.tar.xz", CompressionType.Xz);
}
[Fact]
public void Tar_BZip2_Entry_Stream()
{

View File

@@ -1,7 +1,7 @@
using SharpCompress.Common;
using Xunit;
namespace SharpCompress.Test
namespace SharpCompress.Test.Tar
{
public class TarWriterTests : WriterTests
{
@@ -23,6 +23,12 @@ namespace SharpCompress.Test
Write(CompressionType.BZip2, "Tar.noEmptyDirs.tar.bz2", "Tar.noEmptyDirs.tar.bz2");
}
[Fact]
public void Tar_LZip_Writer()
{
Write(CompressionType.LZip, "Tar.noEmptyDirs.tar.lz", "Tar.noEmptyDirs.tar.lz");
}
[Fact]
public void Tar_Rar_Write()
{

View File

@@ -28,10 +28,6 @@ namespace SharpCompress.Test
{
writer.WriteAll(ORIGINAL_FILES_PATH, "*", SearchOption.AllDirectories);
}
if (!stream.CanWrite)
{
throw new InvalidOperationException();
}
}
CompareArchivesByPath(Path.Combine(SCRATCH2_FILES_PATH, archive),
Path.Combine(TEST_ARCHIVES_PATH, archiveToVerifyAgainst));

View File

@@ -9,7 +9,7 @@ using SharpCompress.Writers;
using SharpCompress.Writers.Zip;
using Xunit;
namespace SharpCompress.Test
namespace SharpCompress.Test.Zip
{
public class Zip64Tests : WriterTests
{
@@ -21,7 +21,6 @@ namespace SharpCompress.Test
// 4GiB + 1
const long FOUR_GB_LIMIT = ((long)uint.MaxValue) + 1;
[Fact(Skip = "Takes too long")]
[Trait("format", "zip64")]
public void Zip64_Single_Large_File()
{
@@ -29,7 +28,6 @@ namespace SharpCompress.Test
RunSingleTest(1, FOUR_GB_LIMIT, set_zip64: true, forward_only: false);
}
[Fact(Skip = "Takes too long")]
[Trait("format", "zip64")]
public void Zip64_Two_Large_Files()
{
@@ -37,7 +35,6 @@ namespace SharpCompress.Test
RunSingleTest(2, FOUR_GB_LIMIT, set_zip64: true, forward_only: false);
}
[Fact(Skip = "Takes too long")]
[Trait("format", "zip64")]
public void Zip64_Two_Small_files()
{
@@ -45,7 +42,6 @@ namespace SharpCompress.Test
RunSingleTest(2, FOUR_GB_LIMIT / 2, set_zip64: false, forward_only: false);
}
[Fact(Skip = "Takes too long")]
[Trait("format", "zip64")]
public void Zip64_Two_Small_files_stream()
{
@@ -53,7 +49,6 @@ namespace SharpCompress.Test
RunSingleTest(2, FOUR_GB_LIMIT / 2, set_zip64: false, forward_only: true);
}
[Fact(Skip = "Takes too long")]
[Trait("format", "zip64")]
public void Zip64_Two_Small_Files_Zip64()
{
@@ -61,7 +56,6 @@ namespace SharpCompress.Test
RunSingleTest(2, FOUR_GB_LIMIT / 2, set_zip64: true, forward_only: false);
}
[Fact(Skip = "Takes too long")]
[Trait("format", "zip64")]
public void Zip64_Single_Large_File_Fail()
{
@@ -76,7 +70,6 @@ namespace SharpCompress.Test
}
}
[Fact(Skip = "Takes too long")]
[Trait("zip64", "true")]
public void Zip64_Single_Large_File_Zip64_Streaming_Fail()
{
@@ -91,7 +84,6 @@ namespace SharpCompress.Test
}
}
[Fact(Skip = "Takes too long")]
[Trait("zip64", "true")]
public void Zip64_Single_Large_File_Streaming_Fail()
{

View File

@@ -1,5 +1,4 @@
using System;
using System;
using System.IO;
using System.Linq;
using System.Text;
@@ -10,7 +9,7 @@ using SharpCompress.Readers;
using SharpCompress.Writers;
using Xunit;
namespace SharpCompress.Test
namespace SharpCompress.Test.Zip
{
public class ZipArchiveTests : ArchiveTests
{
@@ -324,6 +323,24 @@ namespace SharpCompress.Test
VerifyFiles();
}
[Fact]
public void Zip_Deflate_WinzipAES_MultiOpenEntryStream()
{
ResetScratch();
using (var reader = ZipArchive.Open(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.WinzipAES2.zip"), new ReaderOptions()
{
Password = "test"
}))
{
foreach (var entry in reader.Entries.Where(x => !x.IsDirectory))
{
var stream = entry.OpenEntryStream();
Assert.NotNull(stream);
var ex = Record.Exception(() => stream = entry.OpenEntryStream());
Assert.Null(ex);
}
}
}
[Fact]
public void Zip_BZip2_Pkware_Read()

View File

@@ -6,7 +6,7 @@ using SharpCompress.Readers.Zip;
using SharpCompress.Writers;
using Xunit;
namespace SharpCompress.Test
namespace SharpCompress.Test.Zip
{
public class ZipReaderTests : ReaderTests
{
@@ -48,6 +48,32 @@ namespace SharpCompress.Test
Read("Zip.deflate.dd.zip", CompressionType.Deflate);
}
[Fact]
public void Zip_Deflate_Streamed_Skip()
{
using (Stream stream = new ForwardOnlyStream(File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip"))))
using (IReader reader = ReaderFactory.Open(stream))
{
ResetScratch();
int x = 0;
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
x++;
if (x % 2 == 0)
{
reader.WriteEntryToDirectory(SCRATCH_FILES_PATH,
new ExtractionOptions()
{
ExtractFullPath = true,
Overwrite = true
});
}
}
}
}
}
[Fact]
public void Zip_Deflate_Read()
{
Read("Zip.deflate.zip", CompressionType.Deflate);

View File

@@ -1,7 +1,7 @@
using SharpCompress.Common;
using Xunit;
namespace SharpCompress.Test
namespace SharpCompress.Test.Zip
{
public class ZipWriterTests : WriterTests
{

Binary file not shown.

Binary file not shown.

Binary file not shown.