Compare commits


66 Commits

Author SHA1 Message Date
Adam Hathcock
e53f2cac4a Mark for 0.16.0 2017-05-22 08:58:52 +01:00
Adam Hathcock
3b73464233 Merge pull request #236 from damieng/zip-min-version-of-20
Default zip ver to 20 (deflate/encyption), fixes #164
2017-05-22 08:38:18 +01:00
Damien Guard
575f10f766 Default zip ver to 20 (deflate/encyption), fixes #164 2017-05-19 16:37:20 -07:00
Adam Hathcock
60370b8539 don't run appveyor tests 2017-05-19 15:51:06 +01:00
Adam Hathcock
f6db114865 Remove console writelines 2017-05-19 15:47:53 +01:00
Adam Hathcock
1c6c344b6b Tests don't run on appveyor 2017-05-19 15:45:29 +01:00
Adam Hathcock
d0302898e0 Add back net45,net35 and cake 2017-05-19 13:33:12 +01:00
Adam Hathcock
057ac9b001 Enable test 2017-05-19 11:03:31 +01:00
Adam Hathcock
8be931bbcb Doing some resharper clean up 2017-05-19 10:52:49 +01:00
Adam Hathcock
3197ef289c Forgot to hit save 2017-05-19 10:15:19 +01:00
Adam Hathcock
631578c175 Update to next version. Stop Zip64 tests from running all the time and some clean up 2017-05-19 10:10:23 +01:00
Adam Hathcock
f1809163c7 correct gitignore 2017-05-19 09:44:45 +01:00
Adam Hathcock
60e1fe86f2 Fix test running 2017-05-19 09:40:37 +01:00
Adam Hathcock
59d7de5bfc Try again appveyor 2017-05-19 09:36:05 +01:00
Adam Hathcock
6e95c1d84a Drop net35 support as dot net tooling doesn’t support it currently 2017-05-19 09:34:02 +01:00
Adam Hathcock
ee64670755 Move test folder to be tests 2017-05-19 09:19:37 +01:00
Adam Hathcock
3f7d0f5b68 Update test project 2017-05-19 09:14:43 +01:00
Adam Hathcock
e3514c5c4b Don’t attempt to autodeploy 2017-05-19 09:06:18 +01:00
Adam Hathcock
cc3a9cff88 Merge pull request #231 from adamhathcock/VS2017
Vs2017
2017-05-19 09:02:12 +01:00
Adam Hathcock
15e821aa39 Remove unused events 2017-05-19 08:49:44 +01:00
Adam Hathcock
8dd1dbab5f Remove Cake as it’s unnecessary for basic build/test/publish 2017-05-19 08:47:17 +01:00
Adam Hathcock
65ce91ddf6 Update. Only use net35, net standard 1.0 and net standard 1.3 2017-05-19 08:46:27 +01:00
Adam Hathcock
bf55595d6f Merge pull request #226 from gardebring/master
Add new event handler to allow tracking of progress of extraction progress for individual entry
2017-04-25 13:07:44 +01:00
Anders Gardebring
2aa123ccd7 Remove begin and end events since this can now be tracked via progress instead 2017-04-25 13:21:04 +02:00
Anders Gardebring
0990b06cc9 Create new TransferTo method and pass Entry and IReaderExtractionListener instead of passing an action lambda. 2017-04-25 12:48:56 +02:00
Anders Gardebring
e05f9843ba Use strongly typed ReaderProgress instead of object[] 2017-04-25 12:36:32 +02:00
Anders Gardebring
683d2714d0 Add new event to be able to track progress of extraction of individual entry when extracting an archive. This allows for showing or logging progress of the extraction process, especially useful for large files that might take a long time to extract. 2017-04-24 13:50:45 +02:00
Anders Gardebring
b8ef1ecafc Revert "Add new feature to allow injection of an action into the extraction process. This allows for showing or logging progress of the extraction process, especially useful for large files that might take a long time to extract."
This reverts commit 467fc2d03d.
2017-04-24 10:22:49 +02:00
Anders Gardebring
467fc2d03d Add new feature to allow injection of an action into the extraction process. This allows for showing or logging progress of the extraction process, especially useful for large files that might take a long time to extract. 2017-04-20 11:45:53 +02:00
Adam Hathcock
58b4fe4f28 Merge pull request #220 from coderb/master
verify RAR crc on header and file data
2017-04-07 11:56:06 +01:00
Brien Oberstein
97d5e0aac4 verify rar CRC on header and file data 2017-04-04 12:20:06 -04:00
Adam Hathcock
356c977cff Merge pull request #215 from mnadareski/master
Removed restriction on 7zip file entries
2017-03-17 09:20:59 +00:00
Matt Nadareski
99d6062376 Removed restriction on 7zip file entries 2017-03-16 15:55:20 -07:00
Adam Hathcock
f8538403e4 Merge pull request #211 from kenkendk/add_zip64
Add zip64
2017-03-13 10:23:26 +00:00
Kenneth Skovhede
726b9c80f6 Fixed compiling the unittest 2017-03-11 01:05:58 +01:00
Kenneth Skovhede
2894711c51 Added a test suite to verify zip64 write support is working, and can be read in both Archive and Stream mode 2017-03-11 00:54:06 +01:00
Kenneth Skovhede
85280f6f4f Changed the logic to throw exceptions when sizes exceed the zip archive limits, and zip64 is not enabled.
This changes the logic, such that archives larger than 4GiB are still automatically written correct (only the central header is special).
Archives with individual streams larger than 4 GiB must set the zip64 flag, either on the archive or the individual streams.
2017-03-11 00:53:42 +01:00
Kenneth Skovhede
d7f4c0ee32 Fixed an error in the zip64 central end of header: the signature + length (12 bytes) are not included in the reported length. 2017-03-10 23:10:06 +01:00
Kenneth Skovhede
1263c0d976 Added support for writing zip64 headers 2017-03-09 23:56:42 +01:00
Kenneth Skovhede
cd3cbd2b32 Support for writing zip64 headers in the unused code 2017-03-09 23:18:57 +01:00
Adam Hathcock
b3a4fed8be Mark for 0.15.2 2017-03-09 11:02:44 +00:00
Adam Hathcock
d0b4af6666 Merge pull request #210 from kenkendk/fix_invalid_headers
Fix invalid headers
2017-03-09 10:41:18 +00:00
Kenneth Skovhede
81ab5c189d Fixed writing correct headers in zip archives 2017-03-09 11:34:24 +01:00
Kenneth Skovhede
6ef3be4b5c Fixed writing correct headers in zip archives 2017-03-09 11:32:20 +01:00
Adam Hathcock
9f90a1d651 Mark for 0.15.1 2017-01-25 09:31:01 +00:00
Adam Hathcock
ce9a3fd1ef Add file ordering fix for OS X 2017-01-25 09:29:13 +00:00
Adam Hathcock
7c6f05058e Merge pull request #206 from markryd/zip64-extraction
Zip64 extending information and ZipReader
2017-01-25 09:03:43 +00:00
Mark Rydstrom
a8c3a7439e Add support for zip64 to ZipReader 2017-01-25 17:05:48 +10:00
Mark Rydstrom
839b3ab0cf Add support for zip64 extended information field 2017-01-25 16:51:15 +10:00
Adam Hathcock
44d54db80e Fix some path issues on OS X when running tests. 2017-01-24 17:36:51 +00:00
Adam Hathcock
a67d7bc429 Mark for 0.15 2017-01-24 17:25:19 +00:00
Adam Hathcock
079a818c6c Merge pull request #205 from markryd/zip64-extraction
Add zip64 support for ZipArchive extraction
2017-01-24 16:56:42 +00:00
Mark Rydstrom
6be6ef0b5c Add zip64 support for ZipArchive extraction 2017-01-24 13:04:03 +10:00
Adam Hathcock
8e51d9d646 0.14.1 2016-11-30 14:26:18 +00:00
Adam Hathcock
ea206f4f02 Merge pull request #199 from adamhathcock/Issue-198
Gzip entry can't be read multiple times
2016-11-25 09:33:56 +00:00
Adam Hathcock
f175a2a252 Merge branch 'master' into Issue-198 2016-11-25 09:21:44 +00:00
Adam Hathcock
3f7e559b86 Merge pull request #200 from ITnent/bug/Issue-197
Open branch, to fix multiple crashes on repeated zip archives reading…
2016-11-25 09:21:34 +00:00
Vladimir Demidov
2959b4d701 Modified check integrity condition for the encrypted file. 2016-11-24 20:41:08 +03:00
Vladimir Demidov
031286c5eb Fixed defects after review. 2016-11-24 18:01:49 +03:00
Vladimir Demidov
e181fa8c4a Restored original tabs. 2016-11-24 17:11:43 +03:00
Vladimir Demidov
7b035bec5d Fixed some issues after review. 2016-11-24 16:21:02 +03:00
Vladimir Demidov
f39d2bf53a Open branch, to fix multiple crashes on repeated zip archives reading. Added fix. 2016-11-24 15:14:29 +03:00
Adam Hathcock
7c8e407182 Merge branch 'master' into Issue-198 2016-11-21 12:21:29 +00:00
Adam Hathcock
a09136d46b Merge pull request #195 from jskeet/strong-naming
Strong-name both the main and test projects
2016-11-21 12:06:13 +00:00
Adam Hathcock
5fe1363ee1 Gzip entry can't be read multiple times https://github.com/adamhathcock/sharpcompress/issues/198 2016-11-21 12:04:35 +00:00
Jon Skeet
b41823fc10 Strong-name both the main and test projects
It's not clear whether SharpCompress.Test.Portable (as referenced
in AssemblyInfo.cs) still exists, but build.ps1 certainly works.
2016-11-15 18:42:56 +00:00
229 changed files with 2045 additions and 1292 deletions

.gitignore vendored
View File

@@ -10,7 +10,7 @@ TestResults/
*.nupkg
packages/*/
project.lock.json
test/TestArchives/Scratch
tests/TestArchives/Scratch
.vs
tools
.vscode

View File

@@ -1,11 +1,13 @@
# Archive Formats
## Accessing Archives
Archive classes allow random access to a seekable stream.
Reader classes allow forward-only reading.
Writer classes allow forward-only writing.
## Supported Format Table
| Archive Format | Compression Format(s) | Compress/Decompress | Archive API | Reader API | Writer API |
| --- | --- | --- | --- | --- | --- |
| Rar | Rar | Decompress (1) | RarArchive | RarReader | N/A |
@@ -15,11 +17,12 @@ Writer classes allow forward-only Writing
| 7Zip (4) | LZMA, LZMA2, BZip2, PPMd, BCJ, BCJ2, Deflate | Decompress | SevenZipArchive | N/A | N/A |
1. SOLID Rars are only supported in the RarReader API.
2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported.
2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading is supported.
3. The Tar format requires a file size in the header. If no size is specified to the TarWriter and the stream is not seekable, then an exception will be thrown.
4. The 7Zip format doesn't allow for reading as a forward-only stream so 7Zip is only supported through the Archive API
## Compressors
For those who want to directly compress/decompress bits
| Compressor | Compress/Decompress |
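The split documented above maps to two entry points: Archive types (ZipArchive, RarArchive, ...) need a seekable stream and give random access to Entries, while Reader types walk a stream forward only. A minimal sketch of both paths against the public API; the paths and entry filtering are illustrative only:

```csharp
using System.IO;
using SharpCompress.Archives.Zip;
using SharpCompress.Readers;

internal static class ExtractionExamples
{
    // Archive API: random access over a seekable stream.
    public static void ExtractWithArchive(string zipPath, string outputDir)
    {
        using (var archive = ZipArchive.Open(File.OpenRead(zipPath)))
        {
            foreach (var entry in archive.Entries)
            {
                if (entry.IsDirectory)
                {
                    continue;
                }
                using (var input = entry.OpenEntryStream())
                using (var output = File.Create(Path.Combine(outputDir, Path.GetFileName(entry.Key))))
                {
                    input.CopyTo(output);
                }
            }
        }
    }

    // Reader API: forward-only, so it also works on non-seekable streams.
    public static void ExtractWithReader(Stream source, string outputDir)
    {
        using (var reader = ReaderFactory.Open(source))
        {
            while (reader.MoveToNextEntry())
            {
                if (reader.Entry.IsDirectory)
                {
                    continue;
                }
                using (var input = reader.OpenEntryStream())
                using (var output = File.Create(Path.Combine(outputDir, Path.GetFileName(reader.Entry.Key))))
                {
                    input.CopyTo(output);
                }
            }
        }
    }
}
```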

View File

@@ -25,12 +25,39 @@ I'm always looking for help or ideas. Please submit code or email with ideas. Un
* RAR 5 support
* 7Zip writing
* Zip64
* Zip64 (Need writing and extend Reading)
* Multi-volume Zip support.
* RAR5 support
## Version Log
### Version 0.16.0
* Breaking - [Progress Event Tracking rethink](https://github.com/adamhathcock/sharpcompress/pull/226)
* Update to VS2017 - [VS2017](https://github.com/adamhathcock/sharpcompress/pull/231) - Framework targets have been changed.
* New - [Add Zip64 writing](https://github.com/adamhathcock/sharpcompress/pull/211)
* [Fix invalid/mismatching Zip version flags.](https://github.com/adamhathcock/sharpcompress/issues/164) - This allows nuget/System.IO.Packaging to read zip files generated by SharpCompress
* [Fix 7Zip directory hiding](https://github.com/adamhathcock/sharpcompress/pull/215/files)
* [Verify RAR CRC headers](https://github.com/adamhathcock/sharpcompress/pull/220)
### Version 0.15.2
* [Fix invalid headers](https://github.com/adamhathcock/sharpcompress/pull/210) - fixes an issue creating large-ish zip archives that was introduced with zip64 reading.
### Version 0.15.1
* [Zip64 extending information and ZipReader](https://github.com/adamhathcock/sharpcompress/pull/206)
### Version 0.15.0
* [Add zip64 support for ZipArchive extraction](https://github.com/adamhathcock/sharpcompress/pull/205)
### Version 0.14.1
* [.NET Assemblies aren't strong named](https://github.com/adamhathcock/sharpcompress/issues/158)
* [Pkware encryption for Zip files didn't allow for multiple reads of an entry](https://github.com/adamhathcock/sharpcompress/issues/197)
* [GZip Entry couldn't be read multiple times](https://github.com/adamhathcock/sharpcompress/issues/198)
### Version 0.14.0
* [Support for LZip reading in for Tars](https://github.com/adamhathcock/sharpcompress/pull/191)
@@ -117,8 +144,6 @@ I'm always looking for help or ideas. Please submit code or email with ideas. Un
* Embedded some BouncyCastle crypto classes to allow RAR Decryption and Winzip AES Decryption in Portable and Windows Store DLLs
* Built in Release (I think)
Some Help/Discussion: https://sharpcompress.codeplex.com/discussions
7Zip implementation based on: https://code.google.com/p/managed-lzma/
LICENSE
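The 0.16.0 entry above adds Zip64 writing (#211). Per commit 85280f6f4f, archives over 4 GiB are handled automatically and only individual entries over 4 GiB require the zip64 flag. A sketch of opting in on the writer, assuming the UseZip64 option that PR introduces on ZipWriterOptions:

```csharp
using System;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Writers;
using SharpCompress.Writers.Zip;

internal static class Zip64WriteExample
{
    public static void WriteLargeZip(string zipPath, string sourceFile)
    {
        // UseZip64 is assumed to be the option added by PR #211; without it,
        // an entry over 4 GiB is expected to throw rather than write a broken archive.
        var options = new ZipWriterOptions(CompressionType.Deflate)
        {
            UseZip64 = true
        };

        using (var output = File.Create(zipPath))
        using (var writer = WriterFactory.Open(output, ArchiveType.Zip, options))
        using (var input = File.OpenRead(sourceFile))
        {
            writer.Write(Path.GetFileName(sourceFile), input, DateTime.Now);
        }
    }
}
```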

View File

@@ -1,44 +1,38 @@
Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 14
VisualStudioVersion = 14.0.24720.0
MinimumVisualStudioVersion = 10.0.40219.1
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{F18F1765-4A02-42FD-9BEF-F0E2FCBD9D17}"
ProjectSection(SolutionItems) = preProject
global.json = global.json
EndProjectSection
EndProject
Project("{8BB2217D-0F2D-49D1-97BC-3654ED321F3B}") = "SharpCompress", "src\SharpCompress\SharpCompress.xproj", "{FD19DDD8-72B2-4024-8665-0D1F7A2AA998}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{3C5BE746-03E5-4895-9988-0B57F162F86C}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "test", "test", "{0F0901FF-E8D9-426A-B5A2-17C7F47C1529}"
EndProject
Project("{8BB2217D-0F2D-49D1-97BC-3654ED321F3B}") = "SharpCompress.Test", "test\SharpCompress.Test\SharpCompress.Test.xproj", "{3B80E585-A2F3-4666-8F69-C7FFDA0DD7E5}"
ProjectSection(ProjectDependencies) = postProject
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998} = {FD19DDD8-72B2-4024-8665-0D1F7A2AA998}
EndProjectSection
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998}.Debug|Any CPU.Build.0 = Debug|Any CPU
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998}.Release|Any CPU.ActiveCfg = Release|Any CPU
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998}.Release|Any CPU.Build.0 = Release|Any CPU
{3B80E585-A2F3-4666-8F69-C7FFDA0DD7E5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{3B80E585-A2F3-4666-8F69-C7FFDA0DD7E5}.Debug|Any CPU.Build.0 = Debug|Any CPU
{3B80E585-A2F3-4666-8F69-C7FFDA0DD7E5}.Release|Any CPU.ActiveCfg = Release|Any CPU
{3B80E585-A2F3-4666-8F69-C7FFDA0DD7E5}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(NestedProjects) = preSolution
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998} = {3C5BE746-03E5-4895-9988-0B57F162F86C}
{3B80E585-A2F3-4666-8F69-C7FFDA0DD7E5} = {0F0901FF-E8D9-426A-B5A2-17C7F47C1529}
EndGlobalSection
EndGlobal
Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 15
VisualStudioVersion = 15.0.26430.6
MinimumVisualStudioVersion = 10.0.40219.1
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{F18F1765-4A02-42FD-9BEF-F0E2FCBD9D17}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{3C5BE746-03E5-4895-9988-0B57F162F86C}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "tests", "tests", "{0F0901FF-E8D9-426A-B5A2-17C7F47C1529}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SharpCompress", "src\SharpCompress\SharpCompress.csproj", "{FD19DDD8-72B2-4024-8665-0D1F7A2AA998}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SharpCompress.Test", "tests\SharpCompress.Test\SharpCompress.Test.csproj", "{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998}.Debug|Any CPU.Build.0 = Debug|Any CPU
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998}.Release|Any CPU.ActiveCfg = Release|Any CPU
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998}.Release|Any CPU.Build.0 = Release|Any CPU
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Debug|Any CPU.Build.0 = Debug|Any CPU
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Release|Any CPU.ActiveCfg = Release|Any CPU
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(NestedProjects) = preSolution
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998} = {3C5BE746-03E5-4895-9988-0B57F162F86C}
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F} = {0F0901FF-E8D9-426A-B5A2-17C7F47C1529}
EndGlobalSection
EndGlobal

BIN
SharpCompress.snk Normal file

Binary file not shown.

View File

@@ -1,17 +1,20 @@
version: '0.13.{build}'
version: '{build}'
image: Visual Studio 2017
init:
- git config --global core.autocrlf true
pull_requests:
do_not_increment_build_number: true
branches:
only:
- master
nuget:
disable_publish_on_pr: true
build_script:
- ps: .\build.ps1
test: off
cache:
- tools -> build.cake
- tools -> build.ps1
artifacts:
- path: nupkgs\*.nupkg
name: NuPkgs
- path: src\SharpCompress\bin\Release\*.nupkg

View File

@@ -1,229 +1,51 @@
#addin "Cake.Json"
#addin "nuget:?package=NuGet.Core"
using NuGet;
//////////////////////////////////////////////////////////////////////
// ARGUMENTS
//////////////////////////////////////////////////////////////////////
var target = Argument("target", "Default");
var apiKey = Argument("apiKey", "");
var repo = Argument("repo", "");
//////////////////////////////////////////////////////////////////////
// PREPARATION
//////////////////////////////////////////////////////////////////////
var sources = new [] { "https://api.nuget.org/v3/index.json" };
var publishTarget = "";
Warning("=============");
var globalPath = MakeFullPath("global.json");
var nupkgs = MakeFullPath("nupkgs");
Warning("Operating on global.json: " + globalPath);
Warning("=============");
//////////////////////////////////////////////////////////////////////
// FUNCTIONS
//////////////////////////////////////////////////////////////////////
string MakeFullPath(string relativePath)
{
if (string.IsNullOrEmpty(repo))
{
return MakeAbsolute(new DirectoryPath(relativePath)).ToString();
}
if (!System.IO.Path.IsPathRooted(repo))
{
return MakeAbsolute(new DirectoryPath(System.IO.Path.Combine(repo,relativePath))).ToString();
}
return System.IO.Path.Combine(repo, relativePath);
}
IEnumerable<string> GetAllProjects()
{
var global = DeserializeJsonFromFile<JObject>(globalPath);
var projs = global["projects"].Select(x => x.ToString());
foreach(var y in projs)
{
yield return MakeFullPath(y);
}
}
IEnumerable<string> GetSourceProjects()
{
return GetAllProjects().Where(x => x.EndsWith("src"));
}
IEnumerable<string> GetTestProjects()
{
return GetAllProjects().Where(x => x.EndsWith("test"));
}
IEnumerable<string> GetFrameworks(string path)
{
var projectJObject = DeserializeJsonFromFile<JObject>(path);
foreach(var prop in ((JObject)projectJObject["frameworks"]).Properties())
{
yield return prop.Name;
}
}
string GetVersion(string path)
{
var projectJObject = DeserializeJsonFromFile<JObject>(path);
return ((JToken)projectJObject["version"]).ToString();
}
IEnumerable<string> GetProjectJsons(IEnumerable<string> projects)
{
foreach(var proj in projects)
{
foreach(var projectJson in GetFiles(proj + "/**/project.json"))
{
yield return MakeFullPath(projectJson.ToString());
}
}
}
bool IsNuGetPublished (FilePath file, string nugetSource)
{
var pkg = new ZipPackage(file.ToString());
var repo = PackageRepositoryFactory.Default.CreateRepository(nugetSource);
var packages = repo.FindPackagesById(pkg.Id);
var version = SemanticVersion.Parse(pkg.Version.ToString());
//Filter the list of packages that are not Release (Stable) versions
var exists = packages.Any (p => p.Version == version);
return exists;
}
//////////////////////////////////////////////////////////////////////
// TASKS
//////////////////////////////////////////////////////////////////////
var tag = Argument("tag", "cake");
Task("Restore")
.Does(() =>
.Does(() =>
{
var settings = new DotNetCoreRestoreSettings
{
Sources = sources,
NoCache = true
};
foreach(var project in GetProjectJsons(GetSourceProjects().Concat(GetTestProjects())))
{
DotNetCoreRestore(project, settings);
}
DotNetCoreRestore(".");
});
Task("Build")
.Does(() =>
.Does(() =>
{
var settings = new DotNetCoreBuildSettings
{
Configuration = "Release"
};
foreach(var project in GetProjectJsons(GetSourceProjects().Concat(GetTestProjects())))
{
foreach(var framework in GetFrameworks(project))
{
Information("Building: {0} on Framework: {1}", project, framework);
Information("========");
settings.Framework = framework;
DotNetCoreBuild(project, settings);
}
}
MSBuild("./sharpcompress.sln", c => c
.SetConfiguration("Release")
.SetVerbosity(Verbosity.Minimal)
.UseToolVersion(MSBuildToolVersion.VS2017));
});
Task("Test")
.Does(() =>
{
var settings = new DotNetCoreTestSettings
.Does(() =>
{
var files = GetFiles("tests/**/*.csproj");
foreach(var file in files)
{
Configuration = "Release",
Verbose = true
};
foreach(var project in GetProjectJsons(GetTestProjects()))
{
settings.Framework = GetFrameworks(project).First();
DotNetCoreTest(project.ToString(), settings);
DotNetCoreTest(file.ToString());
}
}).ReportError(exception =>
{
Error(exception.ToString());
});
Task("Pack")
.Does(() =>
{
if (DirectoryExists(nupkgs))
{
DeleteDirectory(nupkgs, true);
}
CreateDirectory(nupkgs);
var settings = new DotNetCorePackSettings
{
Configuration = "Release",
OutputDirectory = nupkgs
};
foreach(var project in GetProjectJsons(GetSourceProjects()))
{
DotNetCorePack(project, settings);
}
});
Task("Publish")
.IsDependentOn("Restore")
.IsDependentOn("Build")
.IsDependentOn("Test")
.IsDependentOn("Pack")
.Does(() =>
.Does(() =>
{
var packages = GetFiles(nupkgs + "/*.nupkg");
foreach(var package in packages)
{
if (package.ToString().Contains("symbols"))
{
Warning("Skipping Symbols package " + package);
continue;
}
if (IsNuGetPublished(package, sources[1]))
{
throw new InvalidOperationException(package + " is already published.");
}
NuGetPush(package, new NuGetPushSettings{
ApiKey = apiKey,
Verbosity = NuGetVerbosity.Detailed,
Source = publishTarget
});
}
MSBuild("src/SharpCompress/SharpCompress.csproj", c => c
.SetConfiguration("Release")
.SetVerbosity(Verbosity.Minimal)
.UseToolVersion(MSBuildToolVersion.VS2017)
.WithProperty("NoBuild", "true")
.WithTarget("Pack"));
});
//////////////////////////////////////////////////////////////////////
// TASK TARGETS
//////////////////////////////////////////////////////////////////////
Task("Default")
.IsDependentOn("Restore")
.IsDependentOn("Build")
.IsDependentOn("Test")
.IsDependentOn("Pack");
//////////////////////////////////////////////////////////////////////
// EXECUTION
//////////////////////////////////////////////////////////////////////
Task("RunTests")
.IsDependentOn("Restore")
.IsDependentOn("Build");
RunTarget(target);

build.ps1
View File

@@ -1,22 +1,41 @@
##########################################################################
# This is the Cake bootstrapper script for PowerShell.
# This file was downloaded from https://github.com/cake-build/resources
# Feel free to change this file to fit your needs.
##########################################################################
<#
.SYNOPSIS
This is a Powershell script to bootstrap a Cake build.
.DESCRIPTION
This Powershell script will download NuGet if missing, restore NuGet tools (including Cake)
and execute your Cake build script with the parameters you provide.
.PARAMETER Script
The build script to execute.
.PARAMETER Target
The build script target to run.
.PARAMETER Configuration
The build configuration to use.
.PARAMETER Verbosity
Specifies the amount of information to be displayed.
.PARAMETER Experimental
Tells Cake to use the latest Roslyn release.
.PARAMETER WhatIf
Performs a dry run of the build script.
No tasks will be executed.
.PARAMETER Mono
Tells Cake to use the Mono scripting engine.
.PARAMETER SkipToolPackageRestore
Skips restoring of packages.
.PARAMETER ScriptArgs
Remaining arguments are added here.
.LINK
http://cakebuild.net
#>
[CmdletBinding()]
@@ -27,104 +46,183 @@ Param(
[string]$Configuration = "Release",
[ValidateSet("Quiet", "Minimal", "Normal", "Verbose", "Diagnostic")]
[string]$Verbosity = "Verbose",
[switch]$Experimental,
[Alias("DryRun","Noop")]
[switch]$WhatIf,
[switch]$Mono,
[switch]$SkipToolPackageRestore,
[Parameter(Position=0,Mandatory=$false,ValueFromRemainingArguments=$true)]
[string[]]$ScriptArgs
)
$CakeVersion = "0.16.1"
$DotNetChannel = "preview";
$DotNetVersion = "1.0.0-preview2-003131";
$DotNetInstallerUri = "https://raw.githubusercontent.com/dotnet/cli/rel/1.0.0-preview2/scripts/obtain/dotnet-install.ps1";
$NugetUrl = "https://dist.nuget.org/win-x86-commandline/latest/nuget.exe"
[Reflection.Assembly]::LoadWithPartialName("System.Security") | Out-Null
function MD5HashFile([string] $filePath)
{
if ([string]::IsNullOrEmpty($filePath) -or !(Test-Path $filePath -PathType Leaf))
{
return $null
}
[System.IO.Stream] $file = $null;
[System.Security.Cryptography.MD5] $md5 = $null;
try
{
$md5 = [System.Security.Cryptography.MD5]::Create()
$file = [System.IO.File]::OpenRead($filePath)
return [System.BitConverter]::ToString($md5.ComputeHash($file))
}
finally
{
if ($file -ne $null)
{
$file.Dispose()
}
}
}
Write-Host "Preparing to run build script..."
if(!$PSScriptRoot){
$PSScriptRoot = Split-Path $MyInvocation.MyCommand.Path -Parent
}
$TOOLS_DIR = Join-Path $PSScriptRoot "tools"
$ADDINS_DIR = Join-Path $TOOLS_DIR "addins"
$MODULES_DIR = Join-Path $TOOLS_DIR "modules"
$NUGET_EXE = Join-Path $TOOLS_DIR "nuget.exe"
$CAKE_EXE = Join-Path $TOOLS_DIR "Cake/Cake.exe"
$NUGET_URL = "https://dist.nuget.org/win-x86-commandline/latest/nuget.exe"
$PACKAGES_CONFIG = Join-Path $TOOLS_DIR "packages.config"
$PACKAGES_CONFIG_MD5 = Join-Path $TOOLS_DIR "packages.config.md5sum"
$ADDINS_PACKAGES_CONFIG = Join-Path $ADDINS_DIR "packages.config"
$MODULES_PACKAGES_CONFIG = Join-Path $MODULES_DIR "packages.config"
# Should we use mono?
$UseMono = "";
if($Mono.IsPresent) {
Write-Verbose -Message "Using the Mono based scripting engine."
$UseMono = "-mono"
}
# Should we use the new Roslyn?
$UseExperimental = "";
if($Experimental.IsPresent -and !($Mono.IsPresent)) {
Write-Verbose -Message "Using experimental version of Roslyn."
$UseExperimental = "-experimental"
}
# Is this a dry run?
$UseDryRun = "";
if($WhatIf.IsPresent) {
$UseDryRun = "-dryrun"
}
# Make sure tools folder exists
$PSScriptRoot = Split-Path $MyInvocation.MyCommand.Path -Parent
$ToolPath = Join-Path $PSScriptRoot "tools"
if (!(Test-Path $ToolPath)) {
Write-Verbose "Creating tools directory..."
New-Item -Path $ToolPath -Type directory | out-null
if ((Test-Path $PSScriptRoot) -and !(Test-Path $TOOLS_DIR)) {
Write-Verbose -Message "Creating tools directory..."
New-Item -Path $TOOLS_DIR -Type directory | out-null
}
###########################################################################
# INSTALL .NET CORE CLI
###########################################################################
Function Remove-PathVariable([string]$VariableToRemove)
{
$path = [Environment]::GetEnvironmentVariable("PATH", "User")
if ($path -ne $null)
{
$newItems = $path.Split(';', [StringSplitOptions]::RemoveEmptyEntries) | Where-Object { "$($_)" -inotlike $VariableToRemove }
[Environment]::SetEnvironmentVariable("PATH", [System.String]::Join(';', $newItems), "User")
}
$path = [Environment]::GetEnvironmentVariable("PATH", "Process")
if ($path -ne $null)
{
$newItems = $path.Split(';', [StringSplitOptions]::RemoveEmptyEntries) | Where-Object { "$($_)" -inotlike $VariableToRemove }
[Environment]::SetEnvironmentVariable("PATH", [System.String]::Join(';', $newItems), "Process")
# Make sure that packages.config exist.
if (!(Test-Path $PACKAGES_CONFIG)) {
Write-Verbose -Message "Downloading packages.config..."
try { (New-Object System.Net.WebClient).DownloadFile("http://cakebuild.net/download/bootstrapper/packages", $PACKAGES_CONFIG) } catch {
Throw "Could not download packages.config."
}
}
# Get .NET Core CLI path if installed.
$FoundDotNetCliVersion = $null;
if (Get-Command dotnet -ErrorAction SilentlyContinue) {
$FoundDotNetCliVersion = dotnet --version;
}
if($FoundDotNetCliVersion -ne $DotNetVersion) {
$InstallPath = Join-Path $PSScriptRoot ".dotnet"
if (!(Test-Path $InstallPath)) {
mkdir -Force $InstallPath | Out-Null;
# Try find NuGet.exe in path if not exists
if (!(Test-Path $NUGET_EXE)) {
Write-Verbose -Message "Trying to find nuget.exe in PATH..."
$existingPaths = $Env:Path -Split ';' | Where-Object { (![string]::IsNullOrEmpty($_)) -and (Test-Path $_ -PathType Container) }
$NUGET_EXE_IN_PATH = Get-ChildItem -Path $existingPaths -Filter "nuget.exe" | Select -First 1
if ($NUGET_EXE_IN_PATH -ne $null -and (Test-Path $NUGET_EXE_IN_PATH.FullName)) {
Write-Verbose -Message "Found in PATH at $($NUGET_EXE_IN_PATH.FullName)."
$NUGET_EXE = $NUGET_EXE_IN_PATH.FullName
}
(New-Object System.Net.WebClient).DownloadFile($DotNetInstallerUri, "$InstallPath\dotnet-install.ps1");
& $InstallPath\dotnet-install.ps1 -Channel $DotNetChannel -Version $DotNetVersion -InstallDir $InstallPath;
Remove-PathVariable "$InstallPath"
$env:PATH = "$InstallPath;$env:PATH"
$env:DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
$env:DOTNET_CLI_TELEMETRY_OPTOUT=1
}
###########################################################################
# INSTALL NUGET
###########################################################################
# Make sure nuget.exe exists.
$NugetPath = Join-Path $ToolPath "nuget.exe"
if (!(Test-Path $NugetPath)) {
Write-Host "Downloading NuGet.exe..."
(New-Object System.Net.WebClient).DownloadFile($NugetUrl, $NugetPath);
# Try download NuGet.exe if not exists
if (!(Test-Path $NUGET_EXE)) {
Write-Verbose -Message "Downloading NuGet.exe..."
try {
(New-Object System.Net.WebClient).DownloadFile($NUGET_URL, $NUGET_EXE)
} catch {
Throw "Could not download NuGet.exe."
}
}
###########################################################################
# INSTALL CAKE
###########################################################################
# Save nuget.exe path to environment to be available to child processed
$ENV:NUGET_EXE = $NUGET_EXE
# Restore tools from NuGet?
if(-Not $SkipToolPackageRestore.IsPresent) {
Push-Location
Set-Location $TOOLS_DIR
# Check for changes in packages.config and remove installed tools if true.
[string] $md5Hash = MD5HashFile($PACKAGES_CONFIG)
if((!(Test-Path $PACKAGES_CONFIG_MD5)) -Or
($md5Hash -ne (Get-Content $PACKAGES_CONFIG_MD5 ))) {
Write-Verbose -Message "Missing or changed package.config hash..."
Remove-Item * -Recurse -Exclude packages.config,nuget.exe
}
Write-Verbose -Message "Restoring tools from NuGet..."
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$TOOLS_DIR`""
# Make sure Cake has been installed.
$CakePath = Join-Path $ToolPath "Cake.$CakeVersion/Cake.exe"
if (!(Test-Path $CakePath)) {
Write-Host "Installing Cake..."
Invoke-Expression "&`"$NugetPath`" install Cake -Version $CakeVersion -OutputDirectory `"$ToolPath`"" | Out-Null;
if ($LASTEXITCODE -ne 0) {
Throw "An error occured while restoring Cake from NuGet."
Throw "An error occured while restoring NuGet tools."
}
else
{
$md5Hash | Out-File $PACKAGES_CONFIG_MD5 -Encoding "ASCII"
}
Write-Verbose -Message ($NuGetOutput | out-string)
Pop-Location
}
###########################################################################
# RUN BUILD SCRIPT
###########################################################################
# Restore addins from NuGet
if (Test-Path $ADDINS_PACKAGES_CONFIG) {
Push-Location
Set-Location $ADDINS_DIR
# Build the argument list.
$Arguments = @{
target=$Target;
configuration=$Configuration;
verbosity=$Verbosity;
dryrun=$WhatIf;
}.GetEnumerator() | %{"--{0}=`"{1}`"" -f $_.key, $_.value };
Write-Verbose -Message "Restoring addins from NuGet..."
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$ADDINS_DIR`""
if ($LASTEXITCODE -ne 0) {
Throw "An error occured while restoring NuGet addins."
}
Write-Verbose -Message ($NuGetOutput | out-string)
Pop-Location
}
# Restore modules from NuGet
if (Test-Path $MODULES_PACKAGES_CONFIG) {
Push-Location
Set-Location $MODULES_DIR
Write-Verbose -Message "Restoring modules from NuGet..."
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$MODULES_DIR`""
if ($LASTEXITCODE -ne 0) {
Throw "An error occured while restoring NuGet modules."
}
Write-Verbose -Message ($NuGetOutput | out-string)
Pop-Location
}
# Make sure that Cake has been installed.
if (!(Test-Path $CAKE_EXE)) {
Throw "Could not find Cake.exe at $CAKE_EXE"
}
# Start Cake
Write-Host "Running build script..."
Invoke-Expression "& `"$CakePath`" `"$Script`" $Arguments $ScriptArgs"
Invoke-Expression "& `"$CAKE_EXE`" `"$Script`" -target=`"$Target`" -configuration=`"$Configuration`" -verbosity=`"$Verbosity`" $UseMono $UseDryRun $UseExperimental $ScriptArgs"
exit $LASTEXITCODE

View File

@@ -1,3 +0,0 @@
{
"projects": ["src","test"]
}

View File

@@ -61,18 +61,12 @@ namespace SharpCompress.Archives
void IArchiveExtractionListener.FireEntryExtractionBegin(IArchiveEntry entry)
{
if (EntryExtractionBegin != null)
{
EntryExtractionBegin(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));
}
EntryExtractionBegin?.Invoke(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));
}
void IArchiveExtractionListener.FireEntryExtractionEnd(IArchiveEntry entry)
{
if (EntryExtractionEnd != null)
{
EntryExtractionEnd(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));
}
EntryExtractionEnd?.Invoke(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));
}
private static Stream CheckStreams(Stream stream)
@@ -129,27 +123,21 @@ namespace SharpCompress.Archives
void IExtractionListener.FireCompressedBytesRead(long currentPartCompressedBytes, long compressedReadBytes)
{
if (CompressedBytesRead != null)
CompressedBytesRead?.Invoke(this, new CompressedBytesReadEventArgs
{
CompressedBytesRead(this, new CompressedBytesReadEventArgs
{
CurrentFilePartCompressedBytesRead = currentPartCompressedBytes,
CompressedBytesRead = compressedReadBytes
});
}
CurrentFilePartCompressedBytesRead = currentPartCompressedBytes,
CompressedBytesRead = compressedReadBytes
});
}
void IExtractionListener.FireFilePartExtractionBegin(string name, long size, long compressedSize)
{
if (FilePartExtractionBegin != null)
FilePartExtractionBegin?.Invoke(this, new FilePartExtractionBeginEventArgs
{
FilePartExtractionBegin(this, new FilePartExtractionBeginEventArgs
{
CompressedSize = compressedSize,
Size = size,
Name = name
});
}
CompressedSize = compressedSize,
Size = size,
Name = name
});
}
/// <summary>

View File

@@ -14,6 +14,12 @@ namespace SharpCompress.Archives.GZip
public virtual Stream OpenEntryStream()
{
//this is to reset the stream to be read multiple times
var part = Parts.Single() as GZipFilePart;
if (part.GetRawStream().Position != part.EntryStartPosition)
{
part.GetRawStream().Position = part.EntryStartPosition;
}
return Parts.Single().GetCompressedStream();
}
@@ -21,7 +27,7 @@ namespace SharpCompress.Archives.GZip
public IArchive Archive { get; }
public bool IsComplete { get { return true; } }
public bool IsComplete => true;
#endregion
}
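The added position reset is the fix for #198: a second OpenEntryStream() on a GZip archive entry previously continued from wherever the raw stream had been left. A short sketch of the double read this enables; the two-pass usage and file names are illustrative:

```csharp
using System.IO;
using System.Linq;
using SharpCompress.Archives.GZip;

internal static class GZipDoubleReadExample
{
    public static void ReadEntryTwice(string gzPath)
    {
        using (var archive = GZipArchive.Open(File.OpenRead(gzPath)))
        {
            var entry = archive.Entries.First();

            // First pass: e.g. measure the decompressed length.
            long length = 0;
            using (var stream = entry.OpenEntryStream())
            {
                var buffer = new byte[81920];
                int read;
                while ((read = stream.Read(buffer, 0, buffer.Length)) > 0)
                {
                    length += read;
                }
            }

            // Second pass: actually extract. Works after the fix for #198,
            // because OpenEntryStream() now rewinds to the entry start.
            using (var stream = entry.OpenEntryStream())
            using (var output = File.Create(gzPath + ".out"))
            {
                stream.CopyTo(output);
            }
        }
    }
}
```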

View File

@@ -22,31 +22,31 @@ namespace SharpCompress.Archives.GZip
this.closeStream = closeStream;
}
public override long Crc { get { return 0; } }
public override long Crc => 0;
public override string Key { get; }
public override long CompressedSize { get { return 0; } }
public override long CompressedSize => 0;
public override long Size { get; }
public override DateTime? LastModifiedTime { get; }
public override DateTime? CreatedTime { get { return null; } }
public override DateTime? CreatedTime => null;
public override DateTime? LastAccessedTime { get { return null; } }
public override DateTime? LastAccessedTime => null;
public override DateTime? ArchivedTime { get { return null; } }
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted { get { return false; } }
public override bool IsEncrypted => false;
public override bool IsDirectory { get { return false; } }
public override bool IsDirectory => false;
public override bool IsSplit { get { return false; } }
public override bool IsSplit => false;
internal override IEnumerable<FilePart> Parts { get { throw new NotImplementedException(); } }
internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();
Stream IWritableArchiveEntry.Stream { get { return stream; } }
Stream IWritableArchiveEntry.Stream => stream;
public override Stream OpenEntryStream()
{

View File

@@ -60,7 +60,7 @@ namespace SharpCompress.Archives.Rar
return RarReader.Open(stream, ReaderOptions);
}
public override bool IsSolid { get { return Volumes.First().IsSolidArchive; } }
public override bool IsSolid => Volumes.First().IsSolidArchive;
#region Creation

View File

@@ -20,13 +20,13 @@ namespace SharpCompress.Archives.Rar
this.archive = archive;
}
public override CompressionType CompressionType { get { return CompressionType.Rar; } }
public override CompressionType CompressionType => CompressionType.Rar;
public IArchive Archive { get { return archive; } }
public IArchive Archive => archive;
internal override IEnumerable<FilePart> Parts { get { return parts.Cast<FilePart>(); } }
internal override IEnumerable<FilePart> Parts => parts.Cast<FilePart>();
internal override FileHeader FileHeader { get { return parts.First().FileHeader; } }
internal override FileHeader FileHeader => parts.First().FileHeader;
public override long Crc
{

View File

@@ -28,6 +28,6 @@ namespace SharpCompress.Archives.Rar
return stream;
}
internal override string FilePartName { get { return "Unknown Stream - File Entry: " + FileHeader.FileName; } }
internal override string FilePartName => "Unknown Stream - File Entry: " + FileHeader.FileName;
}
}

View File

@@ -106,10 +106,7 @@ namespace SharpCompress.Archives.SevenZip
for (int i = 0; i < database.Files.Count; i++)
{
var file = database.Files[i];
if (!file.IsDir)
{
yield return new SevenZipArchiveEntry(this, new SevenZipFilePart(stream, database, i, file));
}
yield return new SevenZipArchiveEntry(this, new SevenZipFilePart(stream, database, i, file));
}
}
@@ -174,7 +171,7 @@ namespace SharpCompress.Archives.SevenZip
this.archive = archive;
}
public override SevenZipVolume Volume { get { return archive.Volumes.Single(); } }
public override SevenZipVolume Volume => archive.Volumes.Single();
internal override IEnumerable<SevenZipEntry> GetEntries(Stream stream)
{
@@ -209,4 +206,4 @@ namespace SharpCompress.Archives.SevenZip
}
}
}
}
}

View File

@@ -18,11 +18,11 @@ namespace SharpCompress.Archives.SevenZip
public IArchive Archive { get; }
public bool IsComplete { get { return true; } }
public bool IsComplete => true;
/// <summary>
/// This is a 7Zip Anti item
/// </summary>
public bool IsAnti { get { return FilePart.Header.IsAnti; } }
public bool IsAnti => FilePart.Header.IsAnti;
}
}

View File

@@ -22,7 +22,7 @@ namespace SharpCompress.Archives.Tar
public IArchive Archive { get; }
public bool IsComplete { get { return true; } }
public bool IsComplete => true;
#endregion
}

View File

@@ -22,30 +22,30 @@ namespace SharpCompress.Archives.Tar
this.closeStream = closeStream;
}
public override long Crc { get { return 0; } }
public override long Crc => 0;
public override string Key { get; }
public override long CompressedSize { get { return 0; } }
public override long CompressedSize => 0;
public override long Size { get; }
public override DateTime? LastModifiedTime { get; }
public override DateTime? CreatedTime { get { return null; } }
public override DateTime? CreatedTime => null;
public override DateTime? LastAccessedTime { get { return null; } }
public override DateTime? LastAccessedTime => null;
public override DateTime? ArchivedTime { get { return null; } }
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted { get { return false; } }
public override bool IsEncrypted => false;
public override bool IsDirectory { get { return false; } }
public override bool IsDirectory => false;
public override bool IsSplit { get { return false; } }
public override bool IsSplit => false;
internal override IEnumerable<FilePart> Parts { get { throw new NotImplementedException(); } }
Stream IWritableArchiveEntry.Stream { get { return stream; } }
internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();
Stream IWritableArchiveEntry.Stream => stream;
public override Stream OpenEntryStream()
{

View File

@@ -21,10 +21,10 @@ namespace SharpCompress.Archives.Zip
public IArchive Archive { get; }
public bool IsComplete { get { return true; } }
public bool IsComplete => true;
#endregion
public string Comment { get { return (Parts.Single() as SeekableZipFilePart).Comment; } }
public string Comment => (Parts.Single() as SeekableZipFilePart).Comment;
}
}

View File

@@ -23,31 +23,31 @@ namespace SharpCompress.Archives.Zip
this.closeStream = closeStream;
}
public override long Crc { get { return 0; } }
public override long Crc => 0;
public override string Key { get; }
public override long CompressedSize { get { return 0; } }
public override long CompressedSize => 0;
public override long Size { get; }
public override DateTime? LastModifiedTime { get; }
public override DateTime? CreatedTime { get { return null; } }
public override DateTime? CreatedTime => null;
public override DateTime? LastAccessedTime { get { return null; } }
public override DateTime? LastAccessedTime => null;
public override DateTime? ArchivedTime { get { return null; } }
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted { get { return false; } }
public override bool IsEncrypted => false;
public override bool IsDirectory { get { return false; } }
public override bool IsDirectory => false;
public override bool IsSplit { get { return false; } }
public override bool IsSplit => false;
internal override IEnumerable<FilePart> Parts { get { throw new NotImplementedException(); } }
internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();
Stream IWritableArchiveEntry.Stream { get { return stream; } }
Stream IWritableArchiveEntry.Stream => stream;
public override Stream OpenEntryStream()
{

View File

@@ -4,6 +4,22 @@ using System.Runtime.CompilerServices;
[assembly: AssemblyTitle("SharpCompress")]
[assembly: AssemblyProduct("SharpCompress")]
[assembly: InternalsVisibleTo("SharpCompress.Test")]
[assembly: InternalsVisibleTo("SharpCompress.Test.Portable")]
[assembly: CLSCompliant(true)]
[assembly: InternalsVisibleTo("SharpCompress.Test" + SharpCompress.AssemblyInfo.PublicKeySuffix)]
[assembly: InternalsVisibleTo("SharpCompress.Test.Portable" + SharpCompress.AssemblyInfo.PublicKeySuffix)]
[assembly: CLSCompliant(true)]
namespace SharpCompress
{
/// <summary>
/// Just a static class to house the public key, to avoid repetition.
/// </summary>
internal static class AssemblyInfo
{
internal const string PublicKeySuffix =
",PublicKey=002400000480000094000000060200000024000052534131000400000100010059acfa17d26c44" +
"7a4d03f16eaa72c9187c04f16e6569dd168b080e39a6f5c9fd00f28c768cd8e9a089d5a0e1b34c" +
"cd971488e7afe030ce5ce8df2053cf12ec89f6d38065c434c09ee6af3ee284c5dc08f44774b679" +
"bf39298e57efe30d4b00aecf9e4f6f8448b2cb0146d8956dfcab606cc64a0ac38c60a7d78b0d65" +
"d3b98dc0";
}
}

View File

@@ -75,6 +75,6 @@ namespace SharpCompress.Common
/// <summary>
/// Entry file attribute.
/// </summary>
public virtual int? Attrib { get { throw new NotImplementedException(); } }
public virtual int? Attrib => throw new NotImplementedException();
}
}

View File

@@ -44,20 +44,20 @@ namespace SharpCompress.Common
stream.Dispose();
}
public override bool CanRead { get { return true; } }
public override bool CanRead => true;
public override bool CanSeek { get { return false; } }
public override bool CanSeek => false;
public override bool CanWrite { get { return false; } }
public override bool CanWrite => false;
public override void Flush()
{
throw new NotSupportedException();
}
public override long Length { get { throw new NotSupportedException(); } }
public override long Length => throw new NotSupportedException();
public override long Position { get { throw new NotSupportedException(); } set { throw new NotSupportedException(); } }
public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
public override int Read(byte[] buffer, int offset, int count)
{

View File

@@ -13,31 +13,31 @@ namespace SharpCompress.Common.GZip
this.filePart = filePart;
}
public override CompressionType CompressionType { get { return CompressionType.GZip; } }
public override CompressionType CompressionType => CompressionType.GZip;
public override long Crc { get { return 0; } }
public override long Crc => 0;
public override string Key { get { return filePart.FilePartName; } }
public override string Key => filePart.FilePartName;
public override long CompressedSize { get { return 0; } }
public override long CompressedSize => 0;
public override long Size { get { return 0; } }
public override long Size => 0;
public override DateTime? LastModifiedTime { get { return filePart.DateModified; } }
public override DateTime? LastModifiedTime => filePart.DateModified;
public override DateTime? CreatedTime { get { return null; } }
public override DateTime? CreatedTime => null;
public override DateTime? LastAccessedTime { get { return null; } }
public override DateTime? LastAccessedTime => null;
public override DateTime? ArchivedTime { get { return null; } }
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted { get { return false; } }
public override bool IsEncrypted => false;
public override bool IsDirectory { get { return false; } }
public override bool IsDirectory => false;
public override bool IsSplit { get { return false; } }
public override bool IsSplit => false;
internal override IEnumerable<FilePart> Parts { get { return filePart.AsEnumerable<FilePart>(); } }
internal override IEnumerable<FilePart> Parts => filePart.AsEnumerable<FilePart>();
internal static IEnumerable<GZipEntry> GetEntries(Stream stream)
{

View File

@@ -16,12 +16,15 @@ namespace SharpCompress.Common.GZip
internal GZipFilePart(Stream stream)
{
ReadAndValidateGzipHeader(stream);
EntryStartPosition = stream.Position;
this.stream = stream;
}
internal long EntryStartPosition { get; }
internal DateTime? DateModified { get; private set; }
internal override string FilePartName { get { return name; } }
internal override string FilePartName => name;
internal override Stream GetCompressedStream()
{

View File

@@ -18,8 +18,8 @@ namespace SharpCompress.Common.GZip
}
#endif
public override bool IsFirstVolume { get { return true; } }
public override bool IsFirstVolume => true;
public override bool IsMultiVolume { get { return true; } }
public override bool IsMultiVolume => true;
}
}

View File

@@ -17,7 +17,7 @@ namespace SharpCompress.Common.Rar.Headers
}
}
internal ArchiveFlags ArchiveHeaderFlags { get { return (ArchiveFlags)Flags; } }
internal ArchiveFlags ArchiveHeaderFlags => (ArchiveFlags)Flags;
internal short HighPosAv { get; private set; }
@@ -25,6 +25,6 @@ namespace SharpCompress.Common.Rar.Headers
internal byte EncryptionVersion { get; private set; }
public bool HasPassword { get { return ArchiveHeaderFlags.HasFlag(ArchiveFlags.PASSWORD); } }
public bool HasPassword => ArchiveHeaderFlags.HasFlag(ArchiveFlags.PASSWORD);
}
}

View File

@@ -16,7 +16,7 @@ namespace SharpCompress.Common.Rar.Headers
}
}
internal EndArchiveFlags EndArchiveFlags { get { return (EndArchiveFlags)Flags; } }
internal EndArchiveFlags EndArchiveFlags => (EndArchiveFlags)Flags;
internal int? ArchiveCRC { get; private set; }

View File

@@ -208,7 +208,7 @@ namespace SharpCompress.Common.Rar.Headers
internal int FileAttributes { get; private set; }
internal FileFlags FileFlags { get { return (FileFlags)Flags; } }
internal FileFlags FileFlags => (FileFlags)Flags;
internal long CompressedSize { get; private set; }
internal long UncompressedSize { get; private set; }

View File

@@ -13,7 +13,7 @@ namespace SharpCompress.Common.Rar.Headers
Mark = reader.ReadBytes(8);
}
internal uint DataSize { get { return AdditionalSize; } }
internal uint DataSize => AdditionalSize;
internal byte Version { get; private set; }
internal ushort RecSectors { get; private set; }
internal uint TotalBlocks { get; private set; }

View File

@@ -1,4 +1,5 @@
using System.IO;
using System;
using System.IO;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
@@ -18,14 +19,14 @@ namespace SharpCompress.Common.Rar.Headers
ReadBytes = baseHeader.ReadBytes;
}
internal static RarHeader Create(MarkingBinaryReader reader)
internal static RarHeader Create(RarCrcBinaryReader reader)
{
try
{
RarHeader header = new RarHeader();
reader.Mark();
header.ReadFromReader(reader);
header.ReadStartFromReader(reader);
header.ReadBytes += reader.CurrentReadByteCount;
return header;
@@ -36,9 +37,10 @@ namespace SharpCompress.Common.Rar.Headers
}
}
protected virtual void ReadFromReader(MarkingBinaryReader reader)
private void ReadStartFromReader(RarCrcBinaryReader reader)
{
HeadCRC = reader.ReadInt16();
HeadCRC = reader.ReadUInt16();
reader.ResetCrc();
HeaderType = (HeaderType)(reader.ReadByte() & 0xff);
Flags = reader.ReadInt16();
HeaderSize = reader.ReadInt16();
@@ -48,7 +50,11 @@ namespace SharpCompress.Common.Rar.Headers
}
}
internal T PromoteHeader<T>(MarkingBinaryReader reader)
protected virtual void ReadFromReader(MarkingBinaryReader reader) {
throw new NotImplementedException();
}
internal T PromoteHeader<T>(RarCrcBinaryReader reader)
where T : RarHeader, new()
{
T header = new T();
@@ -65,9 +71,21 @@ namespace SharpCompress.Common.Rar.Headers
reader.ReadBytes(headerSizeDiff);
}
VerifyHeaderCrc(reader.GetCrc());
return header;
}
private void VerifyHeaderCrc(ushort crc) {
if (HeaderType != HeaderType.MarkHeader)
{
if (crc != HeadCRC)
{
throw new InvalidFormatException("rar header crc mismatch");
}
}
}
protected virtual void PostReadingBytes(MarkingBinaryReader reader)
{
}
@@ -77,7 +95,7 @@ namespace SharpCompress.Common.Rar.Headers
/// </summary>
protected long ReadBytes { get; private set; }
protected short HeadCRC { get; private set; }
protected ushort HeadCRC { get; private set; }
internal HeaderType HeaderType { get; private set; }

View File

@@ -129,7 +129,7 @@ namespace SharpCompress.Common.Rar.Headers
reader.InitializeAes(salt);
}
#else
var reader = new MarkingBinaryReader(stream);
var reader = new RarCrcBinaryReader(stream);
#endif
@@ -247,4 +247,4 @@ namespace SharpCompress.Common.Rar.Headers
}
}
}
}
}

View File

@@ -0,0 +1,40 @@
using System.IO;
using SharpCompress.Compressors.Rar;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar {
internal class RarCrcBinaryReader : MarkingBinaryReader {
private uint currentCrc;
public RarCrcBinaryReader(Stream stream) : base(stream)
{
}
public ushort GetCrc()
{
return (ushort)~currentCrc;
}
public void ResetCrc()
{
currentCrc = 0xffffffff;
}
protected void UpdateCrc(byte b)
{
currentCrc = RarCRC.CheckCrc(currentCrc, b);
}
protected byte[] ReadBytesNoCrc(int count)
{
return base.ReadBytes(count);
}
public override byte[] ReadBytes(int count)
{
var result = base.ReadBytes(count);
currentCrc = RarCRC.CheckCrc(currentCrc, result, 0, result.Length);
return result;
}
}
}
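With header CRCs verified (#220), a corrupted RAR now surfaces as an InvalidFormatException ("rar header crc mismatch") while headers are read, instead of producing bad data. A sketch of guarding for it; exactly where it throws depends on where the damaged header sits:

```csharp
using System;
using System.IO;
using System.Linq;
using SharpCompress.Archives.Rar;
using SharpCompress.Common;

internal static class RarCrcExample
{
    public static bool TryCountEntries(string rarPath, out int count)
    {
        count = 0;
        try
        {
            using (var archive = RarArchive.Open(File.OpenRead(rarPath)))
            {
                // Enumerating entries forces the headers (and their CRCs) to be read.
                count = archive.Entries.Count();
                return true;
            }
        }
        catch (InvalidFormatException ex)
        {
            // Thrown when a stored header CRC does not match the computed one.
            Console.WriteLine("Corrupt RAR: " + ex.Message);
            return false;
        }
    }
}
```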

View File

@@ -6,12 +6,13 @@ using SharpCompress.IO;
namespace SharpCompress.Common.Rar
{
internal class RarCryptoBinaryReader : MarkingBinaryReader
internal class RarCryptoBinaryReader : RarCrcBinaryReader
{
private RarRijndael rijndael;
private byte[] salt;
private readonly string password;
private readonly Queue<byte> data = new Queue<byte>();
private long readCount;
public RarCryptoBinaryReader(Stream stream, string password )
: base(stream)
@@ -19,6 +20,22 @@ namespace SharpCompress.Common.Rar
this.password = password;
}
// track read count ourselves rather than using the underlying stream since we buffer
public override long CurrentReadByteCount {
get
{
return this.readCount;
}
protected set
{
// ignore
}
}
public override void Mark() {
this.readCount = 0;
}
protected bool UseEncryption
{
get { return salt != null; }
@@ -36,6 +53,7 @@ namespace SharpCompress.Common.Rar
{
return ReadAndDecryptBytes(count);
}
this.readCount += count;
return base.ReadBytes(count);
}
@@ -50,7 +68,7 @@ namespace SharpCompress.Common.Rar
for (int i = 0; i < alignedSize / 16; i++)
{
//long ax = System.currentTimeMillis();
byte[] cipherText = base.ReadBytes(16);
byte[] cipherText = base.ReadBytesNoCrc(16);
var readBytes = rijndael.ProcessBlock(cipherText);
foreach (var readByte in readBytes)
data.Enqueue(readByte);
@@ -63,8 +81,11 @@ namespace SharpCompress.Common.Rar
for (int i = 0; i < count; i++)
{
decryptedBytes[i] = data.Dequeue();
var b = data.Dequeue();
decryptedBytes[i] = b;
UpdateCrc(b);
}
this.readCount += count;
return decryptedBytes;
}

View File

@@ -10,44 +10,44 @@ namespace SharpCompress.Common.Rar
/// <summary>
/// The File's 32 bit CRC Hash
/// </summary>
public override long Crc { get { return FileHeader.FileCRC; } }
public override long Crc => FileHeader.FileCRC;
/// <summary>
/// The path of the file internal to the Rar Archive.
/// </summary>
public override string Key { get { return FileHeader.FileName; } }
public override string Key => FileHeader.FileName;
/// <summary>
/// The entry last modified time in the archive, if recorded
/// </summary>
public override DateTime? LastModifiedTime { get { return FileHeader.FileLastModifiedTime; } }
public override DateTime? LastModifiedTime => FileHeader.FileLastModifiedTime;
/// <summary>
/// The entry create time in the archive, if recorded
/// </summary>
public override DateTime? CreatedTime { get { return FileHeader.FileCreatedTime; } }
public override DateTime? CreatedTime => FileHeader.FileCreatedTime;
/// <summary>
/// The entry last accessed time in the archive, if recorded
/// </summary>
public override DateTime? LastAccessedTime { get { return FileHeader.FileLastAccessedTime; } }
public override DateTime? LastAccessedTime => FileHeader.FileLastAccessedTime;
/// <summary>
/// The entry time whend archived, if recorded
/// </summary>
public override DateTime? ArchivedTime { get { return FileHeader.FileArchivedTime; } }
public override DateTime? ArchivedTime => FileHeader.FileArchivedTime;
/// <summary>
/// Entry is password protected and encrypted and cannot be extracted.
/// </summary>
public override bool IsEncrypted { get { return FileHeader.FileFlags.HasFlag(FileFlags.PASSWORD); } }
public override bool IsEncrypted => FileHeader.FileFlags.HasFlag(FileFlags.PASSWORD);
/// <summary>
/// Entry is password protected and encrypted and cannot be extracted.
/// </summary>
public override bool IsDirectory { get { return FileHeader.FileFlags.HasFlag(FileFlags.DIRECTORY); } }
public override bool IsDirectory => FileHeader.FileFlags.HasFlag(FileFlags.DIRECTORY);
public override bool IsSplit { get { return FileHeader.FileFlags.HasFlag(FileFlags.SPLIT_AFTER); } }
public override bool IsSplit => FileHeader.FileFlags.HasFlag(FileFlags.SPLIT_AFTER);
public override string ToString()
{

View File

@@ -21,7 +21,7 @@ namespace SharpCompress.Common.Rar
headerFactory = new RarHeaderFactory(mode, options);
}
internal StreamingMode Mode { get { return headerFactory.StreamingMode; } }
internal StreamingMode Mode => headerFactory.StreamingMode;
internal abstract IEnumerable<RarFilePart> ReadFileParts();

View File

@@ -1,14 +1,17 @@
using System;
using SharpCompress.Readers;
namespace SharpCompress.Common
{
public class ReaderExtractionEventArgs<T> : EventArgs
{
internal ReaderExtractionEventArgs(T entry)
internal ReaderExtractionEventArgs(T entry, ReaderProgress readerProgress = null)
{
Item = entry;
ReaderProgress = readerProgress;
}
public T Item { get; private set; }
public ReaderProgress ReaderProgress { get; private set; }
}
}
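ReaderExtractionEventArgs now carries a ReaderProgress next to the entry, which is how the per-entry progress from #226 reaches callers. A hedged sketch of consuming it; the EntryExtractionProgress event name and the BytesTransferred member are assumptions about that PR's public surface:

```csharp
using System;
using System.IO;
using SharpCompress.Readers;

internal static class ProgressExample
{
    public static void ExtractWithProgress(Stream source, string outputDir)
    {
        using (var reader = ReaderFactory.Open(source))
        {
            // EntryExtractionProgress is assumed to be the event added by PR #226.
            reader.EntryExtractionProgress += (sender, e) =>
            {
                // The args expose the entry as Item and the progress as ReaderProgress;
                // BytesTransferred is an assumed member of ReaderProgress.
                Console.WriteLine("{0}: {1} bytes read", e.Item.Key, e.ReaderProgress.BytesTransferred);
            };

            while (reader.MoveToNextEntry())
            {
                if (reader.Entry.IsDirectory)
                {
                    continue;
                }
                using (var entryStream = reader.OpenEntryStream())
                using (var output = File.Create(Path.Combine(outputDir, Path.GetFileName(reader.Entry.Key))))
                {
                    entryStream.CopyTo(output);
                }
            }
        }
    }
}
```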

View File

@@ -1339,20 +1339,20 @@ namespace SharpCompress.Common.SevenZip
#region Stream
public override bool CanRead { get { return true; } }
public override bool CanRead => true;
public override bool CanSeek { get { return false; } }
public override bool CanSeek => false;
public override bool CanWrite { get { return false; } }
public override bool CanWrite => false;
public override void Flush()
{
throw new NotSupportedException();
}
public override long Length { get { throw new NotSupportedException(); } }
public override long Length => throw new NotSupportedException();
public override long Position { get { throw new NotSupportedException(); } set { throw new NotSupportedException(); } }
public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
public override int Read(byte[] buffer, int offset, int count)
{

View File

@@ -12,9 +12,9 @@ namespace SharpCompress.Common.SevenZip
public bool HasStream { get; internal set; }
public bool IsDir { get; internal set; }
public bool CrcDefined { get { return Crc != null; } }
public bool CrcDefined => Crc != null;
public bool AttribDefined { get { return Attrib != null; } }
public bool AttribDefined => Attrib != null;
public void SetAttrib(uint attrib)
{

View File

@@ -13,7 +13,7 @@ namespace SharpCompress.Common.SevenZip
internal List<long> UnpackSizes = new List<long>();
internal uint? UnpackCRC;
internal bool UnpackCRCDefined { get { return UnpackCRC != null; } }
internal bool UnpackCRCDefined => UnpackCRC != null;
public long GetUnpackSize()
{

View File

@@ -12,32 +12,32 @@ namespace SharpCompress.Common.SevenZip
internal SevenZipFilePart FilePart { get; }
public override CompressionType CompressionType { get { return FilePart.CompressionType; } }
public override CompressionType CompressionType => FilePart.CompressionType;
public override long Crc { get { return FilePart.Header.Crc ?? 0; } }
public override long Crc => FilePart.Header.Crc ?? 0;
public override string Key { get { return FilePart.Header.Name; } }
public override string Key => FilePart.Header.Name;
public override long CompressedSize { get { return 0; } }
public override long CompressedSize => 0;
public override long Size { get { return FilePart.Header.Size; } }
public override long Size => FilePart.Header.Size;
public override DateTime? LastModifiedTime { get { return FilePart.Header.MTime; } }
public override DateTime? LastModifiedTime => FilePart.Header.MTime;
public override DateTime? CreatedTime { get { return null; } }
public override DateTime? CreatedTime => null;
public override DateTime? LastAccessedTime { get { return null; } }
public override DateTime? LastAccessedTime => null;
public override DateTime? ArchivedTime { get { return null; } }
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted { get { return false; } }
public override bool IsEncrypted => false;
public override bool IsDirectory { get { return FilePart.Header.IsDir; } }
public override bool IsDirectory => FilePart.Header.IsDir;
public override bool IsSplit { get { return false; } }
public override bool IsSplit => false;
public override int? Attrib { get { return (int)FilePart.Header.Attrib; } }
public override int? Attrib => (int)FilePart.Header.Attrib;
internal override IEnumerable<FilePart> Parts { get { return FilePart.AsEnumerable<FilePart>(); } }
internal override IEnumerable<FilePart> Parts => FilePart.AsEnumerable<FilePart>();
}
}

View File

@@ -28,7 +28,7 @@ namespace SharpCompress.Common.SevenZip
internal CFolder Folder { get; }
internal int Index { get; }
internal override string FilePartName { get { return Header.Name; } }
internal override string FilePartName => Header.Name;
internal override Stream GetRawStream()
{

View File

@@ -18,29 +18,29 @@ namespace SharpCompress.Common.Tar
public override CompressionType CompressionType { get; }
public override long Crc { get { return 0; } }
public override long Crc => 0;
public override string Key { get { return filePart.Header.Name; } }
public override string Key => filePart.Header.Name;
public override long CompressedSize { get { return filePart.Header.Size; } }
public override long CompressedSize => filePart.Header.Size;
public override long Size { get { return filePart.Header.Size; } }
public override long Size => filePart.Header.Size;
public override DateTime? LastModifiedTime { get { return filePart.Header.LastModifiedTime; } }
public override DateTime? LastModifiedTime => filePart.Header.LastModifiedTime;
public override DateTime? CreatedTime { get { return null; } }
public override DateTime? CreatedTime => null;
public override DateTime? LastAccessedTime { get { return null; } }
public override DateTime? LastAccessedTime => null;
public override DateTime? ArchivedTime { get { return null; } }
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted { get { return false; } }
public override bool IsEncrypted => false;
public override bool IsDirectory { get { return filePart.Header.EntryType == EntryType.Directory; } }
public override bool IsDirectory => filePart.Header.EntryType == EntryType.Directory;
public override bool IsSplit { get { return false; } }
public override bool IsSplit => false;
internal override IEnumerable<FilePart> Parts { get { return filePart.AsEnumerable<FilePart>(); } }
internal override IEnumerable<FilePart> Parts => filePart.AsEnumerable<FilePart>();
internal static IEnumerable<TarEntry> GetEntries(StreamingMode mode, Stream stream,
CompressionType compressionType)

View File

@@ -16,7 +16,7 @@ namespace SharpCompress.Common.Tar
internal TarHeader Header { get; }
internal override string FilePartName { get { return Header.Name; } }
internal override string FilePartName => Header.Name;
internal override Stream GetCompressedStream()
{

View File

@@ -42,20 +42,20 @@ namespace SharpCompress.Common.Tar
public Stream Stream { get; }
public override bool CanRead { get { return true; } }
public override bool CanRead => true;
public override bool CanSeek { get { return false; } }
public override bool CanSeek => false;
public override bool CanWrite { get { return false; } }
public override bool CanWrite => false;
public override void Flush()
{
throw new NotSupportedException();
}
public override long Length { get { throw new NotSupportedException(); } }
public override long Length => throw new NotSupportedException();
public override long Position { get { throw new NotSupportedException(); } set { throw new NotSupportedException(); } }
public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
public override int Read(byte[] buffer, int offset, int count)
{

View File

@@ -14,7 +14,7 @@ namespace SharpCompress.Common
ReaderOptions = readerOptions;
}
internal Stream Stream { get { return new NonDisposingStream(actualStream); } }
internal Stream Stream => new NonDisposingStream(actualStream);
protected ReaderOptions ReaderOptions { get; }
@@ -22,12 +22,12 @@ namespace SharpCompress.Common
/// RarArchive is the first volume of a multi-part archive.
/// Only Rar 3.0 format and higher
/// </summary>
public virtual bool IsFirstVolume { get { return true; } }
public virtual bool IsFirstVolume => true;
/// <summary>
/// RarArchive is part of a multi-part archive.
/// </summary>
public virtual bool IsMultiVolume { get { return true; } }
public virtual bool IsMultiVolume => true;
private bool disposed;

View File

@@ -48,5 +48,9 @@ namespace SharpCompress.Common.Zip.Headers
public byte[] Comment { get; private set; }
public ushort TotalNumberOfEntries { get; private set; }
public bool IsZip64 => TotalNumberOfEntriesInDisk == ushort.MaxValue
|| DirectorySize == uint.MaxValue
|| DirectoryStartOffsetRelativeToDisk == uint.MaxValue;
}
}

View File

@@ -1,4 +1,5 @@
using System.IO;
using System;
using System.IO;
using System.Linq;
namespace SharpCompress.Common.Zip.Headers
@@ -41,10 +42,31 @@ namespace SharpCompress.Common.Zip.Headers
{
Name = ((ExtraUnicodePathExtraField)unicodePathExtra).UnicodeName;
}
var zip64ExtraData = Extra.OfType<Zip64ExtendedInformationExtraField>().FirstOrDefault();
if (zip64ExtraData != null)
{
if (CompressedSize == uint.MaxValue)
{
CompressedSize = zip64ExtraData.CompressedSize;
}
if (UncompressedSize == uint.MaxValue)
{
UncompressedSize = zip64ExtraData.UncompressedSize;
}
if (RelativeOffsetOfEntryHeader == uint.MaxValue)
{
RelativeOffsetOfEntryHeader = zip64ExtraData.RelativeOffsetOfEntryHeader;
}
}
}
internal override void Write(BinaryWriter writer)
{
var zip64 = CompressedSize >= uint.MaxValue || UncompressedSize >= uint.MaxValue || RelativeOffsetOfEntryHeader >= uint.MaxValue;
if (zip64)
Version = (ushort)(Version > 45 ? Version : 45);
writer.Write(Version);
writer.Write(VersionNeededToExtract);
writer.Write((ushort)Flags);
@@ -52,24 +74,40 @@ namespace SharpCompress.Common.Zip.Headers
writer.Write(LastModifiedTime);
writer.Write(LastModifiedDate);
writer.Write(Crc);
writer.Write(CompressedSize);
writer.Write(UncompressedSize);
writer.Write(zip64 ? uint.MaxValue : CompressedSize);
writer.Write(zip64 ? uint.MaxValue : UncompressedSize);
byte[] nameBytes = EncodeString(Name);
writer.Write((ushort)nameBytes.Length);
//writer.Write((ushort)Extra.Length);
writer.Write((ushort)0);
if (zip64)
{
writer.Write((ushort)(2 + 2 + 8 + 8 + 8 + 4));
}
else
{
//writer.Write((ushort)Extra.Length);
writer.Write((ushort)0);
}
writer.Write((ushort)Comment.Length);
writer.Write(DiskNumberStart);
writer.Write(InternalFileAttributes);
writer.Write(ExternalFileAttributes);
writer.Write(RelativeOffsetOfEntryHeader);
writer.Write(zip64 ? uint.MaxValue : RelativeOffsetOfEntryHeader);
writer.Write(nameBytes);
// writer.Write(Extra);
if (zip64)
{
writer.Write((ushort)0x0001);
writer.Write((ushort)((8 + 8 + 8 + 4)));
writer.Write((ulong)UncompressedSize);
writer.Write((ulong)CompressedSize);
writer.Write((ulong)RelativeOffsetOfEntryHeader);
writer.Write((uint)0); // VolumeNumber = 0
}
writer.Write(Comment);
}
@@ -77,7 +115,7 @@ namespace SharpCompress.Common.Zip.Headers
public ushort VersionNeededToExtract { get; set; }
public uint RelativeOffsetOfEntryHeader { get; set; }
public long RelativeOffsetOfEntryHeader { get; set; }
public uint ExternalFileAttributes { get; set; }
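For the zip64 branch above, the declared extra-field length of 2 + 2 + 8 + 8 + 8 + 4 = 32 bytes corresponds to the Zip64 extended-information record written after the file name, while the 32-bit size and offset fields in the record itself hold the 0xFFFFFFFF sentinel. A self-contained sketch of that exact layout (placeholder sizes, plain BinaryWriter, not library API):
using System;
using System.IO;
class Zip64ExtraFieldLayout
{
    static void Main()
    {
        // Mirrors the writer logic in the diff above; the three values are placeholders.
        ulong uncompressedSize = 5_000_000_000UL;
        ulong compressedSize = 4_000_000_000UL;
        ulong relativeOffsetOfEntryHeader = 6_000_000_000UL;
        using (var ms = new MemoryStream())
        using (var writer = new BinaryWriter(ms))
        {
            writer.Write((ushort)0x0001);          // Zip64 extended information tag
            writer.Write((ushort)(8 + 8 + 8 + 4)); // data size = 28
            writer.Write(uncompressedSize);
            writer.Write(compressedSize);
            writer.Write(relativeOffsetOfEntryHeader);
            writer.Write((uint)0);                 // volume (disk) number, always 0 here
            Console.WriteLine(ms.Length);          // 32 = 2 + 2 + 8 + 8 + 8 + 4, the length declared in the record
        }
    }
}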

View File

@@ -32,29 +32,69 @@ namespace SharpCompress.Common.Zip.Headers
{
Name = ((ExtraUnicodePathExtraField)unicodePathExtra).UnicodeName;
}
var zip64ExtraData = Extra.OfType<Zip64ExtendedInformationExtraField>().FirstOrDefault();
if (zip64ExtraData != null)
{
if (CompressedSize == uint.MaxValue)
{
CompressedSize = zip64ExtraData.CompressedSize;
}
if (UncompressedSize == uint.MaxValue)
{
UncompressedSize = zip64ExtraData.UncompressedSize;
}
}
}
internal override void Write(BinaryWriter writer)
{
if (IsZip64)
Version = (ushort)(Version > 45 ? Version : 45);
writer.Write(Version);
writer.Write((ushort)Flags);
writer.Write((ushort)CompressionMethod);
writer.Write(LastModifiedTime);
writer.Write(LastModifiedDate);
writer.Write(Crc);
writer.Write(CompressedSize);
writer.Write(UncompressedSize);
if (IsZip64)
{
writer.Write(uint.MaxValue);
writer.Write(uint.MaxValue);
}
else
{
writer.Write(CompressedSize);
writer.Write(UncompressedSize);
}
byte[] nameBytes = EncodeString(Name);
writer.Write((ushort)nameBytes.Length);
writer.Write((ushort)0);
if (IsZip64)
{
writer.Write((ushort)(2 + 2 + (2 * 8)));
}
else
{
writer.Write((ushort)0);
}
//if (Extra != null)
//{
// writer.Write(Extra);
//}
writer.Write(nameBytes);
if (IsZip64)
{
writer.Write((ushort)0x0001);
writer.Write((ushort)(2 * 8));
writer.Write((ulong)CompressedSize);
writer.Write((ulong)UncompressedSize);
}
}
internal ushort Version { get; private set; }

View File

@@ -1,5 +1,6 @@
using System;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Common.Zip.Headers
{
@@ -11,7 +12,8 @@ namespace SharpCompress.Common.Zip.Headers
// Third Party Mappings
// -Info-ZIP Unicode Path Extra Field
UnicodePathExtraField = 0x7075
UnicodePathExtraField = 0x7075,
Zip64ExtendedInformationExtraField = 0x0001
}
internal class ExtraData
@@ -23,7 +25,7 @@ namespace SharpCompress.Common.Zip.Headers
internal class ExtraUnicodePathExtraField : ExtraData
{
internal byte Version { get { return DataBytes[0]; } }
internal byte Version => DataBytes[0];
internal byte[] NameCRC32
{
@@ -47,6 +49,73 @@ namespace SharpCompress.Common.Zip.Headers
}
}
internal class Zip64ExtendedInformationExtraField : ExtraData
{
public Zip64ExtendedInformationExtraField(ExtraDataType type, ushort length, byte[] dataBytes)
{
Type = type;
Length = length;
DataBytes = dataBytes;
Process();
}
//Per the spec, values appear in the extra data only when the corresponding standard
//field is saturated (0xFFFFFFFF, or 0xFFFF for the disk number), but if one of the sizes is present, both are.
//Hence if length == 4 volume only
// if length == 8 offset only
// if length == 12 offset + volume
// if length == 16 sizes only
// if length == 20 sizes + volume
// if length == 24 sizes + offset
// if length == 28 everything.
//It is unclear how many of these are used in the wild; a standalone sketch of the sizes-only case follows this file's diff.
private void Process()
{
switch (DataBytes.Length)
{
case 4:
VolumeNumber = DataConverter.LittleEndian.GetUInt32(DataBytes, 0);
return;
case 8:
RelativeOffsetOfEntryHeader = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
return;
case 12:
RelativeOffsetOfEntryHeader = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
VolumeNumber = DataConverter.LittleEndian.GetUInt32(DataBytes, 8);
return;
case 16:
UncompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
CompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 8);
return;
case 20:
UncompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
CompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 8);
VolumeNumber = DataConverter.LittleEndian.GetUInt32(DataBytes, 16);
return;
case 24:
UncompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
CompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 8);
RelativeOffsetOfEntryHeader = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 16);
return;
case 28:
UncompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
CompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 8);
RelativeOffsetOfEntryHeader = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 16);
VolumeNumber = DataConverter.LittleEndian.GetUInt32(DataBytes, 24);
return;
default:
throw new ArchiveException("Unexpected size of of Zip64 extended information extra field");
}
}
public long UncompressedSize { get; private set; }
public long CompressedSize { get; private set; }
public long RelativeOffsetOfEntryHeader { get; private set; }
public uint VolumeNumber { get; private set; }
}
internal static class LocalEntryHeaderExtraFactory
{
internal static ExtraData Create(ExtraDataType type, ushort length, byte[] extraData)
@@ -60,6 +129,13 @@ namespace SharpCompress.Common.Zip.Headers
Length = length,
DataBytes = extraData
};
case ExtraDataType.Zip64ExtendedInformationExtraField:
return new Zip64ExtendedInformationExtraField
(
type,
length,
extraData
);
default:
return new ExtraData
{
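The Process method above dispatches purely on the payload length, so a 16-byte payload means "sizes only": uncompressed size followed by compressed size, both little-endian. A standalone sketch of that case, with BitConverter on an assumed little-endian host standing in for the library's DataConverter.LittleEndian:
using System;
class Zip64SizesOnlyExample
{
    static void Main()
    {
        // Build a 16-byte Zip64 extra-data payload: uncompressed size, then compressed size.
        byte[] dataBytes = new byte[16];
        BitConverter.GetBytes(5_000_000_000UL).CopyTo(dataBytes, 0); // uncompressed size
        BitConverter.GetBytes(4_000_000_000UL).CopyTo(dataBytes, 8); // compressed size
        // Parse it the same way the length == 16 case above does.
        long uncompressedSize = (long)BitConverter.ToUInt64(dataBytes, 0);
        long compressedSize = (long)BitConverter.ToUInt64(dataBytes, 8);
        Console.WriteLine($"uncompressed: {uncompressedSize}, compressed: {compressedSize}");
    }
}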

View File

@@ -0,0 +1,54 @@
using System;
using System.IO;
namespace SharpCompress.Common.Zip.Headers
{
internal class Zip64DirectoryEndHeader : ZipHeader
{
public Zip64DirectoryEndHeader()
: base(ZipHeaderType.Zip64DirectoryEnd)
{
}
internal override void Read(BinaryReader reader)
{
SizeOfDirectoryEndRecord = (long)reader.ReadUInt64();
VersionMadeBy = reader.ReadUInt16();
VersionNeededToExtract = reader.ReadUInt16();
VolumeNumber = reader.ReadUInt32();
FirstVolumeWithDirectory = reader.ReadUInt32();
TotalNumberOfEntriesInDisk = (long)reader.ReadUInt64();
TotalNumberOfEntries = (long)reader.ReadUInt64();
DirectorySize = (long)reader.ReadUInt64();
DirectoryStartOffsetRelativeToDisk = (long)reader.ReadUInt64();
DataSector = reader.ReadBytes((int)(SizeOfDirectoryEndRecord - SizeOfFixedHeaderDataExceptSignatureAndSizeFields));
}
const int SizeOfFixedHeaderDataExceptSignatureAndSizeFields = 44;
internal override void Write(BinaryWriter writer)
{
throw new NotImplementedException();
}
public long SizeOfDirectoryEndRecord { get; private set; }
public ushort VersionMadeBy { get; private set; }
public ushort VersionNeededToExtract { get; private set; }
public uint VolumeNumber { get; private set; }
public uint FirstVolumeWithDirectory { get; private set; }
public long TotalNumberOfEntriesInDisk { get; private set; }
public long TotalNumberOfEntries { get; private set; }
public long DirectorySize { get; private set; }
public long DirectoryStartOffsetRelativeToDisk { get; private set; }
public byte[] DataSector { get; private set; }
}
}

View File

@@ -0,0 +1,30 @@
using System.IO;
namespace SharpCompress.Common.Zip.Headers
{
internal class Zip64DirectoryEndLocatorHeader : ZipHeader
{
public Zip64DirectoryEndLocatorHeader()
: base(ZipHeaderType.Zip64DirectoryEndLocator)
{
}
internal override void Read(BinaryReader reader)
{
FirstVolumeWithDirectory = reader.ReadUInt32();
RelativeOffsetOfTheEndOfDirectoryRecord = (long)reader.ReadUInt64();
TotalNumberOfVolumes = reader.ReadUInt32();
}
internal override void Write(BinaryWriter writer)
{
throw new System.NotImplementedException();
}
public uint FirstVolumeWithDirectory { get; private set; }
public long RelativeOffsetOfTheEndOfDirectoryRecord { get; private set; }
public uint TotalNumberOfVolumes { get; private set; }
}
}

View File

@@ -57,15 +57,31 @@ namespace SharpCompress.Common.Zip.Headers
internal ZipCompressionMethod CompressionMethod { get; set; }
internal uint CompressedSize { get; set; }
internal long CompressedSize { get; set; }
internal long? DataStartPosition { get; set; }
internal uint UncompressedSize { get; set; }
internal long UncompressedSize { get; set; }
internal List<ExtraData> Extra { get; set; }
public string Password { get; set; }
internal PkwareTraditionalEncryptionData ComposeEncryptionData(Stream archiveStream)
{
if (archiveStream == null)
{
throw new ArgumentNullException(nameof(archiveStream));
}
var buffer = new byte[12];
archiveStream.Read(buffer, 0, 12);
PkwareTraditionalEncryptionData encryptionData = PkwareTraditionalEncryptionData.ForRead(Password, this, buffer);
return encryptionData;
}
internal PkwareTraditionalEncryptionData PkwareTraditionalEncryptionData { get; set; }
#if !NO_CRYPTO
internal WinzipAesEncryptionData WinzipAesEncryptionData { get; set; }
#endif
@@ -96,5 +112,7 @@ namespace SharpCompress.Common.Zip.Headers
}
internal ZipFilePart Part { get; set; }
internal bool IsZip64 => CompressedSize == uint.MaxValue;
}
}

View File

@@ -6,6 +6,8 @@
LocalEntry,
DirectoryEntry,
DirectoryEnd,
Split
Split,
Zip64DirectoryEnd,
Zip64DirectoryEndLocator
}
}

View File

@@ -23,15 +23,15 @@ namespace SharpCompress.Common.Zip
this.mode = mode;
}
public override bool CanRead { get { return (mode == CryptoMode.Decrypt); } }
public override bool CanRead => (mode == CryptoMode.Decrypt);
public override bool CanSeek { get { return false; } }
public override bool CanSeek => false;
public override bool CanWrite { get { return (mode == CryptoMode.Encrypt); } }
public override bool CanWrite => (mode == CryptoMode.Encrypt);
public override long Length { get { throw new NotSupportedException(); } }
public override long Length => throw new NotSupportedException();
public override long Position { get { throw new NotSupportedException(); } set { throw new NotSupportedException(); } }
public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
public override int Read(byte[] buffer, int offset, int count)
{

View File

@@ -9,11 +9,10 @@ namespace SharpCompress.Common.Zip
{
private static readonly CRC32 crc32 = new CRC32();
private readonly UInt32[] _Keys = {0x12345678, 0x23456789, 0x34567890};
private readonly string password;
private PkwareTraditionalEncryptionData(string password)
{
this.password = password;
Initialize(password);
}
private byte MagicByte
@@ -29,7 +28,6 @@ namespace SharpCompress.Common.Zip
byte[] encryptionHeader)
{
var encryptor = new PkwareTraditionalEncryptionData(password);
encryptor.InitializeKeys();
byte[] plainTextHeader = encryptor.Decrypt(encryptionHeader, encryptionHeader.Length);
if (plainTextHeader[11] != (byte)((header.Crc >> 24) & 0xff))
{
@@ -86,7 +84,7 @@ namespace SharpCompress.Common.Zip
return cipherText;
}
internal void InitializeKeys()
private void Initialize(string password)
{
byte[] p = StringToByteArray(password);
for (int i = 0; i < password.Length; i++)

View File

@@ -24,7 +24,7 @@ namespace SharpCompress.Common.Zip
return base.GetCompressedStream();
}
internal string Comment { get { return (Header as DirectoryEntryHeader).Comment; } }
internal string Comment => (Header as DirectoryEntryHeader).Comment;
private void LoadLocalHeader()
{

View File

@@ -9,6 +9,7 @@ namespace SharpCompress.Common.Zip
internal class SeekableZipHeaderFactory : ZipHeaderFactory
{
private const int MAX_ITERATIONS_FOR_DIRECTORY_HEADER = 4096;
private bool zip64;
internal SeekableZipHeaderFactory(string password)
: base(StreamingMode.Seekable, password)
@@ -16,11 +17,56 @@ namespace SharpCompress.Common.Zip
}
internal IEnumerable<DirectoryEntryHeader> ReadSeekableHeader(Stream stream)
{
var reader = new BinaryReader(stream);
SeekBackToHeader(stream, reader, DIRECTORY_END_HEADER_BYTES);
var entry = new DirectoryEndHeader();
entry.Read(reader);
if (entry.IsZip64)
{
zip64 = true;
SeekBackToHeader(stream, reader, ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR);
var zip64Locator = new Zip64DirectoryEndLocatorHeader();
zip64Locator.Read(reader);
stream.Seek(zip64Locator.RelativeOffsetOfTheEndOfDirectoryRecord, SeekOrigin.Begin);
uint zip64Signature = reader.ReadUInt32();
if(zip64Signature != ZIP64_END_OF_CENTRAL_DIRECTORY)
throw new ArchiveException("Failed to locate the Zip64 Header");
var zip64Entry = new Zip64DirectoryEndHeader();
zip64Entry.Read(reader);
stream.Seek(zip64Entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin);
}
else
{
stream.Seek(entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin);
}
long position = stream.Position;
while (true)
{
stream.Position = position;
uint signature = reader.ReadUInt32();
var directoryEntryHeader = ReadHeader(signature, reader, zip64) as DirectoryEntryHeader;
position = stream.Position;
if (directoryEntryHeader == null)
{
yield break;
}
//entry could be zero bytes so we need to know that.
directoryEntryHeader.HasData = directoryEntryHeader.CompressedSize != 0;
yield return directoryEntryHeader;
}
}
private static void SeekBackToHeader(Stream stream, BinaryReader reader, uint headerSignature)
{
long offset = 0;
uint signature;
BinaryReader reader = new BinaryReader(stream);
int iterationCount = 0;
do
{
@@ -34,33 +80,10 @@ namespace SharpCompress.Common.Zip
iterationCount++;
if (iterationCount > MAX_ITERATIONS_FOR_DIRECTORY_HEADER)
{
throw new ArchiveException(
"Could not find Zip file Directory at the end of the file. File may be corrupted.");
throw new ArchiveException("Could not find Zip file Directory at the end of the file. File may be corrupted.");
}
}
while (signature != DIRECTORY_END_HEADER_BYTES);
var entry = new DirectoryEndHeader();
entry.Read(reader);
stream.Seek(entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin);
DirectoryEntryHeader directoryEntryHeader = null;
long position = stream.Position;
while (true)
{
stream.Position = position;
signature = reader.ReadUInt32();
directoryEntryHeader = ReadHeader(signature, reader) as DirectoryEntryHeader;
position = stream.Position;
if (directoryEntryHeader == null)
{
yield break;
}
//entry could be zero bytes so we need to know that.
directoryEntryHeader.HasData = directoryEntryHeader.CompressedSize != 0;
yield return directoryEntryHeader;
}
while (signature != headerSignature);
}
internal LocalEntryHeader GetLocalHeader(Stream stream, DirectoryEntryHeader directoryEntryHeader)
@@ -68,7 +91,7 @@ namespace SharpCompress.Common.Zip
stream.Seek(directoryEntryHeader.RelativeOffsetOfEntryHeader, SeekOrigin.Begin);
BinaryReader reader = new BinaryReader(stream);
uint signature = reader.ReadUInt32();
var localEntryHeader = ReadHeader(signature, reader) as LocalEntryHeader;
var localEntryHeader = ReadHeader(signature, reader, zip64) as LocalEntryHeader;
if (localEntryHeader == null)
{
throw new InvalidOperationException();
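The rewritten ReadSeekableHeader above chains three records: the classic end-of-central-directory record, then (when its counters and offsets are saturated) the Zip64 locator at 0x07064b50, and finally the Zip64 end-of-central-directory record at 0x06064b50. A condensed, self-contained sketch of that chain for a seekable stream follows; it is not the library implementation, and the classic EOCD signature 0x06054b50 and the record layouts are taken from the zip specification.
using System;
using System.IO;
static class Zip64Probe
{
    const uint EndOfCentralDirectory = 0x06054b50;       // classic EOCD signature (zip spec)
    const uint Zip64EndOfCentralDirectory = 0x06064b50;  // ZIP64_END_OF_CENTRAL_DIRECTORY above
    const uint Zip64Locator = 0x07064b50;                // ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR above
    static long ScanBackwardsFor(Stream stream, BinaryReader reader, uint signature)
    {
        // Walk backwards until the 4-byte signature is found (the library bounds this scan).
        for (long pos = stream.Length - 4; pos >= 0; pos--)
        {
            stream.Position = pos;
            if (reader.ReadUInt32() == signature)
            {
                return pos;
            }
        }
        throw new InvalidDataException("End of central directory not found");
    }
    public static long FindCentralDirectoryOffset(Stream stream)
    {
        var reader = new BinaryReader(stream);
        long eocd = ScanBackwardsFor(stream, reader, EndOfCentralDirectory);
        // The 20-byte Zip64 locator, when present, sits immediately before the classic EOCD record.
        if (eocd >= 20)
        {
            stream.Position = eocd - 20;
            if (reader.ReadUInt32() == Zip64Locator)
            {
                reader.ReadUInt32();                         // disk holding the Zip64 EOCD record
                long zip64Eocd = (long)reader.ReadUInt64();  // offset of the Zip64 EOCD record
                stream.Position = zip64Eocd;
                if (reader.ReadUInt32() != Zip64EndOfCentralDirectory)
                {
                    throw new InvalidDataException("Failed to locate the Zip64 header");
                }
                reader.ReadUInt64();                         // size of this record
                reader.ReadBytes(2 + 2 + 4 + 4 + 8 + 8 + 8); // versions, disk numbers, entry counts, directory size
                return (long)reader.ReadUInt64();            // central directory start offset
            }
        }
        // Classic EOCD: the 4-byte directory offset lives 16 bytes after the signature.
        stream.Position = eocd + 16;
        return reader.ReadUInt32();
    }
}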

View File

@@ -28,7 +28,7 @@ namespace SharpCompress.Common.Zip
ZipHeader header = null;
BinaryReader reader = new BinaryReader(rewindableStream);
if (lastEntryHeader != null &&
FlagUtility.HasFlag(lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor))
(FlagUtility.HasFlag(lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor) || lastEntryHeader.IsZip64))
{
reader = (lastEntryHeader.Part as StreamingZipFilePart).FixStreamedFileLocation(ref rewindableStream);
long? pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;

View File

@@ -52,28 +52,28 @@ namespace SharpCompress.Common.Zip
}
}
public override long Crc { get { return filePart.Header.Crc; } }
public override long Crc => filePart.Header.Crc;
public override string Key { get { return filePart.Header.Name; } }
public override string Key => filePart.Header.Name;
public override long CompressedSize { get { return filePart.Header.CompressedSize; } }
public override long CompressedSize => filePart.Header.CompressedSize;
public override long Size { get { return filePart.Header.UncompressedSize; } }
public override long Size => filePart.Header.UncompressedSize;
public override DateTime? LastModifiedTime { get; }
public override DateTime? CreatedTime { get { return null; } }
public override DateTime? CreatedTime => null;
public override DateTime? LastAccessedTime { get { return null; } }
public override DateTime? LastAccessedTime => null;
public override DateTime? ArchivedTime { get { return null; } }
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted { get { return FlagUtility.HasFlag(filePart.Header.Flags, HeaderFlags.Encrypted); } }
public override bool IsEncrypted => FlagUtility.HasFlag(filePart.Header.Flags, HeaderFlags.Encrypted);
public override bool IsDirectory { get { return filePart.Header.IsDirectory; } }
public override bool IsDirectory => filePart.Header.IsDirectory;
public override bool IsSplit { get { return false; } }
public override bool IsSplit => false;
internal override IEnumerable<FilePart> Parts { get { return filePart.AsEnumerable<FilePart>(); } }
internal override IEnumerable<FilePart> Parts => filePart.AsEnumerable<FilePart>();
}
}

View File

@@ -24,7 +24,7 @@ namespace SharpCompress.Common.Zip
internal Stream BaseStream { get; private set; }
internal ZipFileEntry Header { get; set; }
internal override string FilePartName { get { return Header.Name; } }
internal override string FilePartName => Header.Name;
internal override Stream GetCompressedStream()
{
@@ -51,7 +51,7 @@ namespace SharpCompress.Common.Zip
protected abstract Stream CreateBaseStream();
protected bool LeaveStreamOpen { get { return FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor); } }
protected bool LeaveStreamOpen => FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor) || Header.IsZip64;
protected Stream CreateDecompressionStream(Stream stream)
{
@@ -126,18 +126,16 @@ namespace SharpCompress.Common.Zip
protected Stream GetCryptoStream(Stream plainStream)
{
if ((Header.CompressedSize == 0)
#if !NO_CRYPTO
&& ((Header.PkwareTraditionalEncryptionData != null)
|| (Header.WinzipAesEncryptionData != null)))
#else
&& (Header.PkwareTraditionalEncryptionData != null))
#endif
bool isFileEncrypted = FlagUtility.HasFlag(Header.Flags, HeaderFlags.Encrypted);
if (Header.CompressedSize == 0 && isFileEncrypted)
{
throw new NotSupportedException("Cannot encrypt file with unknown size at start.");
}
if ((Header.CompressedSize == 0)
if ((Header.CompressedSize == 0
&& FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor))
|| Header.IsZip64)
{
plainStream = new NonDisposingStream(plainStream); //make sure AES doesn't close
}
@@ -145,19 +143,40 @@ namespace SharpCompress.Common.Zip
{
plainStream = new ReadOnlySubStream(plainStream, Header.CompressedSize); //make sure AES doesn't close
}
if (Header.PkwareTraditionalEncryptionData != null)
if (isFileEncrypted)
{
Header.PkwareTraditionalEncryptionData.InitializeKeys();
return new PkwareTraditionalCryptoStream(plainStream, Header.PkwareTraditionalEncryptionData,
CryptoMode.Decrypt);
}
switch (Header.CompressionMethod)
{
case ZipCompressionMethod.None:
case ZipCompressionMethod.Deflate:
case ZipCompressionMethod.Deflate64:
case ZipCompressionMethod.BZip2:
case ZipCompressionMethod.LZMA:
case ZipCompressionMethod.PPMd:
{
return new PkwareTraditionalCryptoStream(plainStream, Header.ComposeEncryptionData(plainStream), CryptoMode.Decrypt);
}
case ZipCompressionMethod.WinzipAes:
{
#if !NO_FILE
if (Header.WinzipAesEncryptionData != null)
{
//only read 10 less because the last ten are auth bytes
return new WinzipAesCryptoStream(plainStream, Header.WinzipAesEncryptionData, Header.CompressedSize - 10);
}
if (Header.WinzipAesEncryptionData != null)
{
return new WinzipAesCryptoStream(plainStream, Header.WinzipAesEncryptionData, Header.CompressedSize - 10);
}
#endif
return plainStream;
}
default:
{
throw new ArgumentOutOfRangeException();
}
}
}
return plainStream;
}
}

View File

@@ -17,8 +17,8 @@ namespace SharpCompress.Common.Zip
internal const uint DIGITAL_SIGNATURE = 0x05054b50;
internal const uint SPLIT_ARCHIVE_HEADER_BYTES = 0x30304b50;
private const uint ZIP64_END_OF_CENTRAL_DIRECTORY = 0x06064b50;
private const uint ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR = 0x07064b50;
internal const uint ZIP64_END_OF_CENTRAL_DIRECTORY = 0x06064b50;
internal const uint ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR = 0x07064b50;
protected LocalEntryHeader lastEntryHeader;
private readonly string password;
@@ -30,7 +30,7 @@ namespace SharpCompress.Common.Zip
this.password = password;
}
protected ZipHeader ReadHeader(uint headerBytes, BinaryReader reader)
protected ZipHeader ReadHeader(uint headerBytes, BinaryReader reader, bool zip64 = false)
{
switch (headerBytes)
{
@@ -54,14 +54,12 @@ namespace SharpCompress.Common.Zip
if (FlagUtility.HasFlag(lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor))
{
lastEntryHeader.Crc = reader.ReadUInt32();
lastEntryHeader.CompressedSize = reader.ReadUInt32();
lastEntryHeader.UncompressedSize = reader.ReadUInt32();
lastEntryHeader.CompressedSize = zip64 ? (long)reader.ReadUInt64() : reader.ReadUInt32();
lastEntryHeader.UncompressedSize = zip64 ? (long)reader.ReadUInt64() : reader.ReadUInt32();
}
else
{
reader.ReadUInt32();
reader.ReadUInt32();
reader.ReadUInt32();
reader.ReadBytes(zip64 ? 20 : 12);
}
return null;
}
@@ -78,9 +76,14 @@ namespace SharpCompress.Common.Zip
return new SplitHeader();
}
case ZIP64_END_OF_CENTRAL_DIRECTORY:
{
var entry = new Zip64DirectoryEndHeader();
entry.Read(reader);
return entry;
}
case ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR:
{
var entry = new IgnoreHeader(ZipHeaderType.Ignore);
var entry = new Zip64DirectoryEndLocatorHeader();
entry.Read(reader);
return entry;
}
@@ -111,46 +114,43 @@ namespace SharpCompress.Common.Zip
{
if (FlagUtility.HasFlag(entryHeader.Flags, HeaderFlags.Encrypted))
{
if (!entryHeader.IsDirectory &&
entryHeader.CompressedSize == 0 &&
if (!entryHeader.IsDirectory && entryHeader.CompressedSize == 0 &&
FlagUtility.HasFlag(entryHeader.Flags, HeaderFlags.UsePostDataDescriptor))
{
throw new NotSupportedException(
"SharpCompress cannot currently read non-seekable Zip Streams with encrypted data that has been written in a non-seekable manner.");
throw new NotSupportedException("SharpCompress cannot currently read non-seekable Zip Streams with encrypted data that has been written in a non-seekable manner.");
}
if (password == null)
{
throw new CryptographicException("No password supplied for encrypted zip.");
}
if (entryHeader.CompressionMethod != ZipCompressionMethod.WinzipAes)
{
byte[] buffer = new byte[12];
stream.Read(buffer, 0, 12);
entryHeader.PkwareTraditionalEncryptionData = PkwareTraditionalEncryptionData.ForRead(password,
entryHeader,
buffer);
entryHeader.CompressedSize -= 12;
}
else
entryHeader.Password = password;
if (entryHeader.CompressionMethod == ZipCompressionMethod.WinzipAes)
{
#if NO_CRYPTO
throw new NotSupportedException("Cannot decrypt Winzip AES with Silverlight or WP7.");
#else
var data = entryHeader.Extra.SingleOrDefault(x => x.Type == ExtraDataType.WinZipAes);
WinzipAesKeySize keySize = (WinzipAesKeySize) data.DataBytes[4];
ExtraData data = entryHeader.Extra.SingleOrDefault(x => x.Type == ExtraDataType.WinZipAes);
if (data != null)
{
var keySize = (WinzipAesKeySize)data.DataBytes[4];
byte[] salt = new byte[WinzipAesEncryptionData.KeyLengthInBytes(keySize)/2];
byte[] passwordVerifyValue = new byte[2];
stream.Read(salt, 0, salt.Length);
stream.Read(passwordVerifyValue, 0, 2);
entryHeader.WinzipAesEncryptionData = new WinzipAesEncryptionData(keySize, salt, passwordVerifyValue,
password);
entryHeader.CompressedSize -= (uint) (salt.Length + 2);
var salt = new byte[WinzipAesEncryptionData.KeyLengthInBytes(keySize) / 2];
var passwordVerifyValue = new byte[2];
stream.Read(salt, 0, salt.Length);
stream.Read(passwordVerifyValue, 0, 2);
entryHeader.WinzipAesEncryptionData =
new WinzipAesEncryptionData(keySize, salt, passwordVerifyValue, password);
entryHeader.CompressedSize -= (uint)(salt.Length + 2);
}
#endif
}
}
if (entryHeader.IsDirectory)
{
return;
@@ -168,13 +168,15 @@ namespace SharpCompress.Common.Zip
{
entryHeader.DataStartPosition = stream.Position;
stream.Position += entryHeader.CompressedSize;
}
break;
}
case StreamingMode.Streaming:
{
entryHeader.PackedStream = stream;
}
break;
}
default:
{
throw new InvalidFormatException("Invalid StreamingMode");

View File

@@ -73,15 +73,15 @@ namespace SharpCompress.Compressors.ADC
this.stream = stream;
}
public override bool CanRead { get { return stream.CanRead; } }
public override bool CanRead => stream.CanRead;
public override bool CanSeek { get { return false; } }
public override bool CanSeek => false;
public override bool CanWrite { get { return false; } }
public override bool CanWrite => false;
public override long Length { get { throw new NotSupportedException(); } }
public override long Length => throw new NotSupportedException();
public override long Position { get { return position; } set { throw new NotSupportedException(); } }
public override long Position { get => position; set => throw new NotSupportedException(); }
public override void Flush()
{

View File

@@ -48,20 +48,20 @@ namespace SharpCompress.Compressors.BZip2
public CompressionMode Mode { get; }
public override bool CanRead { get { return stream.CanRead; } }
public override bool CanRead => stream.CanRead;
public override bool CanSeek { get { return stream.CanSeek; } }
public override bool CanSeek => stream.CanSeek;
public override bool CanWrite { get { return stream.CanWrite; } }
public override bool CanWrite => stream.CanWrite;
public override void Flush()
{
stream.Flush();
}
public override long Length { get { return stream.Length; } }
public override long Length => stream.Length;
public override long Position { get { return stream.Position; } set { stream.Position = value; } }
public override long Position { get => stream.Position; set => stream.Position = value; }
public override int Read(byte[] buffer, int offset, int count)
{

View File

@@ -1092,13 +1092,13 @@ namespace SharpCompress.Compressors.BZip2
{
}
public override bool CanRead { get { return true; } }
public override bool CanRead => true;
public override bool CanSeek { get { return false; } }
public override bool CanSeek => false;
public override bool CanWrite { get { return false; } }
public override bool CanWrite => false;
public override long Length { get { return 0; } }
public override long Length => 0;
public override long Position { get { return 0; } set { } }
}

View File

@@ -1956,13 +1956,13 @@ namespace SharpCompress.Compressors.BZip2
}
}
public override bool CanRead { get { return false; } }
public override bool CanRead => false;
public override bool CanSeek { get { return false; } }
public override bool CanSeek => false;
public override bool CanWrite { get { return true; } }
public override bool CanWrite => true;
public override long Length { get { return 0; } }
public override long Length => 0;
public override long Position { get { return 0; } set { } }
}

View File

@@ -92,14 +92,7 @@ namespace SharpCompress.Compressors.Deflate
/// <summary>
/// Indicates the current CRC for all blocks slurped in.
/// </summary>
public Int32 Crc32Result
{
get
{
// return one's complement of the running result
return unchecked((Int32)(~runningCrc32Result));
}
}
public Int32 Crc32Result => unchecked((Int32)(~runningCrc32Result));
/// <summary>
/// Returns the CRC32 for the specified stream.

View File

@@ -50,7 +50,7 @@ namespace SharpCompress.Compressors.Deflate
/// </remarks>
public virtual FlushType FlushMode
{
get { return (_baseStream._flushMode); }
get => (_baseStream._flushMode);
set
{
if (_disposed)
@@ -80,7 +80,7 @@ namespace SharpCompress.Compressors.Deflate
/// </remarks>
public int BufferSize
{
get { return _baseStream._bufferSize; }
get => _baseStream._bufferSize;
set
{
if (_disposed)
@@ -111,7 +111,7 @@ namespace SharpCompress.Compressors.Deflate
/// </remarks>
public CompressionStrategy Strategy
{
get { return _baseStream.Strategy; }
get => _baseStream.Strategy;
set
{
if (_disposed)
@@ -123,10 +123,10 @@ namespace SharpCompress.Compressors.Deflate
}
/// <summary> Returns the total number of bytes input so far.</summary>
public virtual long TotalIn { get { return _baseStream._z.TotalBytesIn; } }
public virtual long TotalIn => _baseStream._z.TotalBytesIn;
/// <summary> Returns the total number of bytes output so far.</summary>
public virtual long TotalOut { get { return _baseStream._z.TotalBytesOut; } }
public virtual long TotalOut => _baseStream._z.TotalBytesOut;
#endregion
@@ -156,7 +156,7 @@ namespace SharpCompress.Compressors.Deflate
/// <remarks>
/// Always returns false.
/// </remarks>
public override bool CanSeek { get { return false; } }
public override bool CanSeek => false;
/// <summary>
/// Indicates whether the stream can be written.
@@ -179,7 +179,7 @@ namespace SharpCompress.Compressors.Deflate
/// <summary>
/// Reading this property always throws a <see cref="NotSupportedException"/>.
/// </summary>
public override long Length { get { throw new NotSupportedException(); } }
public override long Length => throw new NotSupportedException();
/// <summary>
/// The position of the stream pointer.
@@ -206,7 +206,7 @@ namespace SharpCompress.Compressors.Deflate
}
return 0;
}
set { throw new NotSupportedException(); }
set => throw new NotSupportedException();
}
/// <summary>
@@ -342,13 +342,7 @@ namespace SharpCompress.Compressors.Deflate
#endregion
public MemoryStream InputBuffer
{
get
{
return new MemoryStream(_baseStream._z.InputBuffer, _baseStream._z.NextIn,
_baseStream._z.AvailableBytesIn);
}
}
public MemoryStream InputBuffer => new MemoryStream(_baseStream._z.InputBuffer, _baseStream._z.NextIn,
_baseStream._z.AvailableBytesIn);
}
}

View File

@@ -71,7 +71,7 @@ namespace SharpCompress.Compressors.Deflate
public virtual FlushType FlushMode
{
get { return (BaseStream._flushMode); }
get => (BaseStream._flushMode);
set
{
if (disposed)
@@ -84,7 +84,7 @@ namespace SharpCompress.Compressors.Deflate
public int BufferSize
{
get { return BaseStream._bufferSize; }
get => BaseStream._bufferSize;
set
{
if (disposed)
@@ -105,9 +105,9 @@ namespace SharpCompress.Compressors.Deflate
}
}
internal virtual long TotalIn { get { return BaseStream._z.TotalBytesIn; } }
internal virtual long TotalIn => BaseStream._z.TotalBytesIn;
internal virtual long TotalOut { get { return BaseStream._z.TotalBytesOut; } }
internal virtual long TotalOut => BaseStream._z.TotalBytesOut;
#endregion
@@ -137,7 +137,7 @@ namespace SharpCompress.Compressors.Deflate
/// <remarks>
/// Always returns false.
/// </remarks>
public override bool CanSeek { get { return false; } }
public override bool CanSeek => false;
/// <summary>
/// Indicates whether the stream can be written.
@@ -160,7 +160,7 @@ namespace SharpCompress.Compressors.Deflate
/// <summary>
/// Reading this property always throws a <see cref="NotSupportedException"/>.
/// </summary>
public override long Length { get { throw new NotSupportedException(); } }
public override long Length => throw new NotSupportedException();
/// <summary>
/// The position of the stream pointer.
@@ -188,7 +188,7 @@ namespace SharpCompress.Compressors.Deflate
return 0;
}
set { throw new NotSupportedException(); }
set => throw new NotSupportedException();
}
/// <summary>
@@ -350,7 +350,7 @@ namespace SharpCompress.Compressors.Deflate
public String Comment
{
get { return comment; }
get => comment;
set
{
if (disposed)
@@ -363,7 +363,7 @@ namespace SharpCompress.Compressors.Deflate
public string FileName
{
get { return fileName; }
get => fileName;
set
{
if (disposed)

View File

@@ -98,7 +98,7 @@ namespace SharpCompress.Compressors.Deflate
}
}
protected internal bool _wantCompress { get { return (_compressionMode == CompressionMode.Compress); } }
protected internal bool _wantCompress => (_compressionMode == CompressionMode.Compress);
private ZlibCodec z
{
@@ -630,15 +630,15 @@ namespace SharpCompress.Compressors.Deflate
return rc;
}
public override Boolean CanRead { get { return _stream.CanRead; } }
public override Boolean CanRead => _stream.CanRead;
public override Boolean CanSeek { get { return _stream.CanSeek; } }
public override Boolean CanSeek => _stream.CanSeek;
public override Boolean CanWrite { get { return _stream.CanWrite; } }
public override Boolean CanWrite => _stream.CanWrite;
public override Int64 Length { get { return _stream.Length; } }
public override Int64 Length => _stream.Length;
public override long Position { get { throw new NotSupportedException(); } set { throw new NotSupportedException(); } }
public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
internal enum StreamMode
{

View File

@@ -171,7 +171,7 @@ namespace SharpCompress.Compressors.Deflate
/// <summary>
/// The Adler32 checksum on the data transferred through the codec so far. You probably don't need to look at this.
/// </summary>
public int Adler32 { get { return (int)_Adler32; } }
public int Adler32 => (int)_Adler32;
/// <summary>
/// Create a ZlibCodec.

View File

@@ -63,7 +63,7 @@ namespace SharpCompress.Compressors.Deflate
/// </summary>
public virtual FlushType FlushMode
{
get { return (_baseStream._flushMode); }
get => (_baseStream._flushMode);
set
{
if (_disposed)
@@ -93,7 +93,7 @@ namespace SharpCompress.Compressors.Deflate
/// </remarks>
public int BufferSize
{
get { return _baseStream._bufferSize; }
get => _baseStream._bufferSize;
set
{
if (_disposed)
@@ -115,10 +115,10 @@ namespace SharpCompress.Compressors.Deflate
}
/// <summary> Returns the total number of bytes input so far.</summary>
public virtual long TotalIn { get { return _baseStream._z.TotalBytesIn; } }
public virtual long TotalIn => _baseStream._z.TotalBytesIn;
/// <summary> Returns the total number of bytes output so far.</summary>
public virtual long TotalOut { get { return _baseStream._z.TotalBytesOut; } }
public virtual long TotalOut => _baseStream._z.TotalBytesOut;
#endregion
@@ -148,7 +148,7 @@ namespace SharpCompress.Compressors.Deflate
/// <remarks>
/// Always returns false.
/// </remarks>
public override bool CanSeek { get { return false; } }
public override bool CanSeek => false;
/// <summary>
/// Indicates whether the stream can be written.
@@ -171,7 +171,7 @@ namespace SharpCompress.Compressors.Deflate
/// <summary>
/// Reading this property always throws a <see cref="NotSupportedException"/>.
/// </summary>
public override long Length { get { throw new NotSupportedException(); } }
public override long Length => throw new NotSupportedException();
/// <summary>
/// The position of the stream pointer.
@@ -199,7 +199,7 @@ namespace SharpCompress.Compressors.Deflate
return 0;
}
set { throw new NotSupportedException(); }
set => throw new NotSupportedException();
}
/// <summary>

View File

@@ -78,20 +78,20 @@ namespace SharpCompress.Compressors.Filters
baseStream.Dispose();
}
public override bool CanRead { get { return true; } }
public override bool CanRead => true;
public override bool CanSeek { get { return false; } }
public override bool CanSeek => false;
public override bool CanWrite { get { return false; } }
public override bool CanWrite => false;
public override void Flush()
{
throw new NotSupportedException();
}
public override long Length { get { return baseStream.Length + data1.Length + data2.Length; } }
public override long Length => baseStream.Length + data1.Length + data2.Length;
public override long Position { get { return position; } set { throw new NotSupportedException(); } }
public override long Position { get => position; set => throw new NotSupportedException(); }
public override int Read(byte[] buffer, int offset, int count)
{

View File

@@ -34,20 +34,20 @@ namespace SharpCompress.Compressors.Filters
baseStream.Dispose();
}
public override bool CanRead { get { return !isEncoder; } }
public override bool CanRead => !isEncoder;
public override bool CanSeek { get { return false; } }
public override bool CanSeek => false;
public override bool CanWrite { get { return isEncoder; } }
public override bool CanWrite => isEncoder;
public override void Flush()
{
throw new NotSupportedException();
}
public override long Length { get { return baseStream.Length; } }
public override long Length => baseStream.Length;
public override long Position { get { return baseStream.Position; } set { throw new NotSupportedException(); } }
public override long Position { get => baseStream.Position; set => throw new NotSupportedException(); }
public override int Read(byte[] buffer, int offset, int count)
{

View File

@@ -8,20 +8,20 @@ namespace SharpCompress.Compressors.LZMA
{
internal abstract class DecoderStream2 : Stream
{
public override bool CanRead { get { return true; } }
public override bool CanRead => true;
public override bool CanSeek { get { return false; } }
public override bool CanSeek => false;
public override bool CanWrite { get { return false; } }
public override bool CanWrite => false;
public override void Flush()
{
throw new NotSupportedException();
}
public override long Length { get { throw new NotSupportedException(); } }
public override long Length => throw new NotSupportedException();
public override long Position { get { throw new NotSupportedException(); } set { throw new NotSupportedException(); } }
public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
public override long Seek(long offset, SeekOrigin origin)
{

View File

@@ -178,6 +178,6 @@ namespace SharpCompress.Compressors.LZMA.LZ
_streamPos -= (UInt32)subValue;
}
public bool IsDataStarved { get { return _streamPos - _pos < _keepSizeAfter; } }
public bool IsDataStarved => _streamPos - _pos < _keepSizeAfter;
}
}

View File

@@ -166,9 +166,9 @@ namespace SharpCompress.Compressors.LZMA.LZ
Limit = Total + size;
}
public bool HasSpace { get { return _pos < _windowSize && Total < Limit; } }
public bool HasSpace => _pos < _windowSize && Total < Limit;
public bool HasPending { get { return _pendingLen > 0; } }
public bool HasPending => _pendingLen > 0;
public int Read(byte[] buffer, int offset, int count)
{
@@ -200,6 +200,6 @@ namespace SharpCompress.Compressors.LZMA.LZ
}
}
public int AvailableBytes { get { return _pos - _streamPos; } }
public int AvailableBytes => _pos - _streamPos;
}
}

View File

@@ -69,9 +69,9 @@ namespace SharpCompress.Compressors.LZMA
// TODO: Both Length and Position are sometimes feasible, but would require
// reading the output length when we initialize.
public override long Length { get { throw new NotImplementedException(); } }
public override long Length => throw new NotImplementedException();
public override long Position { get { throw new NotImplementedException(); } set { throw new NotImplementedException(); } }
public override long Position { get => throw new NotImplementedException(); set => throw new NotImplementedException(); }
public override int Read(byte[] buffer, int offset, int count) => stream.Read(buffer, offset, count);

View File

@@ -118,11 +118,11 @@ namespace SharpCompress.Compressors.LZMA
}
}
public override bool CanRead { get { return encoder == null; } }
public override bool CanRead => encoder == null;
public override bool CanSeek { get { return false; } }
public override bool CanSeek => false;
public override bool CanWrite { get { return encoder != null; } }
public override bool CanWrite => encoder != null;
public override void Flush()
{
@@ -149,9 +149,9 @@ namespace SharpCompress.Compressors.LZMA
base.Dispose(disposing);
}
public override long Length { get { return position + availableBytes; } }
public override long Length => position + availableBytes;
public override long Position { get { return position; } set { throw new NotSupportedException(); } }
public override long Position { get => position; set => throw new NotSupportedException(); }
public override int Read(byte[] buffer, int offset, int count)
{

View File

@@ -245,7 +245,7 @@ namespace SharpCompress.Compressors.LZMA.RangeCoder
return symbol;
}
public bool IsFinished { get { return Code == 0; } }
public bool IsFinished => Code == 0;
// ulong GetProcessedSize() {return Stream.GetProcessedSize(); }
}

View File

@@ -40,19 +40,19 @@ namespace SharpCompress.Compressors.LZMA.Utilites
return mCRC;
}
public override bool CanRead { get { return false; } }
public override bool CanRead => false;
public override bool CanSeek { get { return false; } }
public override bool CanSeek => false;
public override bool CanWrite { get { return true; } }
public override bool CanWrite => true;
public override void Flush()
{
}
public override long Length { get { throw new NotSupportedException(); } }
public override long Length => throw new NotSupportedException();
public override long Position { get { throw new NotSupportedException(); } set { throw new NotSupportedException(); } }
public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
public override int Read(byte[] buffer, int offset, int count)
{
@@ -122,20 +122,20 @@ namespace SharpCompress.Compressors.LZMA.Utilites
return mCRC;
}
public override bool CanRead { get { return mSource.CanRead; } }
public override bool CanRead => mSource.CanRead;
public override bool CanSeek { get { return false; } }
public override bool CanSeek => false;
public override bool CanWrite { get { return false; } }
public override bool CanWrite => false;
public override void Flush()
{
throw new NotSupportedException();
}
public override long Length { get { throw new NotSupportedException(); } }
public override long Length => throw new NotSupportedException();
public override long Position { get { throw new NotSupportedException(); } set { throw new NotSupportedException(); } }
public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
public override int Read(byte[] buffer, int offset, int count)
{

View File

@@ -62,19 +62,19 @@ namespace SharpCompress.Compressors.LZMA.Utilites
}
}
public override bool CanRead { get { return false; } }
public override bool CanRead => false;
public override bool CanSeek { get { return false; } }
public override bool CanSeek => false;
public override bool CanWrite { get { return true; } }
public override bool CanWrite => true;
public override void Flush()
{
}
public override long Length { get { throw new NotSupportedException(); } }
public override long Length => throw new NotSupportedException();
public override long Position { get { throw new NotSupportedException(); } set { throw new NotSupportedException(); } }
public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
public override int Read(byte[] buffer, int offset, int count)
{

View File

@@ -19,7 +19,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
}
internal int SummFreq { get { return DataConverter.LittleEndian.GetInt16(Memory, Address) & 0xffff; } set { DataConverter.LittleEndian.PutBytes(Memory, Address, (short)value); } }
internal int SummFreq { get => DataConverter.LittleEndian.GetInt16(Memory, Address) & 0xffff; set => DataConverter.LittleEndian.PutBytes(Memory, Address, (short)value); }
internal FreqData Initialize(byte[] mem)
{

View File

@@ -22,33 +22,33 @@ namespace SharpCompress.Compressors.PPMd.H
public SubAllocator SubAlloc { get; } = new SubAllocator();
public virtual SEE2Context DummySEE2Cont { get { return dummySEE2Cont; } }
public virtual SEE2Context DummySEE2Cont => dummySEE2Cont;
public virtual int InitRL { get { return initRL; } }
public virtual int InitRL => initRL;
public virtual int EscCount { get { return escCount; } set { escCount = value & 0xff; } }
public virtual int EscCount { get => escCount; set => escCount = value & 0xff; }
public virtual int[] CharMask { get { return charMask; } }
public virtual int[] CharMask => charMask;
public virtual int NumMasked { get { return numMasked; } set { numMasked = value; } }
public virtual int NumMasked { get => numMasked; set => numMasked = value; }
public virtual int PrevSuccess { get { return prevSuccess; } set { prevSuccess = value & 0xff; } }
public virtual int PrevSuccess { get => prevSuccess; set => prevSuccess = value & 0xff; }
public virtual int InitEsc { get { return initEsc; } set { initEsc = value; } }
public virtual int InitEsc { get => initEsc; set => initEsc = value; }
public virtual int RunLength { get { return runLength; } set { runLength = value; } }
public virtual int RunLength { get => runLength; set => runLength = value; }
public virtual int HiBitsFlag { get { return hiBitsFlag; } set { hiBitsFlag = value & 0xff; } }
public virtual int HiBitsFlag { get => hiBitsFlag; set => hiBitsFlag = value & 0xff; }
public virtual int[][] BinSumm { get { return binSumm; } }
public virtual int[][] BinSumm => binSumm;
internal RangeCoder Coder { get; private set; }
internal State FoundState { get; private set; }
public virtual byte[] Heap { get { return SubAlloc.Heap; } }
public virtual byte[] Heap => SubAlloc.Heap;
public virtual int OrderFall { get { return orderFall; } }
public virtual int OrderFall => orderFall;
public const int MAX_O = 64; /* maximum allowed model order */

View File

@@ -8,8 +8,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
internal FreqData FreqData
{
get { return freqData; }
get => freqData;
set
{
freqData.SummFreq = value.SummFreq;
@@ -131,7 +130,7 @@ namespace SharpCompress.Compressors.PPMd.H
internal override int Address
{
get { return base.Address; }
get => base.Address;
set
{
base.Address = value;

View File

@@ -131,11 +131,11 @@ namespace SharpCompress.Compressors.PPMd.H
Scale = Scale + dScale;
}
internal long HighCount { get { return highCount; } set { highCount = value & RangeCoder.UintMask; } }
internal long HighCount { get => highCount; set => highCount = value & RangeCoder.UintMask; }
internal long LowCount { get { return lowCount & RangeCoder.UintMask; } set { lowCount = value & RangeCoder.UintMask; } }
internal long LowCount { get => lowCount & RangeCoder.UintMask; set => lowCount = value & RangeCoder.UintMask; }
internal long Scale { get { return scale; } set { scale = value & RangeCoder.UintMask; } }
internal long Scale { get => scale; set => scale = value & RangeCoder.UintMask; }
// Debug
public override String ToString()

View File

@@ -15,11 +15,11 @@ namespace SharpCompress.Compressors.PPMd.H
}
}
public virtual int Count { get { return count; } set { count = value & 0xff; } }
public virtual int Count { get => count; set => count = value & 0xff; }
public virtual int Shift { get { return shift; } set { shift = value & 0xff; } }
public virtual int Shift { get => shift; set => shift = value & 0xff; }
public virtual int Summ { get { return summ; } set { summ = value & 0xffff; } }
public virtual int Summ { get => summ; set => summ = value & 0xffff; }
public const int size = 4;

View File

@@ -13,9 +13,9 @@ namespace SharpCompress.Compressors.PPMd.H
{
}
internal int Symbol { get { return Memory[Address] & 0xff; } set { Memory[Address] = (byte)value; } }
internal int Symbol { get => Memory[Address] & 0xff; set => Memory[Address] = (byte)value; }
internal int Freq { get { return Memory[Address + 1] & 0xff; } set { Memory[Address + 1] = (byte)value; } }
internal int Freq { get => Memory[Address + 1] & 0xff; set => Memory[Address + 1] = (byte)value; }
internal State Initialize(byte[] mem)
{

View File

@@ -11,9 +11,9 @@ namespace SharpCompress.Compressors.PPMd.H
private int successor; // pointer ppmcontext
internal int Symbol { get { return symbol; } set { symbol = value & 0xff; } }
internal int Symbol { get => symbol; set => symbol = value & 0xff; }
internal int Freq { get { return freq; } set { freq = value & 0xff; } }
internal int Freq { get => freq; set => freq = value & 0xff; }
internal State Values
{

View File

@@ -5,15 +5,15 @@ namespace SharpCompress.Compressors.PPMd.H
{
internal class SubAllocator
{
public virtual int FakeUnitsStart { get { return fakeUnitsStart; } set { fakeUnitsStart = value; } }
public virtual int FakeUnitsStart { get => fakeUnitsStart; set => fakeUnitsStart = value; }
public virtual int HeapEnd { get { return heapEnd; } }
public virtual int HeapEnd => heapEnd;
public virtual int PText { get { return pText; } set { pText = value; } }
public virtual int PText { get => pText; set => pText = value; }
public virtual int UnitsStart { get { return unitsStart; } set { unitsStart = value; } }
public virtual int UnitsStart { get => unitsStart; set => unitsStart = value; }
public virtual byte[] Heap { get { return heap; } }
public virtual byte[] Heap => heap;
//UPGRADE_NOTE: Final was removed from the declaration of 'N4 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
public const int N1 = 4;

View File

@@ -48,11 +48,8 @@ namespace SharpCompress.Compressors.PPMd.I1
/// </summary>
public uint Stamp
{
get
{
return Memory[Address] | ((uint)Memory[Address + 1]) << 8 | ((uint)Memory[Address + 2]) << 16 |
((uint)Memory[Address + 3]) << 24;
}
get => Memory[Address] | ((uint)Memory[Address + 1]) << 8 | ((uint)Memory[Address + 2]) << 16 |
((uint)Memory[Address + 3]) << 24;
set
{
Memory[Address] = (byte)value;
@@ -67,13 +64,9 @@ namespace SharpCompress.Compressors.PPMd.I1
/// </summary>
public MemoryNode Next
{
get
{
return
new MemoryNode(
Memory[Address + 4] | ((uint)Memory[Address + 5]) << 8 |
((uint)Memory[Address + 6]) << 16 | ((uint)Memory[Address + 7]) << 24, Memory);
}
get => new MemoryNode(
Memory[Address + 4] | ((uint)Memory[Address + 5]) << 8 |
((uint)Memory[Address + 6]) << 16 | ((uint)Memory[Address + 7]) << 24, Memory);
set
{
Memory[Address + 4] = (byte)value.Address;
@@ -88,11 +81,8 @@ namespace SharpCompress.Compressors.PPMd.I1
/// </summary>
public uint UnitCount
{
get
{
return Memory[Address + 8] | ((uint)Memory[Address + 9]) << 8 |
((uint)Memory[Address + 10]) << 16 | ((uint)Memory[Address + 11]) << 24;
}
get => Memory[Address + 8] | ((uint)Memory[Address + 9]) << 8 |
((uint)Memory[Address + 10]) << 16 | ((uint)Memory[Address + 11]) << 24;
set
{
Memory[Address + 8] = (byte)value;
@@ -105,7 +95,7 @@ namespace SharpCompress.Compressors.PPMd.I1
/// <summary>
/// Gets whether there is a next memory node available.
/// </summary>
public bool Available { get { return Next.Address != 0; } }
public bool Available => Next.Address != 0;
/// <summary>
/// Link in the provided memory node.
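MemoryNode is not a real object graph; it is a 12-byte record viewed through an offset into a shared byte[] heap, with each uint field stored little-endian by hand. A minimal sketch of that encode/decode convention (ReadU32/WriteU32 are illustrative helpers, not SharpCompress APIs):

internal static class LittleEndianSketch
{
    // Read a little-endian uint starting at 'address' (what the Stamp,
    // Next and UnitCount getters do at offsets +0, +4 and +8).
    internal static uint ReadU32(byte[] memory, uint address)
    {
        return memory[address]
               | ((uint)memory[address + 1]) << 8
               | ((uint)memory[address + 2]) << 16
               | ((uint)memory[address + 3]) << 24;
    }

    // Write it back one byte at a time (what the setters do).
    internal static void WriteU32(byte[] memory, uint address, uint value)
    {
        memory[address] = (byte)value;
        memory[address + 1] = (byte)(value >> 8);
        memory[address + 2] = (byte)(value >> 16);
        memory[address + 3] = (byte)(value >> 24);
    }
}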

View File

@@ -34,19 +34,19 @@ namespace SharpCompress.Compressors.PPMd.I1
/// <summary>
/// Gets or sets the number statistics.
/// </summary>
public byte NumberStatistics { get { return Memory[Address]; } set { Memory[Address] = value; } }
public byte NumberStatistics { get => Memory[Address]; set => Memory[Address] = value; }
/// <summary>
/// Gets or sets the flags.
/// </summary>
public byte Flags { get { return Memory[Address + 1]; } set { Memory[Address + 1] = value; } }
public byte Flags { get => Memory[Address + 1]; set => Memory[Address + 1] = value; }
/// <summary>
/// Gets or sets the summary frequency.
/// </summary>
public ushort SummaryFrequency
{
get { return (ushort)(Memory[Address + 2] | Memory[Address + 3] << 8); }
get => (ushort)(Memory[Address + 2] | Memory[Address + 3] << 8);
set
{
Memory[Address + 2] = (byte)value;
@@ -59,13 +59,9 @@ namespace SharpCompress.Compressors.PPMd.I1
/// </summary>
public PpmState Statistics
{
get
{
return
new PpmState(
Memory[Address + 4] | ((uint)Memory[Address + 5]) << 8 |
((uint)Memory[Address + 6]) << 16 | ((uint)Memory[Address + 7]) << 24, Memory);
}
get => new PpmState(
Memory[Address + 4] | ((uint)Memory[Address + 5]) << 8 |
((uint)Memory[Address + 6]) << 16 | ((uint)Memory[Address + 7]) << 24, Memory);
set
{
Memory[Address + 4] = (byte)value.Address;
@@ -80,13 +76,9 @@ namespace SharpCompress.Compressors.PPMd.I1
/// </summary>
public PpmContext Suffix
{
get
{
return
new PpmContext(
Memory[Address + 8] | ((uint)Memory[Address + 9]) << 8 |
((uint)Memory[Address + 10]) << 16 | ((uint)Memory[Address + 11]) << 24, Memory);
}
get => new PpmContext(
Memory[Address + 8] | ((uint)Memory[Address + 9]) << 8 |
((uint)Memory[Address + 10]) << 16 | ((uint)Memory[Address + 11]) << 24, Memory);
set
{
Memory[Address + 8] = (byte)value.Address;
@@ -121,21 +113,21 @@ namespace SharpCompress.Compressors.PPMd.I1
/// </para>
/// </remarks>
/// <returns></returns>
public PpmState FirstState { get { return new PpmState(Address + 2, Memory); } }
public PpmState FirstState => new PpmState(Address + 2, Memory);
/// <summary>
/// Gets or sets the symbol of the first PPM state. This is provided for convenience. The same
/// information can be obtained using the Symbol property on the PPM state provided by the
/// <see cref="FirstState"/> property.
/// </summary>
public byte FirstStateSymbol { get { return Memory[Address + 2]; } set { Memory[Address + 2] = value; } }
public byte FirstStateSymbol { get => Memory[Address + 2]; set => Memory[Address + 2] = value; }
/// <summary>
/// Gets or sets the frequency of the first PPM state. This is provided for convenience. The same
/// information can be obtained using the Frequency property on the PPM state provided by the
/// <see cref="FirstState"/> property.
/// </summary>
public byte FirstStateFrequency { get { return Memory[Address + 3]; } set { Memory[Address + 3] = value; } }
public byte FirstStateFrequency { get => Memory[Address + 3]; set => Memory[Address + 3] = value; }
/// <summary>
/// Gets or sets the successor of the first PPM state. This is provided for convenience. The same
@@ -143,13 +135,9 @@ namespace SharpCompress.Compressors.PPMd.I1
/// </summary>
public PpmContext FirstStateSuccessor
{
get
{
return
new PpmContext(
Memory[Address + 4] | ((uint)Memory[Address + 5]) << 8 |
((uint)Memory[Address + 6]) << 16 | ((uint)Memory[Address + 7]) << 24, Memory);
}
get => new PpmContext(
Memory[Address + 4] | ((uint)Memory[Address + 5]) << 8 |
((uint)Memory[Address + 6]) << 16 | ((uint)Memory[Address + 7]) << 24, Memory);
set
{
Memory[Address + 4] = (byte)value.Address;

View File

@@ -38,25 +38,21 @@ namespace SharpCompress.Compressors.PPMd.I1
/// <summary>
/// Gets or sets the symbol.
/// </summary>
public byte Symbol { get { return Memory[Address]; } set { Memory[Address] = value; } }
public byte Symbol { get => Memory[Address]; set => Memory[Address] = value; }
/// <summary>
/// Gets or sets the frequency.
/// </summary>
public byte Frequency { get { return Memory[Address + 1]; } set { Memory[Address + 1] = value; } }
public byte Frequency { get => Memory[Address + 1]; set => Memory[Address + 1] = value; }
/// <summary>
/// Gets or sets the successor.
/// </summary>
public Model.PpmContext Successor
{
get
{
return
new Model.PpmContext(
Memory[Address + 2] | ((uint)Memory[Address + 3]) << 8 |
((uint)Memory[Address + 4]) << 16 | ((uint)Memory[Address + 5]) << 24, Memory);
}
get => new Model.PpmContext(
Memory[Address + 2] | ((uint)Memory[Address + 3]) << 8 |
((uint)Memory[Address + 4]) << 16 | ((uint)Memory[Address + 5]) << 24, Memory);
set
{
Memory[Address + 2] = (byte)value.Address;
@@ -72,7 +68,7 @@ namespace SharpCompress.Compressors.PPMd.I1
/// </summary>
/// <param name="offset"></param>
/// <returns></returns>
public PpmState this[int offset] { get { return new PpmState((uint)(Address + offset * Size), Memory); } }
public PpmState this[int offset] => new PpmState((uint)(Address + offset * Size), Memory);
/// <summary>
/// Allow a pointer to be implicitly converted to a PPM state.
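PpmState follows the same layout: symbol at +0, frequency at +1 and a 4-byte successor pointer at +2, so one state occupies 6 bytes, and the indexer walks an array of states by plain address arithmetic. A hedged sketch of that addressing (Size = 6 is inferred from the offsets above, not quoted from the library):

internal static class PpmStateAddressSketch
{
    internal const int Size = 6; // 1 (symbol) + 1 (frequency) + 4 (successor)

    // Address of the i-th state in a run starting at 'baseAddress', mirroring:
    // this[int offset] => new PpmState((uint)(Address + offset * Size), Memory)
    internal static uint StateAddress(uint baseAddress, int index)
    {
        return (uint)(baseAddress + index * Size);
    }
}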

View File

@@ -48,7 +48,7 @@ namespace SharpCompress.Compressors.PPMd
public int AllocatorSize
{
get { return allocatorSize; }
get => allocatorSize;
set
{
allocatorSize = value;
@@ -63,15 +63,8 @@ namespace SharpCompress.Compressors.PPMd
}
}
public byte[] Properties
{
get
{
return
DataConverter.LittleEndian.GetBytes(
(ushort)
((ModelOrder - 1) + (((AllocatorSize >> 20) - 1) << 4) + ((ushort)ModelRestorationMethod << 12)));
}
}
public byte[] Properties => DataConverter.LittleEndian.GetBytes(
(ushort)
((ModelOrder - 1) + (((AllocatorSize >> 20) - 1) << 4) + ((ushort)ModelRestorationMethod << 12)));
}
}
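The Properties getter packs three settings into one little-endian ushort: bits 0-3 hold ModelOrder - 1, bits 4-11 hold (AllocatorSize >> 20) - 1 (the allocator size in MiB, minus one), and bits 12-15 hold the restoration method. A small worked sketch of the packing, using order 16, a 16 MiB allocator and restoration method 0 as example inputs:

internal static class PpmdPropertiesSketch
{
    internal static ushort Pack(int modelOrder, int allocatorSize, int restorationMethod)
    {
        // Mirrors: (ModelOrder - 1) + (((AllocatorSize >> 20) - 1) << 4)
        //          + ((ushort)ModelRestorationMethod << 12)
        return (ushort)((modelOrder - 1)
                        + (((allocatorSize >> 20) - 1) << 4)
                        + (restorationMethod << 12));
    }

    internal static void Demo()
    {
        // Order 16, 16 MiB allocator, restoration method 0:
        // (16 - 1) = 0x0F, (16 - 1) << 4 = 0xF0, 0 << 12 = 0  ->  0x00FF.
        System.Console.WriteLine(Pack(16, 16 << 20, 0).ToString("X4")); // prints 00FF
    }
}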

View File

@@ -57,11 +57,11 @@ namespace SharpCompress.Compressors.PPMd
}
}
public override bool CanRead { get { return !compress; } }
public override bool CanRead => !compress;
public override bool CanSeek { get { return false; } }
public override bool CanSeek => false;
public override bool CanWrite { get { return compress; } }
public override bool CanWrite => compress;
public override void Flush()
{
@@ -84,9 +84,9 @@ namespace SharpCompress.Compressors.PPMd
base.Dispose(isDisposing);
}
public override long Length { get { throw new NotSupportedException(); } }
public override long Length => throw new NotSupportedException();
public override long Position { get { return position; } set { throw new NotSupportedException(); } }
public override long Position { get => position; set => throw new NotSupportedException(); }
public override int Read(byte[] buffer, int offset, int count)
{

View File

@@ -59,6 +59,8 @@ namespace SharpCompress.Compressors.Rar
currentPartTotalReadBytes = 0;
CurrentCrc = filePartEnumerator.Current.FileHeader.FileCRC;
streamListener.FireFilePartExtractionBegin(filePartEnumerator.Current.FilePartName,
filePartEnumerator.Current.FileHeader.CompressedSize,
filePartEnumerator.Current.FileHeader.UncompressedSize);
@@ -113,20 +115,22 @@ namespace SharpCompress.Compressors.Rar
return totalRead;
}
public override bool CanRead { get { return true; } }
public override bool CanRead => true;
public override bool CanSeek { get { return false; } }
public override bool CanSeek => false;
public override bool CanWrite { get { return false; } }
public override bool CanWrite => false;
public uint CurrentCrc { get; private set; }
public override void Flush()
{
throw new NotSupportedException();
}
public override long Length { get { throw new NotSupportedException(); } }
public override long Length => throw new NotSupportedException();
public override long Position { get { throw new NotSupportedException(); } set { throw new NotSupportedException(); } }
public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
public override long Seek(long offset, SeekOrigin origin)
{

View File

@@ -6,6 +6,10 @@ namespace SharpCompress.Compressors.Rar
{
private static readonly uint[] crcTab;
public static uint CheckCrc(uint startCrc, byte b) {
return (crcTab[((int) ((int) startCrc ^ (int) b)) & 0xff] ^ (startCrc >> 8));
}
public static uint CheckCrc(uint startCrc, byte[] data, int offset, int count)
{
int size = Math.Min(data.Length - offset, count);
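CheckCrc is the standard table-driven CRC-32 update: XOR the input byte into the running CRC, use the low 8 bits as an index into a 256-entry table, and XOR the result with the CRC shifted right by 8. A hedged, self-contained sketch; the table construction with the reflected polynomial 0xEDB88320 is the conventional one, and SharpCompress's crcTab is assumed to be built the same way:

using System;

internal static class Crc32Sketch
{
    private static readonly uint[] Table = BuildTable();

    private static uint[] BuildTable()
    {
        var table = new uint[256];
        for (uint i = 0; i < 256; i++)
        {
            uint c = i;
            for (int k = 0; k < 8; k++)
            {
                c = (c & 1) != 0 ? 0xEDB88320u ^ (c >> 1) : c >> 1;
            }
            table[i] = c;
        }
        return table;
    }

    // Per-byte update, same shape as RarCRC.CheckCrc(startCrc, b).
    internal static uint Update(uint crc, byte b)
    {
        return Table[(crc ^ b) & 0xff] ^ (crc >> 8);
    }

    // Buffer variant, mirroring CheckCrc(startCrc, data, offset, count).
    internal static uint Update(uint crc, byte[] data, int offset, int count)
    {
        int end = offset + Math.Min(data.Length - offset, count);
        for (int i = offset; i < end; i++)
        {
            crc = Update(crc, data[i]);
        }
        return crc;
    }
}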

View File

@@ -0,0 +1,42 @@
using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Compressors.Rar {
internal class RarCrcStream : RarStream {
private readonly MultiVolumeReadOnlyStream readStream;
private uint currentCrc;
public RarCrcStream(Unpack unpack, FileHeader fileHeader, MultiVolumeReadOnlyStream readStream) : base(unpack, fileHeader, readStream)
{
this.readStream = readStream;
ResetCrc();
}
public uint GetCrc()
{
return ~currentCrc;
}
public void ResetCrc()
{
currentCrc = 0xffffffff;
}
public override int Read(byte[] buffer, int offset, int count)
{
var result = base.Read(buffer, offset, count);
if (result != 0)
{
currentCrc = RarCRC.CheckCrc(currentCrc, buffer, offset, result);
}
else if (GetCrc() != readStream.CurrentCrc)
{
// NOTE: we use the last FileHeader in a multipart volume to check CRC
throw new InvalidFormatException("file crc mismatch");
}
return result;
}
}
}
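RarCrcStream keeps a running CRC across Read calls (seeded with 0xFFFFFFFF, finalised by complementing) and only compares it against the expected value once a read returns 0, i.e. at end of stream. A hedged usage-style sketch of that convention over any Stream, reusing the Crc32Sketch helper from the previous sketch; InvalidDataException stands in for SharpCompress's InvalidFormatException, and expectedCrc for readStream.CurrentCrc:

using System.IO;

internal static class CrcCheckedCopySketch
{
    internal static void CopyAndVerify(Stream source, Stream destination, uint expectedCrc)
    {
        uint crc = 0xffffffff;                  // same seed as ResetCrc()
        var buffer = new byte[81920];
        int read;
        while ((read = source.Read(buffer, 0, buffer.Length)) != 0)
        {
            crc = Crc32Sketch.Update(crc, buffer, 0, read);
            destination.Write(buffer, 0, read);
        }
        if (~crc != expectedCrc)                // complement, same as GetCrc()
        {
            throw new InvalidDataException("file crc mismatch");
        }
    }
}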

View File

@@ -43,19 +43,19 @@ namespace SharpCompress.Compressors.Rar
readStream.Dispose();
}
public override bool CanRead { get { return true; } }
public override bool CanRead => true;
public override bool CanSeek { get { return false; } }
public override bool CanSeek => false;
public override bool CanWrite { get { return false; } }
public override bool CanWrite => false;
public override void Flush()
{
}
public override long Length { get { return fileHeader.UncompressedSize; } }
public override long Length => fileHeader.UncompressedSize;
public override long Position { get { return fileHeader.UncompressedSize - unpack.DestSize; } set { throw new NotSupportedException(); } }
public override long Position { get => fileHeader.UncompressedSize - unpack.DestSize; set => throw new NotSupportedException(); }
public override int Read(byte[] buffer, int offset, int count)
{

Some files were not shown because too many files have changed in this diff.