Compare commits

...

49 Commits

Author SHA1 Message Date
Adam Hathcock
af2408de74 Encryptor is stateful. It's also reused when multiple runs of open entry stream is called. Need to change that. Need Pkware test. 2016-11-21 12:02:35 +00:00
Adam Hathcock
0a64fe28b0 Oops, removed too much from project.json 2016-10-14 09:03:15 +01:00
Adam Hathcock
e320ccfa9a 0.14.0 2016-10-14 08:59:19 +01:00
Adam Hathcock
9628ff9456 Merge pull request #191 from jskeet/lzip
Initial read-only support for LZip
2016-10-14 08:50:32 +01:00
Jon Skeet
d540f78cfc Initial read-only support for LZip
LZip has no notion of filenames, so an LzipReader wouldn't make very much sense;
I've just implemented the stream, and hooked it into tar support.
2016-10-12 15:08:56 +01:00
Adam Hathcock
66420cd299 Merge pull request #189 from ziaa/master
Remove unbalanced parentheses in code samples
2016-10-08 18:25:30 +01:00
Seyed Zia Azimi
dd0594471f Remove unbalanced parentheses in samples 2016-10-07 19:33:41 +03:30
Adam Hathcock
844ba228ee Make 0.13.1 2016-10-03 13:44:19 +01:00
Adam Hathcock
7efc701b32 Merge pull request #188 from adamhathcock/fix_nulls
Fix null password on ReaderFactory.  Fix null options on SevenZipArchive
2016-10-03 13:41:55 +01:00
Adam Hathcock
d7e29f7c4d Fix occasionally failing test 2016-10-03 13:37:04 +01:00
Adam Hathcock
f26ba91386 Fix null password on ReaderFactory. Fix null options on SevenZipArchive 2016-10-03 13:32:53 +01:00
Adam Hathcock
c73ac2039c Merge pull request #185 from adamhathcock/ppmd_allocation_zipwriter
Make PpmdProperties lazy to avoid unnecessary allocations.
2016-10-03 13:04:14 +01:00
Adam Hathcock
671f9cd0cb Empty commit to kick build 2016-10-03 12:58:23 +01:00
Adam Hathcock
131b5b9714 Can't use Lazy on .NET 3.5 :( 2016-10-03 11:20:29 +01:00
Adam Hathcock
74af0889b9 Make PpmdProperties lazy to avoid unnecessary allocations. 2016-10-03 10:16:26 +01:00
Adam Hathcock
e5ee399045 Merge pull request #181 from claunia/patch-1
Update FORMATS.md
2016-09-30 07:08:52 +01:00
deeb7a0f64 Update FORMATS.md
Add ADC to formats list.
2016-09-29 22:53:51 +01:00
Adam Hathcock
5af3bab1dc Merge pull request #180 from adamhathcock/documenting
Add Markdown files to document things.
2016-09-29 11:58:19 +01:00
Adam Hathcock
28be84d315 For all branches 2016-09-29 11:35:54 +01:00
Adam Hathcock
a0528c737d Trying just to build once 2016-09-29 11:34:50 +01:00
Adam Hathcock
b506e488e8 Add build badge 2016-09-29 11:32:31 +01:00
Adam Hathcock
58eb0e08d6 Don't save artifacts for PRs 2016-09-29 11:22:26 +01:00
Adam Hathcock
562701894f Save nupkgs 2016-09-29 11:13:05 +01:00
Adam Hathcock
54a562273b Incomplete refactoring 2016-09-29 11:10:11 +01:00
Adam Hathcock
3f8c9c4cb0 Update for 0.13.0 2016-09-29 11:03:11 +01:00
Adam Hathcock
3e7d28b043 Can I fix tables? 2016-09-29 10:57:49 +01:00
Adam Hathcock
40b10d4a26 Add Markdown files to document things. 2016-09-29 10:55:04 +01:00
Adam Hathcock
f367630a2a Merge pull request #179 from adamhathcock/tar_fix
Allow empty tar header to be read to know there are no more tar heade…
2016-09-28 13:57:09 +01:00
Adam Hathcock
b9e4f00862 Merge branch 'master' into tar_fix 2016-09-28 13:50:45 +01:00
Adam Hathcock
d6e74d6163 Merge pull request #178 from adamhathcock/7zip_deflate
Allow deflate decoder for 7zip
2016-09-28 13:50:35 +01:00
Adam Hathcock
4a4522b842 Merge branch 'master' into 7zip_deflate 2016-09-28 13:44:46 +01:00
Adam Hathcock
710ba4423d Merge branch 'master' into tar_fix 2016-09-28 13:43:21 +01:00
Adam Hathcock
2a5494a804 Merge pull request #174 from adamhathcock/redo_options
Redo options
2016-09-28 13:40:54 +01:00
Adam Hathcock
568909800c Allow empty tar header to be read to know there are no more tar headers to read 2016-09-28 12:00:48 +01:00
Adam Hathcock
7513a608b1 Allow deflate decoder 2016-09-28 11:59:31 +01:00
Adam Hathcock
911e9878bd Merge branch 'master' into redo_options 2016-09-27 13:09:07 +01:00
Adam Hathcock
899d7d6e61 Appveyor (#175)
* First pass of Cake build

* Update Cake but still need cake itself to run on full CLR

* Test out appveyor

* 3.5 build fix

* Build master and PRs differently.  Still scared to auto publish to nuget.
2016-09-27 13:08:42 +01:00
Adam Hathcock
260c0ee776 Add SaveTo overload for zip archives 2016-09-27 11:19:52 +01:00
Adam Hathcock
d71520808d Helps if I rename everything 2016-09-27 11:08:54 +01:00
Adam Hathcock
177fc2a12c Flags were a better idea when I was younger. It's not clear though. 2016-09-27 10:50:36 +01:00
Adam Hathcock
5dafcb02d4 Redo options classes 2016-09-27 10:23:35 +01:00
Adam Hathcock
c4fde80c5e Create proper options objects to remove flags from API 2016-09-27 10:14:08 +01:00
Adam Hathcock
06e3486ec4 Bump version 2016-09-26 11:53:35 +01:00
Adam Hathcock
bd7c783aaf Test fixes 2016-09-26 11:51:35 +01:00
Adam Hathcock
d732e3cfa4 Renamespace for proper pluralization 2016-09-26 11:49:49 +01:00
Adam Hathcock
c24cdc66ed Clean up from clean up 2016-09-26 11:03:15 +01:00
Adam Hathcock
efa6f7a82e Huge Resharper clean up. Fixed up test project.json 2016-09-26 10:55:52 +01:00
ddbbc3b847 Adds support for Apple Data Compression. (#168) 2016-09-12 17:41:31 +01:00
Adam Hathcock
7037161c07 Update README 2016-08-12 12:15:45 +01:00
285 changed files with 10144 additions and 8048 deletions

30
.gitignore vendored
View File

@@ -1,14 +1,16 @@
-**/bin/*
-**/obj/*
-_ReSharper.SharpCompress/
-bin/
-*.suo
-*.user
-TestArchives/Scratch/
-TestArchives/Scratch2/
-TestResults/
-*.nupkg
-packages/*/
-project.lock.json
-test/TestArchives/Scratch
-.vs
+**/bin/*
+**/obj/*
+_ReSharper.SharpCompress/
+bin/
+*.suo
+*.user
+TestArchives/Scratch/
+TestArchives/Scratch2/
+TestResults/
+*.nupkg
+packages/*/
+project.lock.json
+test/TestArchives/Scratch
+.vs
+tools
+.vscode

33
FORMATS.md Normal file
View File

@@ -0,0 +1,33 @@
# Archive Formats
## Accessing Archives
Archive classes allow random access to a seekable stream.
Reader classes allow forward-only reading of a stream.
Writer classes allow forward-only writing to a stream.
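A minimal sketch contrasting the two read paths, using `ArchiveFactory.Open` and `ReaderFactory.Open` as they appear elsewhere in this changeset; the file name is a placeholder.
```C#
// Archive API: random access, requires a seekable stream (or a file path).
using (var archive = ArchiveFactory.Open("archive.zip"))
{
    foreach (var entry in archive.Entries)
    {
        Console.WriteLine(entry.Key);
    }
}

// Reader API: forward-only, also works on non-seekable streams.
using (Stream stream = File.OpenRead("archive.zip"))
using (var reader = ReaderFactory.Open(stream))
{
    while (reader.MoveToNextEntry())
    {
        Console.WriteLine(reader.Entry.Key);
    }
}
```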
## Supported Format Table
| Archive Format | Compression Format(s) | Compress/Decompress | Archive API | Reader API | Writer API |
| --- | --- | --- | --- | --- | --- |
| Rar | Rar | Decompress (1) | RarArchive | RarReader | N/A |
| Zip (2) | None, DEFLATE, BZip2, LZMA/LZMA2, PPMd | Both | ZipArchive | ZipReader | ZipWriter |
| Tar | None, BZip2, GZip, LZip | Both | TarArchive | TarReader | TarWriter (3) |
| GZip (single file) | GZip | Both | GZipArchive | GZipReader | GZipWriter |
| 7Zip (4) | LZMA, LZMA2, BZip2, PPMd, BCJ, BCJ2, Deflate | Decompress | SevenZipArchive | N/A | N/A |
1. SOLID Rars are only supported in the RarReader API.
2. The Zip format supports PKWARE and WinZip AES encryption. However, encrypted LZMA is not supported.
3. The Tar format requires a file size in the header. If no size is specified to the TarWriter and the stream is not seekable, an exception will be thrown (see the sketch after this list).
4. The 7Zip format doesn't allow for reading as a forward-only stream so 7Zip is only supported through the Archive API
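A minimal sketch of the point in footnote 3: supply the entry size up front so the tar header can be written without seeking. It uses the `AddEntry` size parameter and `SaveTo(Stream, WriterOptions)` from this changeset; the paths are placeholders and `CompressionType.None` is assumed to be available for plain tars.
```C#
using (var archive = TarArchive.Create())
using (var source = File.OpenRead("D:\\temp\\file.txt"))
using (var output = File.OpenWrite("C:\\temp.tar"))
{
    // Give the size explicitly so the tar header can be filled in up front.
    archive.AddEntry("file.txt", source, size: source.Length);
    archive.SaveTo(output, new WriterOptions(CompressionType.None));
}
```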
## Compressors
For those who want to directly compress/decompress bits
| Compressor | Compress/Decompress |
| --- | --- |
| BZip2Stream | Both |
| GZipStream | Both |
| DeflateStream | Both |
| LZMAStream | Both |
| PPMdStream | Both |
| ADCStream | Decompress |
| LZipStream | Decompress |
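A sketch of using a compressor stream directly rather than an archive. The namespace and the `(Stream, CompressionMode)` constructor are assumptions here (they follow the framework-style pattern) and may differ between SharpCompress versions.
```C#
// Assumed overload: GZipStream(Stream, CompressionMode); adjust for your version.
using (var source = File.OpenRead("C:\\temp\\data.bin"))
using (var target = File.OpenWrite("C:\\temp\\data.bin.gz"))
using (var gzip = new GZipStream(target, CompressionMode.Compress))
{
    source.CopyTo(gzip); // bytes copied in come out GZip-compressed
}
```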

View File

@@ -4,10 +4,12 @@ SharpCompress is a compression library for .NET/Mono/Silverlight/WP7 that can un
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
[![Build status](https://ci.appveyor.com/api/projects/status/voxg971oemmvxh1e/branch/master?svg=true)](https://ci.appveyor.com/project/adamhathcock/sharpcompress/branch/master)
## Need Help?
Post Issues on Github!
Check the [Supported Formats](https://github.com/adamhathcock/sharpcompress/wiki/Supported-Formats) and [basic samples.](https://github.com/adamhathcock/sharpcompress/wiki/API-Examples)
Check the [Supported Formats](FORMATS.md) and [Basic Usage.](USAGE.md)
## A Simple Request
@@ -25,26 +27,44 @@ I'm always looking for help or ideas. Please submit code or email with ideas. Un
* 7Zip writing
* Zip64
* Multi-volume Zip support.
## In-Progress
* RAR5 support
* DNX/NET Core support
* xproj targeting
## Version Log
### Version 0.14.0
* [Support for LZip reading in for Tars](https://github.com/adamhathcock/sharpcompress/pull/191)
### Version 0.13.1
* [Fix null password on ReaderFactory. Fix null options on SevenZipArchive](https://github.com/adamhathcock/sharpcompress/pull/188)
* [Make PpmdProperties lazy to avoid unnecessary allocations.](https://github.com/adamhathcock/sharpcompress/pull/185)
### Version 0.13.0
* Breaking change: Big refactor of Options on API.
* 7Zip supports Deflate
### Version 0.12.4
* Forward only zip issue fix https://github.com/adamhathcock/sharpcompress/issues/160
* Try to fix frameworks again by copying targets from JSON.NET
### Version 0.12.3
* 7Zip fixes https://github.com/adamhathcock/sharpcompress/issues/73
* Maybe all profiles will work with project.json now
### Version 0.12.2
* Support Profile 259 again
### Version 0.12.1
* Support Silverlight 5
### Version 0.12.0
* .NET Core RTM!
* Bug fix for Tar long paths

View File

@@ -1,6 +1,119 @@
<wpf:ResourceDictionary xml:space="preserve" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml" xmlns:s="clr-namespace:System;assembly=mscorlib" xmlns:ss="urn:shemas-jetbrains-com:settings-storage-xaml" xmlns:wpf="http://schemas.microsoft.com/winfx/2006/xaml/presentation">
<s:String x:Key="/Default/Environment/InjectedLayers/FileInjectedLayer/=181069325DAB1C4287CD564D6CDDEDB3/AbsolutePath/@EntryValue">D:\Git\sharpcompress\SharpCompress\sharpcompress.DotSettings</s:String>
<s:String x:Key="/Default/Environment/InjectedLayers/FileInjectedLayer/=181069325DAB1C4287CD564D6CDDEDB3/RelativePath/@EntryValue">..\SharpCompress\sharpcompress.DotSettings</s:String>
<s:Boolean x:Key="/Default/Environment/InjectedLayers/FileInjectedLayer/=181069325DAB1C4287CD564D6CDDEDB3/@KeyIndexDefined">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/InjectedLayers/InjectedLayerCustomization/=File181069325DAB1C4287CD564D6CDDEDB3/@KeyIndexDefined">True</s:Boolean>
<s:Double x:Key="/Default/Environment/InjectedLayers/InjectedLayerCustomization/=File181069325DAB1C4287CD564D6CDDEDB3/RelativePriority/@EntryValue">1</s:Double></wpf:ResourceDictionary>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArgumentsStyleNamedExpression/@EntryIndexedValue">DO_NOT_SHOW</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArrangeBraces_005Fdowhile/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArrangeBraces_005Ffixed/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArrangeBraces_005Ffor/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArrangeBraces_005Fforeach/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArrangeBraces_005Fifelse/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArrangeBraces_005Flock/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArrangeBraces_005Fusing/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArrangeBraces_005Fwhile/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=MethodSupportsCancellation/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=RedundantExplicitParamsArrayCreation/@EntryIndexedValue">DO_NOT_SHOW</s:String>
<s:String x:Key="/Default/CodeStyle/CodeCleanup/Profiles/=Basic_0020Clean/@EntryIndexedValue">&lt;?xml version="1.0" encoding="utf-16"?&gt;&lt;Profile name="Basic Clean"&gt;&lt;CSOptimizeUsings&gt;&lt;OptimizeUsings&gt;True&lt;/OptimizeUsings&gt;&lt;EmbraceInRegion&gt;False&lt;/EmbraceInRegion&gt;&lt;RegionName&gt;&lt;/RegionName&gt;&lt;/CSOptimizeUsings&gt;&lt;CSShortenReferences&gt;True&lt;/CSShortenReferences&gt;&lt;CSRemoveCodeRedundancies&gt;True&lt;/CSRemoveCodeRedundancies&gt;&lt;CSMakeFieldReadonly&gt;True&lt;/CSMakeFieldReadonly&gt;&lt;CSCodeStyleAttributes ArrangeTypeAccessModifier="False" ArrangeTypeMemberAccessModifier="False" SortModifiers="False" RemoveRedundantParentheses="False" AddMissingParentheses="False" ArrangeBraces="True" ArrangeAttributes="False" ArrangeArgumentsStyle="False" /&gt;&lt;RemoveCodeRedundancies&gt;True&lt;/RemoveCodeRedundancies&gt;&lt;CSUseAutoProperty&gt;True&lt;/CSUseAutoProperty&gt;&lt;CSMakeAutoPropertyGetOnly&gt;True&lt;/CSMakeAutoPropertyGetOnly&gt;&lt;CSReformatCode&gt;True&lt;/CSReformatCode&gt;&lt;/Profile&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/CodeCleanup/SilentCleanupProfile/@EntryValue">Basic Clean</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/APPLY_ON_COMPLETION/@EntryValue">True</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/ARGUMENTS_NAMED/@EntryValue">Named</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_FOR/@EntryValue">Required</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_FOREACH/@EntryValue">Required</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_IFELSE/@EntryValue">Required</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_WHILE/@EntryValue">Required</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_FIRST_ARG_BY_PAREN/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_LINQ_QUERY/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_ARGUMENT/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_ARRAY_AND_OBJECT_INITIALIZER/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_CALLS_CHAIN/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_EXPRESSION/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_EXTENDS_LIST/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_FOR_STMT/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_PARAMETER/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTIPLE_DECLARATION/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTLINE_TYPE_PARAMETER_CONSTRAINS/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTLINE_TYPE_PARAMETER_LIST/@EntryValue">True</s:Boolean>
<s:Int64 x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/BLANK_LINES_AFTER_START_COMMENT/@EntryValue">0</s:Int64>
<s:Int64 x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/BLANK_LINES_BEFORE_SINGLE_LINE_COMMENT/@EntryValue">1</s:Int64>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_ATTRIBUTE_STYLE/@EntryValue">SEPARATE</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_FIXED_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_FOR_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_FOREACH_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_IFELSE_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_USING_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_WHILE_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/INDENT_ANONYMOUS_METHOD_BLOCK/@EntryValue">True</s:Boolean>
<s:Int64 x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/KEEP_BLANK_LINES_IN_CODE/@EntryValue">1</s:Int64>
<s:Int64 x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/KEEP_BLANK_LINES_IN_DECLARATIONS/@EntryValue">1</s:Int64>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_CONSTRUCTOR_INITIALIZER_ON_SAME_LINE/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_FIELD_ATTRIBUTE_ON_SAME_LINE/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_ACCESSORHOLDER_ON_SINGLE_LINE/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_ACCESSOR_ATTRIBUTE_ON_SAME_LINE/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_INITIALIZER_ON_SINGLE_LINE/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_WHILE_ON_NEW_LINE/@EntryValue">True</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SIMPLE_EMBEDDED_STATEMENT_STYLE/@EntryValue">LINE_BREAK</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SPACE_AFTER_TYPECAST_PARENTHESES/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SPACE_AROUND_ARROW_OP/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SPACE_AROUND_MULTIPLICATIVE_OP/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SPACE_BEFORE_SIZEOF_PARENTHESES/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SPACE_BEFORE_TYPEOF_PARENTHESES/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/STICK_COMMENT/@EntryValue">False</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_ARGUMENTS_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_ARRAY_INITIALIZER_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_EXTENDS_LIST_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_LINES/@EntryValue">False</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_PARAMETERS_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForBuiltInTypes/@EntryValue">UseVarWhenEvident</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForOtherTypes/@EntryValue">UseVarWhenEvident</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForSimpleTypes/@EntryValue">UseVarWhenEvident</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateInstanceFields/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="_" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateStaticFields/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateStaticReadonly/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=StaticReadonly/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FCONSTANT/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FFUNCTION/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FVARIABLE/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FCLASS/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FCONSTRUCTOR/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FFUNCTION/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FGLOBAL_005FVARIABLE/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FLABEL/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FLOCAL_005FCONSTRUCTOR/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FLOCAL_005FVARIABLE/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FOBJECT_005FPROPERTY_005FOF_005FFUNCTION/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FPARAMETER/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FCLASS/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FENUM/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FENUM_005FMEMBER/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FINTERFACE/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="I" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FMODULE/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FMODULE_005FEXPORTED/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FMODULE_005FLOCAL/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPRIVATE_005FMEMBER_005FACCESSOR/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPRIVATE_005FSTATIC_005FTYPE_005FFIELD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPRIVATE_005FTYPE_005FFIELD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPRIVATE_005FTYPE_005FMETHOD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPROTECTED_005FMEMBER_005FACCESSOR/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPROTECTED_005FSTATIC_005FTYPE_005FFIELD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPROTECTED_005FTYPE_005FFIELD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPROTECTED_005FTYPE_005FMETHOD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPUBLIC_005FMEMBER_005FACCESSOR/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPUBLIC_005FSTATIC_005FTYPE_005FFIELD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPUBLIC_005FTYPE_005FFIELD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPUBLIC_005FTYPE_005FMETHOD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FTYPE_005FPARAMETER/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="T" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/WebNaming/UserRules/=ASP_005FFIELD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/WebNaming/UserRules/=ASP_005FHTML_005FCONTROL/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/WebNaming/UserRules/=ASP_005FTAG_005FNAME/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/WebNaming/UserRules/=ASP_005FTAG_005FPREFIX/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/XamlNaming/UserRules/=NAMESPACE_005FALIAS/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/XamlNaming/UserRules/=XAML_005FFIELD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/XamlNaming/UserRules/=XAML_005FRESOURCE/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EAddAccessorOwnerDeclarationBracesMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateBlankLinesAroundFieldToBlankLinesAroundProperty/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateThisQualifierSettings/@EntryIndexedValue">True</s:Boolean></wpf:ResourceDictionary>

131
USAGE.md Normal file
View File

@@ -0,0 +1,131 @@
# SharpCompress Usage
## Stream Rules
When dealing with Streams, the rule should be that you don't close a stream you didn't create. In effect, this means you should always put a Stream you did create in a using block so it gets disposed.
However, the .NET Framework has many classes that dispose the streams they wrap by default to make things "easy", like the following:
```C#
using (var reader = new StreamReader(File.Open("foo")))
{
    ...
}
```
In this example, `reader` gets disposed at the end of the using block. By the stream rules, the `FileStream` created by `File.Open` should remain open, but the .NET Framework closes it for you by default unless you use a constructor overload that leaves it open. In general, you should be writing Stream code like this:
```C#
using (var fileStream = File.Open("foo"))
using (var reader = new StreamReader(fileStream))
{
    ...
}
```
To deal with the "correct" rules as well as the expectations of users, I've decided on this:
* When writing, leave streams open.
* When reading, close streams.
To be explicit though, consider always using the overloads that use `ReaderOptions` or `WriterOptions` and explicitly set `LeaveStreamOpen` the way you want.
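For example, a small sketch of opening an archive while being explicit about stream ownership, using `ArchiveFactory.Open(Stream, ReaderOptions)` and the `LeaveStreamOpen` option from this changeset; the file name is a placeholder.
```C#
using (Stream stream = File.OpenRead("archive.zip"))
using (var archive = ArchiveFactory.Open(stream, new ReaderOptions()
{
    // The caller created 'stream', so the caller disposes it.
    LeaveStreamOpen = true
}))
{
    // work with archive.Entries here
}
```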
## Samples
Also, look over the tests for more thorough [examples](https://github.com/adamhathcock/sharpcompress/tree/master/test/SharpCompress.Test)
### Create Zip Archive from all files in a directory to a file
```C#
using (var archive = ZipArchive.Create())
{
    archive.AddAllFromDirectory("D:\\temp");
    archive.SaveTo("C:\\temp.zip", CompressionType.Deflate);
}
```
### Create Zip Archive from all files in a directory and save in memory
```C#
var memoryStream = new MemoryStream();
using (var archive = ZipArchive.Create())
{
    archive.AddAllFromDirectory("D:\\temp");
    archive.SaveTo(memoryStream, new WriterOptions(CompressionType.Deflate)
    {
        LeaveStreamOpen = true
    });
}
//reset memoryStream to be usable now
memoryStream.Position = 0;
```
### Extract all files from a Rar file to a directory using RarArchive
```C#
using (var archive = RarArchive.Open("Test.rar"))
{
    foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
    {
        entry.WriteToDirectory("D:\\temp", new ExtractionOptions()
        {
            ExtractFullPath = true,
            Overwrite = true
        });
    }
}
```
### Use ReaderFactory to autodetect archive type and extract entries to a directory
```C#
using (Stream stream = File.OpenRead("Tar.tar.bz2"))
using (var reader = ReaderFactory.Open(stream))
{
    while (reader.MoveToNextEntry())
    {
        if (!reader.Entry.IsDirectory)
        {
            Console.WriteLine(reader.Entry.Key);
            reader.WriteEntryToDirectory(@"C:\temp", new ExtractionOptions()
            {
                ExtractFullPath = true,
                Overwrite = true
            });
        }
    }
}
```
### Use ReaderFactory to autodetect archive type and Open the entry stream
```C#
using (Stream stream = File.OpenRead("Tar.tar.bz2"))
using (var reader = ReaderFactory.Open(stream))
{
    while (reader.MoveToNextEntry())
    {
        if (!reader.Entry.IsDirectory)
        {
            using (var entryStream = reader.OpenEntryStream())
            {
                entryStream.CopyTo(...);
            }
        }
    }
}
```
### Use WriterFactory to write all files from a directory in a streaming manner.
```C#
using (Stream stream = File.OpenWrite("C:\\temp.tgz"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)
{
    LeaveStreamOpen = true
}))
{
    writer.WriteAll("D:\\temp", "*", SearchOption.AllDirectories);
}
```

17
appveyor.yml Normal file
View File

@@ -0,0 +1,17 @@
version: '0.13.{build}'
init:
  - git config --global core.autocrlf true
build_script:
  - ps: .\build.ps1
test: off
cache:
  - tools -> build.cake
  - tools -> build.ps1
artifacts:
  - path: nupkgs\*.nupkg
    name: NuPkgs

229
build.cake Normal file
View File

@@ -0,0 +1,229 @@
#addin "Cake.Json"
#addin "nuget:?package=NuGet.Core"
using NuGet;
//////////////////////////////////////////////////////////////////////
// ARGUMENTS
//////////////////////////////////////////////////////////////////////
var target = Argument("target", "Default");
var apiKey = Argument("apiKey", "");
var repo = Argument("repo", "");
//////////////////////////////////////////////////////////////////////
// PREPARATION
//////////////////////////////////////////////////////////////////////
var sources = new [] { "https://api.nuget.org/v3/index.json" };
var publishTarget = "";
Warning("=============");
var globalPath = MakeFullPath("global.json");
var nupkgs = MakeFullPath("nupkgs");
Warning("Operating on global.json: " + globalPath);
Warning("=============");
//////////////////////////////////////////////////////////////////////
// FUNCTIONS
//////////////////////////////////////////////////////////////////////
string MakeFullPath(string relativePath)
{
if (string.IsNullOrEmpty(repo))
{
return MakeAbsolute(new DirectoryPath(relativePath)).ToString();
}
if (!System.IO.Path.IsPathRooted(repo))
{
return MakeAbsolute(new DirectoryPath(System.IO.Path.Combine(repo,relativePath))).ToString();
}
return System.IO.Path.Combine(repo, relativePath);
}
IEnumerable<string> GetAllProjects()
{
var global = DeserializeJsonFromFile<JObject>(globalPath);
var projs = global["projects"].Select(x => x.ToString());
foreach(var y in projs)
{
yield return MakeFullPath(y);
}
}
IEnumerable<string> GetSourceProjects()
{
return GetAllProjects().Where(x => x.EndsWith("src"));
}
IEnumerable<string> GetTestProjects()
{
return GetAllProjects().Where(x => x.EndsWith("test"));
}
IEnumerable<string> GetFrameworks(string path)
{
var projectJObject = DeserializeJsonFromFile<JObject>(path);
foreach(var prop in ((JObject)projectJObject["frameworks"]).Properties())
{
yield return prop.Name;
}
}
string GetVersion(string path)
{
var projectJObject = DeserializeJsonFromFile<JObject>(path);
return ((JToken)projectJObject["version"]).ToString();
}
IEnumerable<string> GetProjectJsons(IEnumerable<string> projects)
{
foreach(var proj in projects)
{
foreach(var projectJson in GetFiles(proj + "/**/project.json"))
{
yield return MakeFullPath(projectJson.ToString());
}
}
}
bool IsNuGetPublished (FilePath file, string nugetSource)
{
var pkg = new ZipPackage(file.ToString());
var repo = PackageRepositoryFactory.Default.CreateRepository(nugetSource);
var packages = repo.FindPackagesById(pkg.Id);
var version = SemanticVersion.Parse(pkg.Version.ToString());
//Filter the list of packages that are not Release (Stable) versions
var exists = packages.Any (p => p.Version == version);
return exists;
}
//////////////////////////////////////////////////////////////////////
// TASKS
//////////////////////////////////////////////////////////////////////
Task("Restore")
.Does(() =>
{
var settings = new DotNetCoreRestoreSettings
{
Sources = sources,
NoCache = true
};
foreach(var project in GetProjectJsons(GetSourceProjects().Concat(GetTestProjects())))
{
DotNetCoreRestore(project, settings);
}
});
Task("Build")
.Does(() =>
{
var settings = new DotNetCoreBuildSettings
{
Configuration = "Release"
};
foreach(var project in GetProjectJsons(GetSourceProjects().Concat(GetTestProjects())))
{
foreach(var framework in GetFrameworks(project))
{
Information("Building: {0} on Framework: {1}", project, framework);
Information("========");
settings.Framework = framework;
DotNetCoreBuild(project, settings);
}
}
});
Task("Test")
.Does(() =>
{
var settings = new DotNetCoreTestSettings
{
Configuration = "Release",
Verbose = true
};
foreach(var project in GetProjectJsons(GetTestProjects()))
{
settings.Framework = GetFrameworks(project).First();
DotNetCoreTest(project.ToString(), settings);
}
}).ReportError(exception =>
{
Error(exception.ToString());
});
Task("Pack")
.Does(() =>
{
if (DirectoryExists(nupkgs))
{
DeleteDirectory(nupkgs, true);
}
CreateDirectory(nupkgs);
var settings = new DotNetCorePackSettings
{
Configuration = "Release",
OutputDirectory = nupkgs
};
foreach(var project in GetProjectJsons(GetSourceProjects()))
{
DotNetCorePack(project, settings);
}
});
Task("Publish")
.IsDependentOn("Restore")
.IsDependentOn("Build")
.IsDependentOn("Test")
.IsDependentOn("Pack")
.Does(() =>
{
var packages = GetFiles(nupkgs + "/*.nupkg");
foreach(var package in packages)
{
if (package.ToString().Contains("symbols"))
{
Warning("Skipping Symbols package " + package);
continue;
}
if (IsNuGetPublished(package, sources[0]))
{
throw new InvalidOperationException(package + " is already published.");
}
NuGetPush(package, new NuGetPushSettings{
ApiKey = apiKey,
Verbosity = NuGetVerbosity.Detailed,
Source = publishTarget
});
}
});
//////////////////////////////////////////////////////////////////////
// TASK TARGETS
//////////////////////////////////////////////////////////////////////
Task("Default")
.IsDependentOn("Restore")
.IsDependentOn("Build")
.IsDependentOn("Test")
.IsDependentOn("Pack");
//////////////////////////////////////////////////////////////////////
// EXECUTION
//////////////////////////////////////////////////////////////////////
RunTarget(target);

130
build.ps1 Normal file
View File

@@ -0,0 +1,130 @@
<#
.SYNOPSIS
This is a Powershell script to bootstrap a Cake build.
.DESCRIPTION
This Powershell script will download NuGet if missing, restore NuGet tools (including Cake)
and execute your Cake build script with the parameters you provide.
.PARAMETER Target
The build script target to run.
.PARAMETER Configuration
The build configuration to use.
.PARAMETER Verbosity
Specifies the amount of information to be displayed.
.PARAMETER WhatIf
Performs a dry run of the build script.
No tasks will be executed.
.PARAMETER ScriptArgs
Remaining arguments are added here.
.LINK
http://cakebuild.net
#>
[CmdletBinding()]
Param(
[string]$Script = "build.cake",
[string]$Target = "Default",
[ValidateSet("Release", "Debug")]
[string]$Configuration = "Release",
[ValidateSet("Quiet", "Minimal", "Normal", "Verbose", "Diagnostic")]
[string]$Verbosity = "Verbose",
[switch]$WhatIf,
[Parameter(Position=0,Mandatory=$false,ValueFromRemainingArguments=$true)]
[string[]]$ScriptArgs
)
$CakeVersion = "0.16.1"
$DotNetChannel = "preview";
$DotNetVersion = "1.0.0-preview2-003131";
$DotNetInstallerUri = "https://raw.githubusercontent.com/dotnet/cli/rel/1.0.0-preview2/scripts/obtain/dotnet-install.ps1";
$NugetUrl = "https://dist.nuget.org/win-x86-commandline/latest/nuget.exe"
# Make sure tools folder exists
$PSScriptRoot = Split-Path $MyInvocation.MyCommand.Path -Parent
$ToolPath = Join-Path $PSScriptRoot "tools"
if (!(Test-Path $ToolPath)) {
Write-Verbose "Creating tools directory..."
New-Item -Path $ToolPath -Type directory | out-null
}
###########################################################################
# INSTALL .NET CORE CLI
###########################################################################
Function Remove-PathVariable([string]$VariableToRemove)
{
$path = [Environment]::GetEnvironmentVariable("PATH", "User")
if ($path -ne $null)
{
$newItems = $path.Split(';', [StringSplitOptions]::RemoveEmptyEntries) | Where-Object { "$($_)" -inotlike $VariableToRemove }
[Environment]::SetEnvironmentVariable("PATH", [System.String]::Join(';', $newItems), "User")
}
$path = [Environment]::GetEnvironmentVariable("PATH", "Process")
if ($path -ne $null)
{
$newItems = $path.Split(';', [StringSplitOptions]::RemoveEmptyEntries) | Where-Object { "$($_)" -inotlike $VariableToRemove }
[Environment]::SetEnvironmentVariable("PATH", [System.String]::Join(';', $newItems), "Process")
}
}
# Get .NET Core CLI path if installed.
$FoundDotNetCliVersion = $null;
if (Get-Command dotnet -ErrorAction SilentlyContinue) {
$FoundDotNetCliVersion = dotnet --version;
}
if($FoundDotNetCliVersion -ne $DotNetVersion) {
$InstallPath = Join-Path $PSScriptRoot ".dotnet"
if (!(Test-Path $InstallPath)) {
mkdir -Force $InstallPath | Out-Null;
}
(New-Object System.Net.WebClient).DownloadFile($DotNetInstallerUri, "$InstallPath\dotnet-install.ps1");
& $InstallPath\dotnet-install.ps1 -Channel $DotNetChannel -Version $DotNetVersion -InstallDir $InstallPath;
Remove-PathVariable "$InstallPath"
$env:PATH = "$InstallPath;$env:PATH"
$env:DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
$env:DOTNET_CLI_TELEMETRY_OPTOUT=1
}
###########################################################################
# INSTALL NUGET
###########################################################################
# Make sure nuget.exe exists.
$NugetPath = Join-Path $ToolPath "nuget.exe"
if (!(Test-Path $NugetPath)) {
Write-Host "Downloading NuGet.exe..."
(New-Object System.Net.WebClient).DownloadFile($NugetUrl, $NugetPath);
}
###########################################################################
# INSTALL CAKE
###########################################################################
# Make sure Cake has been installed.
$CakePath = Join-Path $ToolPath "Cake.$CakeVersion/Cake.exe"
if (!(Test-Path $CakePath)) {
Write-Host "Installing Cake..."
Invoke-Expression "&`"$NugetPath`" install Cake -Version $CakeVersion -OutputDirectory `"$ToolPath`"" | Out-Null;
if ($LASTEXITCODE -ne 0) {
Throw "An error occured while restoring Cake from NuGet."
}
}
###########################################################################
# RUN BUILD SCRIPT
###########################################################################
# Build the argument list.
$Arguments = @{
target=$Target;
configuration=$Configuration;
verbosity=$Verbosity;
dryrun=$WhatIf;
}.GetEnumerator() | %{"--{0}=`"{1}`"" -f $_.key, $_.value };
# Start Cake
Write-Host "Running build script..."
Invoke-Expression "& `"$CakePath`" `"$Script`" $Arguments $ScriptArgs"
exit $LASTEXITCODE

View File

@@ -1,111 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Archive.GZip
{
internal class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArchiveEntry
{
private readonly string path;
private readonly long size;
private readonly DateTime? lastModified;
private readonly bool closeStream;
private readonly Stream stream;
internal GZipWritableArchiveEntry(GZipArchive archive, Stream stream,
string path, long size, DateTime? lastModified, bool closeStream)
: base(archive, null)
{
this.stream = stream;
this.path = path;
this.size = size;
this.lastModified = lastModified;
this.closeStream = closeStream;
}
public override long Crc
{
get { return 0; }
}
public override string Key
{
get { return path; }
}
public override long CompressedSize
{
get { return 0; }
}
public override long Size
{
get { return size; }
}
public override DateTime? LastModifiedTime
{
get { return lastModified; }
}
public override DateTime? CreatedTime
{
get { return null; }
}
public override DateTime? LastAccessedTime
{
get { return null; }
}
public override DateTime? ArchivedTime
{
get { return null; }
}
public override bool IsEncrypted
{
get { return false; }
}
public override bool IsDirectory
{
get { return false; }
}
public override bool IsSplit
{
get { return false; }
}
internal override IEnumerable<FilePart> Parts
{
get { throw new NotImplementedException(); }
}
Stream IWritableArchiveEntry.Stream
{
get
{
return stream;
}
}
public override Stream OpenEntryStream()
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return new NonDisposingStream(stream);
}
internal override void Close()
{
if (closeStream)
{
stream.Dispose();
}
}
}
}

View File

@@ -1,110 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Archive.Tar
{
internal class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiveEntry
{
private readonly string path;
private readonly long size;
private readonly DateTime? lastModified;
private readonly bool closeStream;
private readonly Stream stream;
internal TarWritableArchiveEntry(TarArchive archive, Stream stream, CompressionType compressionType,
string path, long size, DateTime? lastModified, bool closeStream)
: base(archive, null, compressionType)
{
this.stream = stream;
this.path = path;
this.size = size;
this.lastModified = lastModified;
this.closeStream = closeStream;
}
public override long Crc
{
get { return 0; }
}
public override string Key
{
get { return path; }
}
public override long CompressedSize
{
get { return 0; }
}
public override long Size
{
get { return size; }
}
public override DateTime? LastModifiedTime
{
get { return lastModified; }
}
public override DateTime? CreatedTime
{
get { return null; }
}
public override DateTime? LastAccessedTime
{
get { return null; }
}
public override DateTime? ArchivedTime
{
get { return null; }
}
public override bool IsEncrypted
{
get { return false; }
}
public override bool IsDirectory
{
get { return false; }
}
public override bool IsSplit
{
get { return false; }
}
internal override IEnumerable<FilePart> Parts
{
get { throw new NotImplementedException(); }
}
Stream IWritableArchiveEntry.Stream
{
get
{
return stream;
}
}
public override Stream OpenEntryStream()
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return new NonDisposingStream(stream);
}
internal override void Close()
{
if (closeStream)
{
stream.Dispose();
}
}
}
}

View File

@@ -1,113 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Archive.Zip
{
internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
{
private readonly string path;
private readonly long size;
private readonly DateTime? lastModified;
private readonly bool closeStream;
private readonly Stream stream;
private bool isDisposed;
internal ZipWritableArchiveEntry(ZipArchive archive, Stream stream, string path, long size,
DateTime? lastModified, bool closeStream)
: base(archive, null)
{
this.stream = stream;
this.path = path;
this.size = size;
this.lastModified = lastModified;
this.closeStream = closeStream;
}
public override long Crc
{
get { return 0; }
}
public override string Key
{
get { return path; }
}
public override long CompressedSize
{
get { return 0; }
}
public override long Size
{
get { return size; }
}
public override DateTime? LastModifiedTime
{
get { return lastModified; }
}
public override DateTime? CreatedTime
{
get { return null; }
}
public override DateTime? LastAccessedTime
{
get { return null; }
}
public override DateTime? ArchivedTime
{
get { return null; }
}
public override bool IsEncrypted
{
get { return false; }
}
public override bool IsDirectory
{
get { return false; }
}
public override bool IsSplit
{
get { return false; }
}
internal override IEnumerable<FilePart> Parts
{
get { throw new NotImplementedException(); }
}
Stream IWritableArchiveEntry.Stream
{
get
{
return stream;
}
}
public override Stream OpenEntryStream()
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return new NonDisposingStream(stream);
}
internal override void Close()
{
if (closeStream && !isDisposed)
{
stream.Dispose();
isDisposed = true;
}
}
}
}

View File

@@ -3,9 +3,9 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.Reader;
using SharpCompress.Readers;
namespace SharpCompress.Archive
namespace SharpCompress.Archives
{
public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtractionListener
where TEntry : IArchiveEntry
@@ -20,31 +20,33 @@ namespace SharpCompress.Archive
public event EventHandler<CompressedBytesReadEventArgs> CompressedBytesRead;
public event EventHandler<FilePartExtractionBeginEventArgs> FilePartExtractionBegin;
protected string Password { get; private set; }
protected ReaderOptions ReaderOptions { get; }
private bool disposed;
#if !NO_FILE
internal AbstractArchive(ArchiveType type, FileInfo fileInfo, Options options, string password)
internal AbstractArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerOptions)
{
Type = type;
Password = password;
if (!fileInfo.Exists)
{
throw new ArgumentException("File does not exist: " + fileInfo.FullName);
}
options = (Options) FlagUtility.SetFlag(options, Options.KeepStreamsOpen, false);
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(fileInfo, options));
ReaderOptions = readerOptions;
readerOptions.LeaveStreamOpen = false;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(fileInfo));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
}
protected abstract IEnumerable<TVolume> LoadVolumes(FileInfo file, Options options);
protected abstract IEnumerable<TVolume> LoadVolumes(FileInfo file);
#endif
internal AbstractArchive(ArchiveType type, IEnumerable<Stream> streams, Options options, string password)
internal AbstractArchive(ArchiveType type, IEnumerable<Stream> streams, ReaderOptions readerOptions)
{
Type = type;
Password = password;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(streams.Select(CheckStreams), options));
ReaderOptions = readerOptions;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(streams.Select(CheckStreams)));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
}
@@ -54,7 +56,8 @@ namespace SharpCompress.Archive
lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
}
public ArchiveType Type { get; private set; }
public ArchiveType Type { get; }
void IArchiveExtractionListener.FireEntryExtractionBegin(IArchiveEntry entry)
{
@@ -84,49 +87,29 @@ namespace SharpCompress.Archive
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
/// </summary>
public virtual ICollection<TEntry> Entries
{
get { return lazyEntries; }
}
public virtual ICollection<TEntry> Entries { get { return lazyEntries; } }
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveVolumes across the one or many parts of the RarArchive.
/// </summary>
public ICollection<TVolume> Volumes
{
get { return lazyVolumes; }
}
public ICollection<TVolume> Volumes { get { return lazyVolumes; } }
/// <summary>
/// The total size of the files compressed in the archive.
/// </summary>
public virtual long TotalSize
{
get { return Entries.Aggregate(0L, (total, cf) => total + cf.CompressedSize); }
}
public virtual long TotalSize { get { return Entries.Aggregate(0L, (total, cf) => total + cf.CompressedSize); } }
/// <summary>
/// The total size of the files as uncompressed in the archive.
/// </summary>
public virtual long TotalUncompressSize
{
get { return Entries.Aggregate(0L, (total, cf) => total + cf.Size); }
}
public virtual long TotalUncompressSize { get { return Entries.Aggregate(0L, (total, cf) => total + cf.Size); } }
protected abstract IEnumerable<TVolume> LoadVolumes(IEnumerable<Stream> streams, Options options);
protected abstract IEnumerable<TVolume> LoadVolumes(IEnumerable<Stream> streams);
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);
IEnumerable<IArchiveEntry> IArchive.Entries
{
get { return Entries.Cast<IArchiveEntry>(); }
}
IEnumerable<IArchiveEntry> IArchive.Entries { get { return Entries.Cast<IArchiveEntry>(); } }
IEnumerable<IVolume> IArchive.Volumes
{
get { return lazyVolumes.Cast<IVolume>(); }
}
private bool disposed;
IEnumerable<IVolume> IArchive.Volumes { get { return lazyVolumes.Cast<IVolume>(); } }
public virtual void Dispose()
{
@@ -148,11 +131,11 @@ namespace SharpCompress.Archive
{
if (CompressedBytesRead != null)
{
CompressedBytesRead(this, new CompressedBytesReadEventArgs()
{
CurrentFilePartCompressedBytesRead = currentPartCompressedBytes,
CompressedBytesRead = compressedReadBytes
});
CompressedBytesRead(this, new CompressedBytesReadEventArgs
{
CurrentFilePartCompressedBytesRead = currentPartCompressedBytes,
CompressedBytesRead = compressedReadBytes
});
}
}
@@ -160,12 +143,12 @@ namespace SharpCompress.Archive
{
if (FilePartExtractionBegin != null)
{
FilePartExtractionBegin(this, new FilePartExtractionBeginEventArgs()
{
CompressedSize = compressedSize,
Size = size,
Name = name,
});
FilePartExtractionBegin(this, new FilePartExtractionBeginEventArgs
{
CompressedSize = compressedSize,
Size = size,
Name = name
});
}
}
@@ -191,11 +174,7 @@ namespace SharpCompress.Archive
/// <summary>
/// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
/// </summary>
public virtual bool IsSolid
{
get { return false; }
}
public virtual bool IsSolid { get { return false; } }
/// <summary>
/// The archive can find all the parts of the archive needed to fully extract the archive. This forces the parsing of the entire archive.

View File

@@ -3,8 +3,10 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Writers;
namespace SharpCompress.Archive
namespace SharpCompress.Archives
{
public abstract class AbstractWritableArchive<TEntry, TVolume> : AbstractArchive<TEntry, TVolume>, IWritableArchive
where TEntry : IArchiveEntry
@@ -21,14 +23,14 @@ namespace SharpCompress.Archive
{
}
internal AbstractWritableArchive(ArchiveType type, Stream stream, Options options)
: base(type, stream.AsEnumerable(), options, null)
internal AbstractWritableArchive(ArchiveType type, Stream stream, ReaderOptions readerFactoryOptions)
: base(type, stream.AsEnumerable(), readerFactoryOptions)
{
}
#if !NO_FILE
internal AbstractWritableArchive(ArchiveType type, FileInfo fileInfo, Options options)
: base(type, fileInfo, options, null)
internal AbstractWritableArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerFactoryOptions)
: base(type, fileInfo, readerFactoryOptions)
{
}
#endif
@@ -53,10 +55,7 @@ namespace SharpCompress.Archive
modifiedEntries.AddRange(OldEntries.Concat(newEntries));
}
private IEnumerable<TEntry> OldEntries
{
get { return base.Entries.Where(x => !removedEntries.Contains(x)); }
}
private IEnumerable<TEntry> OldEntries { get { return base.Entries.Where(x => !removedEntries.Contains(x)); } }
public void RemoveEntry(TEntry entry)
{
@@ -66,25 +65,25 @@ namespace SharpCompress.Archive
RebuildModifiedCollection();
}
}
void IWritableArchive.RemoveEntry(IArchiveEntry entry)
{
RemoveEntry((TEntry)entry);
}
public TEntry AddEntry(string key, Stream source,
long size = 0, DateTime? modified = null)
long size = 0, DateTime? modified = null)
{
return AddEntry(key, source, false, size, modified);
}
IArchiveEntry IWritableArchive.AddEntry(string key, Stream source, bool closeStream, long size, DateTime? modified)
{
return AddEntry(key, source, closeStream, size, modified);
}
public TEntry AddEntry(string key, Stream source, bool closeStream,
long size = 0, DateTime? modified = null)
long size = 0, DateTime? modified = null)
{
if (key.StartsWith("/")
|| key.StartsWith("\\"))
@@ -105,7 +104,7 @@ namespace SharpCompress.Archive
{
foreach (var path in Entries.Select(x => x.Key))
{
var p = path.Replace('/','\\');
var p = path.Replace('/', '\\');
if (p.StartsWith("\\"))
{
p = p.Substring(1);
@@ -115,15 +114,15 @@ namespace SharpCompress.Archive
return false;
}
public void SaveTo(Stream stream, CompressionInfo compressionType)
public void SaveTo(Stream stream, WriterOptions options)
{
//reset streams of new entries
newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
SaveTo(stream, compressionType, OldEntries, newEntries);
SaveTo(stream, options, OldEntries, newEntries);
}
protected TEntry CreateEntry(string key, Stream source, long size, DateTime? modified,
bool closeStream)
bool closeStream)
{
if (!source.CanRead || !source.CanSeek)
{
@@ -133,10 +132,9 @@ namespace SharpCompress.Archive
}
protected abstract TEntry CreateEntryInternal(string key, Stream source, long size, DateTime? modified,
bool closeStream);
bool closeStream);
protected abstract void SaveTo(Stream stream, CompressionInfo compressionType,
IEnumerable<TEntry> oldEntries, IEnumerable<TEntry> newEntries);
protected abstract void SaveTo(Stream stream, WriterOptions options, IEnumerable<TEntry> oldEntries, IEnumerable<TEntry> newEntries);
public override void Dispose()
{

View File

@@ -1,13 +1,14 @@
using System;
using System.IO;
using SharpCompress.Archive.GZip;
using SharpCompress.Archive.Rar;
using SharpCompress.Archive.SevenZip;
using SharpCompress.Archive.Tar;
using SharpCompress.Archive.Zip;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Rar;
using SharpCompress.Archives.SevenZip;
using SharpCompress.Archives.Tar;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archive
namespace SharpCompress.Archives
{
public class ArchiveFactory
{
@@ -15,44 +16,44 @@ namespace SharpCompress.Archive
/// Opens an Archive for random access
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
/// <param name="readerOptions"></param>
/// <returns></returns>
public static IArchive Open(Stream stream, Options options = Options.KeepStreamsOpen)
public static IArchive Open(Stream stream, ReaderOptions readerOptions = null)
{
stream.CheckNotNull("stream");
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException("Stream should be readable and seekable");
}
readerOptions = readerOptions ?? new ReaderOptions();
if (ZipArchive.IsZipFile(stream, null))
{
stream.Seek(0, SeekOrigin.Begin);
return ZipArchive.Open(stream, options, null);
return ZipArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (SevenZipArchive.IsSevenZipFile(stream))
{
stream.Seek(0, SeekOrigin.Begin);
return SevenZipArchive.Open(stream, options);
return SevenZipArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (GZipArchive.IsGZipFile(stream))
{
stream.Seek(0, SeekOrigin.Begin);
return GZipArchive.Open(stream, options);
return GZipArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (RarArchive.IsRarFile(stream, options))
if (RarArchive.IsRarFile(stream, readerOptions))
{
stream.Seek(0, SeekOrigin.Begin);
return RarArchive.Open(stream, options);
stream.Seek(0, SeekOrigin.Begin);
return RarArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (TarArchive.IsTarFile(stream))
{
stream.Seek(0, SeekOrigin.Begin);
return TarArchive.Open(stream, options);
return TarArchive.Open(stream, readerOptions);
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
}
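For orientation, a minimal sketch of calling the consolidated Open overload above. It only uses signatures visible in this change; the archive file name is illustrative, LeaveStreamOpen is shown being assigned elsewhere in this diff, and Password is assumed to be a settable property (only a read of it appears here).

using System;
using System.IO;
using SharpCompress.Archives;
using SharpCompress.Readers;

using (Stream stream = File.OpenRead("archive.rar"))                  // illustrative path
using (IArchive archive = ArchiveFactory.Open(stream, new ReaderOptions
{
    Password = "secret",      // replaces the old string password parameter
    LeaveStreamOpen = true    // replaces Options.KeepStreamsOpen
}))
{
    foreach (IArchiveEntry entry in archive.Entries)
    {
        Console.WriteLine($"{entry.Key}: {entry.Size} bytes");
    }
}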
@@ -62,52 +63,35 @@ namespace SharpCompress.Archive
switch (type)
{
case ArchiveType.Zip:
{
return ZipArchive.Create();
}
{
return ZipArchive.Create();
}
case ArchiveType.Tar:
{
return TarArchive.Create();
}
{
return TarArchive.Create();
}
case ArchiveType.GZip:
{
return GZipArchive.Create();
}
{
return GZipArchive.Create();
}
default:
{
throw new NotSupportedException("Cannot create Archives of type: " + type);
}
{
throw new NotSupportedException("Cannot create Archives of type: " + type);
}
}
}
#if !NO_FILE
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
public static IArchive Open(string filePath)
{
return Open(filePath, Options.None);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
public static IArchive Open(FileInfo fileInfo)
{
return Open(fileInfo, Options.None);
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
public static IArchive Open(string filePath, Options options)
public static IArchive Open(string filePath, ReaderOptions options = null)
{
filePath.CheckNotNullOrEmpty("filePath");
return Open(new FileInfo(filePath), options);
return Open(new FileInfo(filePath), options ?? new ReaderOptions());
}
/// <summary>
@@ -115,15 +99,16 @@ namespace SharpCompress.Archive
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
public static IArchive Open(FileInfo fileInfo, Options options)
public static IArchive Open(FileInfo fileInfo, ReaderOptions options = null)
{
fileInfo.CheckNotNull("fileInfo");
options = options ?? new ReaderOptions();
using (var stream = fileInfo.OpenRead())
{
if (ZipArchive.IsZipFile(stream, null))
{
stream.Dispose();
return ZipArchive.Open(fileInfo, options, null);
return ZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (SevenZipArchive.IsSevenZipFile(stream))
@@ -157,7 +142,7 @@ namespace SharpCompress.Archive
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(string sourceArchive, string destinationDirectory,
ExtractOptions options = ExtractOptions.Overwrite)
ExtractionOptions options = null)
{
using (IArchive archive = Open(sourceArchive))
{

View File

@@ -4,11 +4,12 @@ using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
using SharpCompress.Reader;
using SharpCompress.Reader.GZip;
using SharpCompress.Writer.GZip;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
using SharpCompress.Writers;
using SharpCompress.Writers.GZip;
namespace SharpCompress.Archive.GZip
namespace SharpCompress.Archives.GZip
{
public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
{
@@ -17,62 +18,33 @@ namespace SharpCompress.Archive.GZip
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
public static GZipArchive Open(string filePath)
{
return Open(filePath, Options.None);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
public static GZipArchive Open(FileInfo fileInfo)
{
return Open(fileInfo, Options.None);
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
public static GZipArchive Open(string filePath, Options options)
/// <param name="readerOptions"></param>
public static GZipArchive Open(string filePath, ReaderOptions readerOptions = null)
{
filePath.CheckNotNullOrEmpty("filePath");
return Open(new FileInfo(filePath), options);
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
public static GZipArchive Open(FileInfo fileInfo, Options options)
/// <param name="readerOptions"></param>
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
{
fileInfo.CheckNotNull("fileInfo");
return new GZipArchive(fileInfo, options);
return new GZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
#endif
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
public static GZipArchive Open(Stream stream)
/// <param name="readerOptions"></param>
public static GZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
{
stream.CheckNotNull("stream");
return Open(stream, Options.None);
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
public static GZipArchive Open(Stream stream, Options options)
{
stream.CheckNotNull("stream");
return new GZipArchive(stream, options);
return new GZipArchive(stream, readerOptions ?? new ReaderOptions());
}
public static GZipArchive Create()
@@ -81,19 +53,20 @@ namespace SharpCompress.Archive.GZip
}
#if !NO_FILE
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
internal GZipArchive(FileInfo fileInfo, Options options)
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
internal GZipArchive(FileInfo fileInfo, ReaderOptions options)
: base(ArchiveType.GZip, fileInfo, options)
{
}
protected override IEnumerable<GZipVolume> LoadVolumes(FileInfo file, Options options)
protected override IEnumerable<GZipVolume> LoadVolumes(FileInfo file)
{
return new GZipVolume(file, options).AsEnumerable();
return new GZipVolume(file, ReaderOptions).AsEnumerable();
}
public static bool IsGZipFile(string filePath)
@@ -122,7 +95,7 @@ namespace SharpCompress.Archive.GZip
{
using (var stream = fileInfo.Open(FileMode.Create, FileAccess.Write))
{
SaveTo(stream);
SaveTo(stream, new WriterOptions(CompressionType.GZip));
}
}
#endif
@@ -135,13 +108,19 @@ namespace SharpCompress.Archive.GZip
// workitem 8501: handle edge case (decompress empty stream)
if (n == 0)
{
return false;
}
if (n != 10)
{
return false;
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
return false;
}
return true;
}
@@ -151,7 +130,7 @@ namespace SharpCompress.Archive.GZip
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
internal GZipArchive(Stream stream, Options options)
internal GZipArchive(Stream stream, ReaderOptions options)
: base(ArchiveType.GZip, stream, options)
{
}
@@ -161,13 +140,8 @@ namespace SharpCompress.Archive.GZip
{
}
public void SaveTo(Stream stream)
{
SaveTo(stream, CompressionType.GZip);
}
protected override GZipArchiveEntry CreateEntryInternal(string filePath, Stream source, long size, DateTime? modified,
bool closeStream)
bool closeStream)
{
if (Entries.Any())
{
@@ -176,7 +150,7 @@ namespace SharpCompress.Archive.GZip
return new GZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
}
protected override void SaveTo(Stream stream, CompressionInfo compressionInfo,
protected override void SaveTo(Stream stream, WriterOptions options,
IEnumerable<GZipArchiveEntry> oldEntries,
IEnumerable<GZipArchiveEntry> newEntries)
{
@@ -197,9 +171,9 @@ namespace SharpCompress.Archive.GZip
}
}
protected override IEnumerable<GZipVolume> LoadVolumes(IEnumerable<Stream> streams, Options options)
protected override IEnumerable<GZipVolume> LoadVolumes(IEnumerable<Stream> streams)
{
return new GZipVolume(streams.First(), options).AsEnumerable();
return new GZipVolume(streams.First(), ReaderOptions).AsEnumerable();
}
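A short sketch of the writable side after this change: the parameterless SaveTo(Stream) convenience is removed, so callers pass a WriterOptions built from a CompressionType, matching the SaveTo(fileInfo) body above. File and entry names are illustrative.

using System.IO;
using SharpCompress.Archives.GZip;
using SharpCompress.Common;
using SharpCompress.Writers;

using (var archive = GZipArchive.Create())
using (Stream source = File.OpenRead("data.bin"))
{
    // GZip holds a single entry; the AddEntry overload comes from AbstractWritableArchive.
    archive.AddEntry("data.bin", source, closeStream: false);
    using (Stream output = File.Create("data.bin.gz"))
    {
        archive.SaveTo(output, new WriterOptions(CompressionType.GZip));
    }
}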
protected override IEnumerable<GZipArchiveEntry> LoadEntries(IEnumerable<GZipVolume> volumes)

View File

@@ -2,11 +2,10 @@
using System.Linq;
using SharpCompress.Common.GZip;
namespace SharpCompress.Archive.GZip
namespace SharpCompress.Archives.GZip
{
public class GZipArchiveEntry : GZipEntry, IArchiveEntry
{
internal GZipArchiveEntry(GZipArchive archive, GZipFilePart part)
: base(part)
{
@@ -19,12 +18,10 @@ namespace SharpCompress.Archive.GZip
}
#region IArchiveEntry Members
public IArchive Archive { get; private set; }
public bool IsComplete
{
get { return true; }
}
public IArchive Archive { get; }
public bool IsComplete { get { return true; } }
#endregion
}

View File

@@ -0,0 +1,66 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Archives.GZip
{
internal class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArchiveEntry
{
private readonly bool closeStream;
private readonly Stream stream;
internal GZipWritableArchiveEntry(GZipArchive archive, Stream stream,
string path, long size, DateTime? lastModified, bool closeStream)
: base(archive, null)
{
this.stream = stream;
Key = path;
Size = size;
LastModifiedTime = lastModified;
this.closeStream = closeStream;
}
public override long Crc { get { return 0; } }
public override string Key { get; }
public override long CompressedSize { get { return 0; } }
public override long Size { get; }
public override DateTime? LastModifiedTime { get; }
public override DateTime? CreatedTime { get { return null; } }
public override DateTime? LastAccessedTime { get { return null; } }
public override DateTime? ArchivedTime { get { return null; } }
public override bool IsEncrypted { get { return false; } }
public override bool IsDirectory { get { return false; } }
public override bool IsSplit { get { return false; } }
internal override IEnumerable<FilePart> Parts { get { throw new NotImplementedException(); } }
Stream IWritableArchiveEntry.Stream { get { return stream; } }
public override Stream OpenEntryStream()
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return new NonDisposingStream(stream);
}
internal override void Close()
{
if (closeStream)
{
stream.Dispose();
}
}
}
}

View File

@@ -1,9 +1,9 @@
using System;
using System.Collections.Generic;
using SharpCompress.Common;
using SharpCompress.Reader;
using SharpCompress.Readers;
namespace SharpCompress.Archive
namespace SharpCompress.Archives
{
public interface IArchive : IDisposable
{

View File

@@ -1,7 +1,7 @@
using System.IO;
using SharpCompress.Common;
namespace SharpCompress.Archive
namespace SharpCompress.Archives
{
public interface IArchiveEntry : IEntry
{

View File

@@ -1,8 +1,9 @@
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archive
namespace SharpCompress.Archives
{
public static class IArchiveEntryExtensions
{
@@ -28,25 +29,33 @@ namespace SharpCompress.Archive
return;
}
using (entryStream)
using (Stream s = new ListeningStream(streamListener, entryStream))
{
s.TransferTo(streamToWriteTo);
using (Stream s = new ListeningStream(streamListener, entryStream))
{
s.TransferTo(streamToWriteTo);
}
}
streamListener.FireEntryExtractionEnd(archiveEntry);
}
#if !NO_FILE
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(this IArchiveEntry entry, string destinationDirectory,
ExtractOptions options = ExtractOptions.Overwrite)
ExtractionOptions options = null)
{
string destinationFileName;
string file = Path.GetFileName(entry.Key);
options = options ?? new ExtractionOptions()
{
Overwrite = true
};
if (options.HasFlag(ExtractOptions.ExtractFullPath))
if (options.ExtractFullPath)
{
string folder = Path.GetDirectoryName(entry.Key);
string destdir = Path.Combine(destinationDirectory, folder);
@@ -70,11 +79,16 @@ namespace SharpCompress.Archive
/// Extract to specific file
/// </summary>
public static void WriteToFile(this IArchiveEntry entry, string destinationFileName,
ExtractOptions options = ExtractOptions.Overwrite)
ExtractionOptions options = null)
{
FileMode fm = FileMode.Create;
options = options ?? new ExtractionOptions()
{
Overwrite = true
};
if (!options.HasFlag(ExtractOptions.Overwrite))
if (!options.Overwrite)
{
fm = FileMode.CreateNew;
}
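A sketch of entry extraction with the new ExtractionOptions class that replaces the ExtractOptions flags. Only Overwrite and ExtractFullPath are visible in this diff, and the namespace import for ExtractionOptions is assumed to be SharpCompress.Common; the paths are illustrative.

using SharpCompress.Archives;
using SharpCompress.Common;

using (var archive = ArchiveFactory.Open("archive.zip"))
{
    foreach (var entry in archive.Entries)
    {
        if (!entry.IsDirectory)
        {
            entry.WriteToDirectory("output", new ExtractionOptions
            {
                ExtractFullPath = true, // recreate the folder structure, as handled above
                Overwrite = true        // the default applied when options is null
            });
        }
    }
}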

View File

@@ -1,16 +1,20 @@
using System.Linq;
using SharpCompress.Common;
#if !NO_FILE
using System.Linq;
using SharpCompress.Readers;
namespace SharpCompress.Archive
#endif
namespace SharpCompress.Archives
{
public static class IArchiveExtensions
{
#if !NO_FILE
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(this IArchive archive, string destinationDirectory,
ExtractOptions options = ExtractOptions.Overwrite)
ExtractionOptions options = null)
{
foreach (IArchiveEntry entry in archive.Entries.Where(x => !x.IsDirectory))
{

View File

@@ -1,6 +1,6 @@
using SharpCompress.Common;
namespace SharpCompress.Archive
namespace SharpCompress.Archives
{
internal interface IArchiveExtractionListener : IExtractionListener
{

View File

@@ -1,8 +1,8 @@
using System;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Writers;
namespace SharpCompress.Archive
namespace SharpCompress.Archives
{
public interface IWritableArchive : IArchive
{
@@ -10,6 +10,6 @@ namespace SharpCompress.Archive
IArchiveEntry AddEntry(string key, Stream source, bool closeStream, long size = 0, DateTime? modified = null);
void SaveTo(Stream stream, CompressionInfo compressionType);
void SaveTo(Stream stream, WriterOptions options);
}
}

View File

@@ -1,6 +1,6 @@
using System.IO;
namespace SharpCompress.Archive
namespace SharpCompress.Archives
{
internal interface IWritableArchiveEntry
{

View File

@@ -1,17 +1,13 @@
using System;
#if !NO_FILE
using System;
#endif
using System.IO;
using SharpCompress.Common;
using SharpCompress.Writers;
namespace SharpCompress.Archive
namespace SharpCompress.Archives
{
public static class IWritableArchiveExtensions
{
public static void SaveTo(this IWritableArchive writableArchive,
Stream stream, CompressionType compressionType)
{
writableArchive.SaveTo(stream, new CompressionInfo {Type = compressionType});
}
#if !NO_FILE
public static void AddEntry(this IWritableArchive writableArchive,
@@ -26,33 +22,16 @@ namespace SharpCompress.Archive
fileInfo.LastWriteTime);
}
public static void SaveTo(this IWritableArchive writableArchive,
string filePath, CompressionType compressionType)
public static void SaveTo(this IWritableArchive writableArchive, string filePath, WriterOptions options)
{
writableArchive.SaveTo(new FileInfo(filePath), new CompressionInfo {Type = compressionType});
writableArchive.SaveTo(new FileInfo(filePath), options);
}
public static void SaveTo(this IWritableArchive writableArchive,
FileInfo fileInfo, CompressionType compressionType)
public static void SaveTo(this IWritableArchive writableArchive, FileInfo fileInfo, WriterOptions options)
{
using (var stream = fileInfo.Open(FileMode.Create, FileAccess.Write))
{
writableArchive.SaveTo(stream, new CompressionInfo {Type = compressionType});
}
}
public static void SaveTo(this IWritableArchive writableArchive,
string filePath, CompressionInfo compressionInfo)
{
writableArchive.SaveTo(new FileInfo(filePath), compressionInfo);
}
public static void SaveTo(this IWritableArchive writableArchive,
FileInfo fileInfo, CompressionInfo compressionInfo)
{
using (var stream = fileInfo.Open(FileMode.Create, FileAccess.Write))
{
writableArchive.SaveTo(stream, compressionInfo);
writableArchive.SaveTo(stream, options);
}
}
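With the CompressionType/CompressionInfo overloads gone, saving goes through the two WriterOptions overloads kept above. A minimal sketch using only the signatures shown in this change; file names are illustrative.

using System.IO;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Writers;

using (var archive = ZipArchive.Create())
using (Stream source = File.OpenRead("report.txt"))
{
    archive.AddEntry("report.txt", source, closeStream: false);
    // Extension method defined above: opens the file and forwards the WriterOptions.
    archive.SaveTo("report.zip", new WriterOptions(CompressionType.Deflate));
}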

View File

@@ -1,42 +1,40 @@
#if !NO_FILE

#if !NO_FILE
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archive.Rar
namespace SharpCompress.Archives.Rar
{
/// <summary>
/// A rar part based on a FileInfo object
/// </summary>
internal class FileInfoRarArchiveVolume : RarVolume
{
internal FileInfoRarArchiveVolume(FileInfo fileInfo, string password, Options options)
: base(StreamingMode.Seekable, fileInfo.OpenRead(), password, FixOptions(options))
internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options)
: base(StreamingMode.Seekable, fileInfo.OpenRead(), FixOptions(options))
{
FileInfo = fileInfo;
FileParts = base.GetVolumeFileParts().ToReadOnly();
FileParts = GetVolumeFileParts().ToReadOnly();
}
private static Options FixOptions(Options options)
private static ReaderOptions FixOptions(ReaderOptions options)
{
//make sure we're closing streams with fileinfo
if (options.HasFlag(Options.KeepStreamsOpen))
{
options = (Options) FlagUtility.SetFlag(options, Options.KeepStreamsOpen, false);
}
options.LeaveStreamOpen = false;
return options;
}
internal ReadOnlyCollection<RarFilePart> FileParts { get; private set; }
internal ReadOnlyCollection<RarFilePart> FileParts { get; }
internal FileInfo FileInfo { get; private set; }
internal FileInfo FileInfo { get; }
internal override RarFilePart CreateFilePart(FileHeader fileHeader, MarkHeader markHeader)
{
return new FileInfoRarFilePart(this, markHeader, fileHeader, FileInfo);
return new FileInfoRarFilePart(this, ReaderOptions.Password, markHeader, fileHeader, FileInfo);
}
internal override IEnumerable<RarFilePart> ReadFileParts()

View File

@@ -1,19 +1,19 @@
#if !NO_FILE

#if !NO_FILE
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Archive.Rar
namespace SharpCompress.Archives.Rar
{
internal class FileInfoRarFilePart : SeekableFilePart
{
internal FileInfoRarFilePart(FileInfoRarArchiveVolume volume, MarkHeader mh, FileHeader fh, FileInfo fi)
: base(mh, fh, volume.Stream, volume.Password)
internal FileInfoRarFilePart(FileInfoRarArchiveVolume volume, string password, MarkHeader mh, FileHeader fh, FileInfo fi)
: base(mh, fh, volume.Stream, password)
{
FileInfo = fi;
}
internal FileInfo FileInfo { get; private set; }
internal FileInfo FileInfo { get; }
internal override string FilePartName
{

View File

@@ -1,6 +1,6 @@
using System.Linq;
namespace SharpCompress.Archive.Rar
namespace SharpCompress.Archives.Rar
{
public static class RarArchiveExtensions
{

View File

@@ -1,41 +1,35 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressor.Rar;
using SharpCompress.Compressors.Rar;
using SharpCompress.IO;
using SharpCompress.Reader;
using SharpCompress.Reader.Rar;
using SharpCompress.Readers;
using SharpCompress.Readers.Rar;
namespace SharpCompress.Archive.Rar
namespace SharpCompress.Archives.Rar
{
public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
{
private readonly Unpack unpack = new Unpack();
internal Unpack Unpack
{
get { return unpack; }
}
internal Unpack Unpack { get; } = new Unpack();
#if !NO_FILE
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
/// <param name="password"></param>
internal RarArchive(FileInfo fileInfo, Options options, string password)
: base(ArchiveType.Rar, fileInfo, options, password)
internal RarArchive(FileInfo fileInfo, ReaderOptions options)
: base(ArchiveType.Rar, fileInfo, options)
{
}
protected override IEnumerable<RarVolume> LoadVolumes(FileInfo file, Options options)
protected override IEnumerable<RarVolume> LoadVolumes(FileInfo file)
{
return RarArchiveVolumeFactory.GetParts(file, Password, options);
return RarArchiveVolumeFactory.GetParts(file, ReaderOptions);
}
#endif
@@ -44,9 +38,8 @@ namespace SharpCompress.Archive.Rar
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
/// <param name="password"></param>
internal RarArchive(IEnumerable<Stream> streams, Options options, string password)
: base(ArchiveType.Rar, streams, options, password)
internal RarArchive(IEnumerable<Stream> streams, ReaderOptions options)
: base(ArchiveType.Rar, streams, options)
{
}
@@ -55,36 +48,33 @@ namespace SharpCompress.Archive.Rar
return RarArchiveEntryFactory.GetEntries(this, volumes);
}
protected override IEnumerable<RarVolume> LoadVolumes(IEnumerable<Stream> streams, Options options)
protected override IEnumerable<RarVolume> LoadVolumes(IEnumerable<Stream> streams)
{
return RarArchiveVolumeFactory.GetParts(streams, Password, options);
return RarArchiveVolumeFactory.GetParts(streams, ReaderOptions);
}
protected override IReader CreateReaderForSolidExtraction()
{
var stream = Volumes.First().Stream;
stream.Position = 0;
return RarReader.Open(stream, Password);
return RarReader.Open(stream, ReaderOptions);
}
public override bool IsSolid
{
get { return Volumes.First().IsSolidArchive; }
}
public override bool IsSolid { get { return Volumes.First().IsSolidArchive; } }
#region Creation
#if !NO_FILE
/// <summary>
/// Constructor expects a filepath to an existing file.
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
/// <param name="password"></param>
public static RarArchive Open(string filePath, Options options = Options.None, string password = null)
public static RarArchive Open(string filePath, ReaderOptions options = null)
{
filePath.CheckNotNullOrEmpty("filePath");
return Open(new FileInfo(filePath), options, password);
return new RarArchive(new FileInfo(filePath), options ?? new ReaderOptions());
}
/// <summary>
@@ -92,23 +82,22 @@ namespace SharpCompress.Archive.Rar
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
/// <param name="password"></param>
public static RarArchive Open(FileInfo fileInfo, Options options = Options.None, string password = null)
public static RarArchive Open(FileInfo fileInfo, ReaderOptions options = null)
{
fileInfo.CheckNotNull("fileInfo");
return new RarArchive(fileInfo, options, password);
return new RarArchive(fileInfo, options ?? new ReaderOptions());
}
#endif
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
/// <param name="password"></param>
public static RarArchive Open(Stream stream, Options options = Options.KeepStreamsOpen, string password = null)
public static RarArchive Open(Stream stream, ReaderOptions options = null)
{
stream.CheckNotNull("stream");
return Open(stream.AsEnumerable(), options, password);
return Open(stream.AsEnumerable(), options ?? new ReaderOptions());
}
/// <summary>
@@ -116,11 +105,10 @@ namespace SharpCompress.Archive.Rar
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
/// <param name="password"></param>
public static RarArchive Open(IEnumerable<Stream> streams, Options options = Options.KeepStreamsOpen, string password = null)
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions options = null)
{
streams.CheckNotNull("streams");
return new RarArchive(streams, options, password);
return new RarArchive(streams, options ?? new ReaderOptions());
}
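A sketch of the multi-volume overload above, which now carries the password inside ReaderOptions instead of a separate parameter. The part names are illustrative and Password is assumed to be settable; note that with streams every part must be supplied explicitly, whereas the FileInfo path (see RarArchiveVolumeFactory below) walks sibling volume files itself.

using System;
using System.IO;
using System.Linq;
using SharpCompress.Archives.Rar;
using SharpCompress.Readers;

var parts = new[] { "backup.part1.rar", "backup.part2.rar" };
using (var archive = RarArchive.Open(parts.Select(p => File.OpenRead(p)),
                                     new ReaderOptions { Password = "secret" }))
{
    foreach (var entry in archive.Entries)
    {
        // IsComplete is false for an entry whose remaining parts live in a missing volume.
        Console.WriteLine($"{entry.Key} (complete: {entry.IsComplete})");
    }
}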
#if !NO_FILE
@@ -141,17 +129,12 @@ namespace SharpCompress.Archive.Rar
}
}
#endif
public static bool IsRarFile(Stream stream)
{
return IsRarFile(stream, Options.None);
}
public static bool IsRarFile(Stream stream, Options options)
public static bool IsRarFile(Stream stream, ReaderOptions options = null)
{
try
{
var headerFactory = new RarHeaderFactory(StreamingMode.Seekable, options);
var headerFactory = new RarHeaderFactory(StreamingMode.Seekable, options ?? new ReaderOptions());
var markHeader = headerFactory.ReadHeaders(stream).FirstOrDefault() as MarkHeader;
return markHeader != null && markHeader.IsValid();
}

View File

@@ -5,9 +5,9 @@ using System.Linq;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressor.Rar;
using SharpCompress.Compressors.Rar;
namespace SharpCompress.Archive.Rar
namespace SharpCompress.Archives.Rar
{
public class RarArchiveEntry : RarEntry, IArchiveEntry
{
@@ -20,28 +20,13 @@ namespace SharpCompress.Archive.Rar
this.archive = archive;
}
public override CompressionType CompressionType
{
get { return CompressionType.Rar; }
}
public override CompressionType CompressionType { get { return CompressionType.Rar; } }
public IArchive Archive
{
get
{
return archive;
}
}
public IArchive Archive { get { return archive; } }
internal override IEnumerable<FilePart> Parts
{
get { return parts.Cast<FilePart>(); }
}
internal override IEnumerable<FilePart> Parts { get { return parts.Cast<FilePart>(); } }
internal override FileHeader FileHeader
{
get { return parts.First().FileHeader; }
}
internal override FileHeader FileHeader { get { return parts.First().FileHeader; } }
public override long Crc
{
@@ -49,11 +34,10 @@ namespace SharpCompress.Archive.Rar
{
CheckIncomplete();
return parts.Select(fp => fp.FileHeader)
.Single(fh => !fh.FileFlags.HasFlag(FileFlags.SPLIT_AFTER)).FileCRC;
.Single(fh => !fh.FileFlags.HasFlag(FileFlags.SPLIT_AFTER)).FileCRC;
}
}
public override long Size
{
get
@@ -81,10 +65,7 @@ namespace SharpCompress.Archive.Rar
return new RarStream(archive.Unpack, FileHeader, new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive));
}
public bool IsComplete
{
get { return parts.Select(fp => fp.FileHeader).Any(fh => !fh.FileFlags.HasFlag(FileFlags.SPLIT_AFTER)); }
}
public bool IsComplete { get { return parts.Select(fp => fp.FileHeader).Any(fh => !fh.FileFlags.HasFlag(FileFlags.SPLIT_AFTER)); } }
private void CheckIncomplete()
{

View File

@@ -3,7 +3,7 @@ using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Archive.Rar
namespace SharpCompress.Archives.Rar
{
internal static class RarArchiveEntryFactory
{
@@ -25,7 +25,7 @@ namespace SharpCompress.Archive.Rar
{
groupedParts.Add(fp);
if (!FlagUtility.HasFlag((long) fp.FileHeader.FileFlags, (long) FileFlags.SPLIT_AFTER))
if (!FlagUtility.HasFlag((long)fp.FileHeader.FileFlags, (long)FileFlags.SPLIT_AFTER))
{
yield return groupedParts;
groupedParts = new List<RarFilePart>();

View File

@@ -1,17 +1,20 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Readers;
#if !NO_FILE
using System.Linq;
using System.Text;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
#endif
namespace SharpCompress.Archive.Rar
namespace SharpCompress.Archives.Rar
{
internal static class RarArchiveVolumeFactory
{
internal static IEnumerable<RarVolume> GetParts(IEnumerable<Stream> streams, string password, Options options)
internal static IEnumerable<RarVolume> GetParts(IEnumerable<Stream> streams, ReaderOptions options)
{
foreach (Stream s in streams)
{
@@ -19,15 +22,15 @@ namespace SharpCompress.Archive.Rar
{
throw new ArgumentException("Stream is not readable and seekable");
}
StreamRarArchiveVolume part = new StreamRarArchiveVolume(s, password, options);
StreamRarArchiveVolume part = new StreamRarArchiveVolume(s, options);
yield return part;
}
}
#if !NO_FILE
internal static IEnumerable<RarVolume> GetParts(FileInfo fileInfo, string password, Options options)
internal static IEnumerable<RarVolume> GetParts(FileInfo fileInfo, ReaderOptions options)
{
FileInfoRarArchiveVolume part = new FileInfoRarArchiveVolume(fileInfo, password, options);
FileInfoRarArchiveVolume part = new FileInfoRarArchiveVolume(fileInfo, options);
yield return part;
if (!part.ArchiveHeader.ArchiveHeaderFlags.HasFlag(ArchiveFlags.VOLUME))
@@ -39,7 +42,7 @@ namespace SharpCompress.Archive.Rar
//we use fileinfo because rar is dumb and looks at file names rather than archive info for another volume
while (fileInfo != null && fileInfo.Exists)
{
part = new FileInfoRarArchiveVolume(fileInfo, password, options);
part = new FileInfoRarArchiveVolume(fileInfo, options);
fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart);
yield return part;

View File

@@ -1,9 +1,8 @@
using System;
using System.IO;
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Archive.Rar
namespace SharpCompress.Archives.Rar
{
internal class SeekableFilePart : RarFilePart
{
@@ -29,9 +28,6 @@ namespace SharpCompress.Archive.Rar
return stream;
}
internal override string FilePartName
{
get { return "Unknown Stream - File Entry: " + FileHeader.FileName; }
}
internal override string FilePartName { get { return "Unknown Stream - File Entry: " + FileHeader.FileName; } }
}
}

View File

@@ -1,16 +1,16 @@
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archive.Rar
namespace SharpCompress.Archives.Rar
{
internal class StreamRarArchiveVolume : RarVolume
{
internal StreamRarArchiveVolume(Stream stream, string password, Options options)
: base(StreamingMode.Seekable, stream, password, options)
internal StreamRarArchiveVolume(Stream stream, ReaderOptions options)
: base(StreamingMode.Seekable, stream, options)
{
}
@@ -21,7 +21,7 @@ namespace SharpCompress.Archive.Rar
internal override RarFilePart CreateFilePart(FileHeader fileHeader, MarkHeader markHeader)
{
return new SeekableFilePart(markHeader, fileHeader, Stream, Password);
return new SeekableFilePart(markHeader, fileHeader, Stream, ReaderOptions.Password);
}
}
}

View File

@@ -5,89 +5,57 @@ using System.Linq;
using SharpCompress.Common;
using SharpCompress.Common.SevenZip;
using SharpCompress.IO;
using SharpCompress.Reader;
using SharpCompress.Readers;
namespace SharpCompress.Archive.SevenZip
namespace SharpCompress.Archives.SevenZip
{
public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVolume>
{
private ArchiveDatabase database;
#if !NO_FILE
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
public static SevenZipArchive Open(string filePath)
{
return Open(filePath, Options.None);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
public static SevenZipArchive Open(FileInfo fileInfo)
{
return Open(fileInfo, Options.None);
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
public static SevenZipArchive Open(string filePath, Options options)
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(string filePath, ReaderOptions readerOptions = null)
{
filePath.CheckNotNullOrEmpty("filePath");
return Open(new FileInfo(filePath), options);
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
public static SevenZipArchive Open(FileInfo fileInfo, Options options)
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
{
fileInfo.CheckNotNull("fileInfo");
return new SevenZipArchive(fileInfo, options);
return new SevenZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
#endif
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
public static SevenZipArchive Open(Stream stream)
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
{
stream.CheckNotNull("stream");
return Open(stream, Options.None);
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
public static SevenZipArchive Open(Stream stream, Options options)
{
stream.CheckNotNull("stream");
return new SevenZipArchive(stream, options);
return new SevenZipArchive(stream, readerOptions ?? new ReaderOptions());
}
#if !NO_FILE
internal SevenZipArchive(FileInfo fileInfo, Options options)
: base(ArchiveType.SevenZip, fileInfo, options, null)
internal SevenZipArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.SevenZip, fileInfo, readerOptions)
{
}
protected override IEnumerable<SevenZipVolume> LoadVolumes(FileInfo file, Options options)
protected override IEnumerable<SevenZipVolume> LoadVolumes(FileInfo file)
{
if (FlagUtility.HasFlag(options, Options.KeepStreamsOpen))
{
options = (Options)FlagUtility.SetFlag(options, Options.KeepStreamsOpen, false);
}
return new SevenZipVolume(file.OpenRead(), options).AsEnumerable();
return new SevenZipVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
}
public static bool IsSevenZipFile(string filePath)
@@ -108,8 +76,8 @@ namespace SharpCompress.Archive.SevenZip
}
#endif
internal SevenZipArchive(Stream stream, Options options)
: base(ArchiveType.SevenZip, stream.AsEnumerable(), options, null)
internal SevenZipArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.SevenZip, stream.AsEnumerable(), readerOptions)
{
}
@@ -118,7 +86,7 @@ namespace SharpCompress.Archive.SevenZip
{
}
protected override IEnumerable<SevenZipVolume> LoadVolumes(IEnumerable<Stream> streams, Options options)
protected override IEnumerable<SevenZipVolume> LoadVolumes(IEnumerable<Stream> streams)
{
foreach (Stream s in streams)
{
@@ -126,7 +94,7 @@ namespace SharpCompress.Archive.SevenZip
{
throw new ArgumentException("Stream is not readable and seekable");
}
SevenZipVolume volume = new SevenZipVolume(s, options);
SevenZipVolume volume = new SevenZipVolume(s, ReaderOptions);
yield return volume;
}
}
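For context, a read-only usage sketch against the new signatures; 7Zip remains read-only here, and IsSolid (further down this file) hints that entries of a solid archive are cheapest to read in order. The file name is illustrative.

using System;
using System.IO;
using System.Linq;
using SharpCompress.Archives.SevenZip;

using (Stream stream = File.OpenRead("archive.7z"))
{
    if (SevenZipArchive.IsSevenZipFile(stream))
    {
        stream.Seek(0, SeekOrigin.Begin);
        using (var archive = SevenZipArchive.Open(stream))
        {
            Console.WriteLine($"Solid: {archive.IsSolid}, entries: {archive.Entries.Count()}");
        }
    }
}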
@@ -156,7 +124,6 @@ namespace SharpCompress.Archive.SevenZip
}
}
public static bool IsSevenZipFile(Stream stream)
{
try
@@ -169,7 +136,7 @@ namespace SharpCompress.Archive.SevenZip
}
}
private static readonly byte[] SIGNATURE = new byte[] {(byte) '7', (byte) 'z', 0xBC, 0xAF, 0x27, 0x1C};
private static readonly byte[] SIGNATURE = {(byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C};
private static bool SignatureMatch(Stream stream)
{
@@ -183,10 +150,7 @@ namespace SharpCompress.Archive.SevenZip
return new SevenZipReader(this);
}
public override bool IsSolid
{
get { return Entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder).Count() > 1; }
}
public override bool IsSolid { get { return Entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder).Count() > 1; } }
public override long TotalSize
{
@@ -205,16 +169,12 @@ namespace SharpCompress.Archive.SevenZip
private CFileItem currentItem;
internal SevenZipReader(SevenZipArchive archive)
: base(Options.KeepStreamsOpen, ArchiveType.SevenZip)
: base(new ReaderOptions(), ArchiveType.SevenZip)
{
this.archive = archive;
}
public override SevenZipVolume Volume
{
get { return archive.Volumes.Single(); }
}
public override SevenZipVolume Volume { get { return archive.Volumes.Single(); } }
internal override IEnumerable<SevenZipEntry> GetEntries(Stream stream)
{

View File

@@ -1,8 +1,7 @@
using System;
using System.IO;
using System.IO;
using SharpCompress.Common.SevenZip;
namespace SharpCompress.Archive.SevenZip
namespace SharpCompress.Archives.SevenZip
{
public class SevenZipArchiveEntry : SevenZipEntry, IArchiveEntry
{
@@ -16,19 +15,14 @@ namespace SharpCompress.Archive.SevenZip
{
return FilePart.GetCompressedStream();
}
public IArchive Archive { get; private set; }
public bool IsComplete
{
get { return true; }
}
public IArchive Archive { get; }
public bool IsComplete { get { return true; } }
/// <summary>
/// This is a 7Zip Anti item
/// </summary>
public bool IsAnti
{
get { return FilePart.Header.IsAnti; }
}
public bool IsAnti { get { return FilePart.Header.IsAnti; } }
}
}

View File

@@ -6,75 +6,49 @@ using SharpCompress.Common;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
using SharpCompress.Reader;
using SharpCompress.Reader.Tar;
using SharpCompress.Writer.Tar;
using SharpCompress.Readers;
using SharpCompress.Readers.Tar;
using SharpCompress.Writers;
using SharpCompress.Writers.Tar;
namespace SharpCompress.Archive.Tar
namespace SharpCompress.Archives.Tar
{
public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
{
#if !NO_FILE
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
public static TarArchive Open(string filePath)
{
return Open(filePath, Options.None);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
public static TarArchive Open(FileInfo fileInfo)
{
return Open(fileInfo, Options.None);
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
public static TarArchive Open(string filePath, Options options)
/// <param name="readerOptions"></param>
public static TarArchive Open(string filePath, ReaderOptions readerOptions = null)
{
filePath.CheckNotNullOrEmpty("filePath");
return Open(new FileInfo(filePath), options);
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
public static TarArchive Open(FileInfo fileInfo, Options options)
/// <param name="readerOptions"></param>
public static TarArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
{
fileInfo.CheckNotNull("fileInfo");
return new TarArchive(fileInfo, options);
return new TarArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
#endif
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
public static TarArchive Open(Stream stream)
/// <param name="readerOptions"></param>
public static TarArchive Open(Stream stream, ReaderOptions readerOptions = null)
{
stream.CheckNotNull("stream");
return Open(stream, Options.None);
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
public static TarArchive Open(Stream stream, Options options)
{
stream.CheckNotNull("stream");
return new TarArchive(stream, options);
return new TarArchive(stream, readerOptions ?? new ReaderOptions());
}
#if !NO_FILE
@@ -102,7 +76,7 @@ namespace SharpCompress.Archive.Tar
{
TarHeader tar = new TarHeader();
tar.Read(new BinaryReader(stream));
return tar.Name.Length > 0 && Enum.IsDefined(typeof (EntryType), tar.EntryType);
return tar.Name.Length > 0 && Enum.IsDefined(typeof(EntryType), tar.EntryType);
}
catch
{
@@ -111,23 +85,21 @@ namespace SharpCompress.Archive.Tar
}
#if !NO_FILE
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
internal TarArchive(FileInfo fileInfo, Options options)
: base(ArchiveType.Tar, fileInfo, options)
/// <param name="readerOptions"></param>
internal TarArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.Tar, fileInfo, readerOptions)
{
}
protected override IEnumerable<TarVolume> LoadVolumes(FileInfo file, Options options)
protected override IEnumerable<TarVolume> LoadVolumes(FileInfo file)
{
if (FlagUtility.HasFlag(options, Options.KeepStreamsOpen))
{
options = (Options)FlagUtility.SetFlag(options, Options.KeepStreamsOpen, false);
}
return new TarVolume(file.OpenRead(), options).AsEnumerable();
return new TarVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
}
#endif
@@ -135,9 +107,9 @@ namespace SharpCompress.Archive.Tar
/// Takes multiple seekable Streams for a multi-part archive
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
internal TarArchive(Stream stream, Options options)
: base(ArchiveType.Tar, stream, options)
/// <param name="readerOptions"></param>
internal TarArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.Tar, stream, readerOptions)
{
}
@@ -146,9 +118,9 @@ namespace SharpCompress.Archive.Tar
{
}
protected override IEnumerable<TarVolume> LoadVolumes(IEnumerable<Stream> streams, Options options)
protected override IEnumerable<TarVolume> LoadVolumes(IEnumerable<Stream> streams)
{
return new TarVolume(streams.First(), options).AsEnumerable();
return new TarVolume(streams.First(), ReaderOptions).AsEnumerable();
}
protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
@@ -172,14 +144,16 @@ namespace SharpCompress.Archive.Tar
var oldStreamPos = stream.Position;
using(var entryStream = entry.OpenEntryStream())
using(var memoryStream = new MemoryStream())
using (var entryStream = entry.OpenEntryStream())
{
entryStream.TransferTo(memoryStream);
memoryStream.Position = 0;
var bytes = memoryStream.ToArray();
using (var memoryStream = new MemoryStream())
{
entryStream.TransferTo(memoryStream);
memoryStream.Position = 0;
var bytes = memoryStream.ToArray();
header.Name = ArchiveEncoding.Default.GetString(bytes, 0, bytes.Length).TrimNulls();
header.Name = ArchiveEncoding.Default.GetString(bytes, 0, bytes.Length).TrimNulls();
}
}
stream.Position = oldStreamPos;
@@ -198,17 +172,17 @@ namespace SharpCompress.Archive.Tar
}
protected override TarArchiveEntry CreateEntryInternal(string filePath, Stream source,
long size, DateTime? modified, bool closeStream)
long size, DateTime? modified, bool closeStream)
{
return new TarWritableArchiveEntry(this, source, CompressionType.Unknown, filePath, size, modified,
closeStream);
}
protected override void SaveTo(Stream stream, CompressionInfo compressionInfo,
protected override void SaveTo(Stream stream, WriterOptions options,
IEnumerable<TarArchiveEntry> oldEntries,
IEnumerable<TarArchiveEntry> newEntries)
{
using (var writer = new TarWriter(stream, compressionInfo))
using (var writer = new TarWriter(stream, options))
{
foreach (var entry in oldEntries.Concat(newEntries)
.Where(x => !x.IsDirectory))

View File

@@ -3,7 +3,7 @@ using System.Linq;
using SharpCompress.Common;
using SharpCompress.Common.Tar;
namespace SharpCompress.Archive.Tar
namespace SharpCompress.Archives.Tar
{
public class TarArchiveEntry : TarEntry, IArchiveEntry
{
@@ -19,12 +19,10 @@ namespace SharpCompress.Archive.Tar
}
#region IArchiveEntry Members
public IArchive Archive { get; private set; }
public bool IsComplete
{
get { return true; }
}
public IArchive Archive { get; }
public bool IsComplete { get { return true; } }
#endregion
}

View File

@@ -0,0 +1,65 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Archives.Tar
{
internal class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiveEntry
{
private readonly bool closeStream;
private readonly Stream stream;
internal TarWritableArchiveEntry(TarArchive archive, Stream stream, CompressionType compressionType,
string path, long size, DateTime? lastModified, bool closeStream)
: base(archive, null, compressionType)
{
this.stream = stream;
Key = path;
Size = size;
LastModifiedTime = lastModified;
this.closeStream = closeStream;
}
public override long Crc { get { return 0; } }
public override string Key { get; }
public override long CompressedSize { get { return 0; } }
public override long Size { get; }
public override DateTime? LastModifiedTime { get; }
public override DateTime? CreatedTime { get { return null; } }
public override DateTime? LastAccessedTime { get { return null; } }
public override DateTime? ArchivedTime { get { return null; } }
public override bool IsEncrypted { get { return false; } }
public override bool IsDirectory { get { return false; } }
public override bool IsSplit { get { return false; } }
internal override IEnumerable<FilePart> Parts { get { throw new NotImplementedException(); } }
Stream IWritableArchiveEntry.Stream { get { return stream; } }
public override Stream OpenEntryStream()
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return new NonDisposingStream(stream);
}
internal override void Close()
{
if (closeStream)
{
stream.Dispose();
}
}
}
}

View File

@@ -5,12 +5,13 @@ using System.Linq;
using SharpCompress.Common;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressor.Deflate;
using SharpCompress.Reader;
using SharpCompress.Reader.Zip;
using SharpCompress.Writer.Zip;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Readers;
using SharpCompress.Readers.Zip;
using SharpCompress.Writers;
using SharpCompress.Writers.Zip;
namespace SharpCompress.Archive.Zip
namespace SharpCompress.Archives.Zip
{
public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
{
@@ -27,68 +28,33 @@ namespace SharpCompress.Archive.Zip
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="password"></param>
public static ZipArchive Open(string filePath, string password = null)
{
return Open(filePath, Options.None, password);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="password"></param>
public static ZipArchive Open(FileInfo fileInfo, string password = null)
{
return Open(fileInfo, Options.None, password);
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
/// <param name="password"></param>
public static ZipArchive Open(string filePath, Options options, string password = null)
/// <param name="readerOptions"></param>
public static ZipArchive Open(string filePath, ReaderOptions readerOptions = null)
{
filePath.CheckNotNullOrEmpty("filePath");
return Open(new FileInfo(filePath), options, password);
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
/// <param name="password"></param>
public static ZipArchive Open(FileInfo fileInfo, Options options, string password = null)
/// <param name="readerOptions"></param>
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
{
fileInfo.CheckNotNull("fileInfo");
return new ZipArchive(fileInfo, options, password);
return new ZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
#endif
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="password"></param>
public static ZipArchive Open(Stream stream, string password = null)
/// <param name="readerOptions"></param>
public static ZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
{
stream.CheckNotNull("stream");
return Open(stream, Options.None, password);
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
/// <param name="password"></param>
public static ZipArchive Open(Stream stream, Options options, string password = null)
{
stream.CheckNotNull("stream");
return new ZipArchive(stream, options, password);
return new ZipArchive(stream, readerOptions ?? new ReaderOptions());
}
#if !NO_FILE
@@ -121,7 +87,7 @@ namespace SharpCompress.Archive.Zip
{
return false;
}
return Enum.IsDefined(typeof (ZipHeaderType), header.ZipHeaderType);
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
@@ -134,25 +100,21 @@ namespace SharpCompress.Archive.Zip
}
#if !NO_FILE
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
/// <param name="password"></param>
internal ZipArchive(FileInfo fileInfo, Options options, string password = null)
: base(ArchiveType.Zip, fileInfo, options)
/// <param name="readerOptions"></param>
internal ZipArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.Zip, fileInfo, readerOptions)
{
headerFactory = new SeekableZipHeaderFactory(password);
headerFactory = new SeekableZipHeaderFactory(readerOptions.Password);
}
protected override IEnumerable<ZipVolume> LoadVolumes(FileInfo file, Options options)
protected override IEnumerable<ZipVolume> LoadVolumes(FileInfo file)
{
if (FlagUtility.HasFlag(options, Options.KeepStreamsOpen))
{
options = (Options)FlagUtility.SetFlag(options, Options.KeepStreamsOpen, false);
}
return new ZipVolume(file.OpenRead(), options).AsEnumerable();
return new ZipVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
}
#endif
@@ -165,17 +127,16 @@ namespace SharpCompress.Archive.Zip
/// Takes multiple seekable Streams for a multi-part archive
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
/// <param name="password"></param>
internal ZipArchive(Stream stream, Options options, string password = null)
: base(ArchiveType.Zip, stream, options)
/// <param name="readerOptions"></param>
internal ZipArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.Zip, stream, readerOptions)
{
headerFactory = new SeekableZipHeaderFactory(password);
headerFactory = new SeekableZipHeaderFactory(readerOptions.Password);
}
protected override IEnumerable<ZipVolume> LoadVolumes(IEnumerable<Stream> streams, Options options)
protected override IEnumerable<ZipVolume> LoadVolumes(IEnumerable<Stream> streams)
{
return new ZipVolume(streams.First(), options).AsEnumerable();
return new ZipVolume(streams.First(), ReaderOptions).AsEnumerable();
}
protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
@@ -189,43 +150,48 @@ namespace SharpCompress.Archive.Zip
switch (h.ZipHeaderType)
{
case ZipHeaderType.DirectoryEntry:
{
yield return new ZipArchiveEntry(this,
new SeekableZipFilePart(headerFactory,
h as DirectoryEntryHeader,
stream));
}
{
yield return new ZipArchiveEntry(this,
new SeekableZipFilePart(headerFactory,
h as DirectoryEntryHeader,
stream));
}
break;
case ZipHeaderType.DirectoryEnd:
{
byte[] bytes = (h as DirectoryEndHeader).Comment;
volume.Comment = ArchiveEncoding.Default.GetString(bytes, 0, bytes.Length);
yield break;
}
{
byte[] bytes = (h as DirectoryEndHeader).Comment;
volume.Comment = ArchiveEncoding.Default.GetString(bytes, 0, bytes.Length);
yield break;
}
}
}
}
}
protected override void SaveTo(Stream stream, CompressionInfo compressionInfo,
public void SaveTo(Stream stream)
{
SaveTo(stream, new WriterOptions(CompressionType.Deflate));
}
protected override void SaveTo(Stream stream, WriterOptions options,
IEnumerable<ZipArchiveEntry> oldEntries,
IEnumerable<ZipArchiveEntry> newEntries)
{
using (var writer = new ZipWriter(stream, compressionInfo, string.Empty))
using (var writer = new ZipWriter(stream, new ZipWriterOptions(options)))
{
foreach (var entry in oldEntries.Concat(newEntries)
.Where(x => !x.IsDirectory))
{
using (var entryStream = entry.OpenEntryStream())
{
writer.Write(entry.Key, entryStream, entry.LastModifiedTime, string.Empty);
writer.Write(entry.Key, entryStream, entry.LastModifiedTime);
}
}
}
}
protected override ZipArchiveEntry CreateEntryInternal(string filePath, Stream source, long size, DateTime? modified,
bool closeStream)
bool closeStream)
{
return new ZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
}
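A sketch of round-tripping an existing archive with the SaveTo(Stream) convenience added above, which defaults to Deflate; file names are illustrative and only signatures visible in this diff are used.

using System.IO;
using SharpCompress.Archives.Zip;

using (var archive = ZipArchive.Open("existing.zip"))
using (Stream extra = File.OpenRead("notes.txt"))
{
    archive.AddEntry("notes.txt", extra, closeStream: false);
    using (Stream output = File.Create("updated.zip"))
    {
        // New convenience overload from this change; equivalent to
        // SaveTo(output, new WriterOptions(CompressionType.Deflate)).
        archive.SaveTo(output);
    }
}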

View File

@@ -2,7 +2,7 @@
using System.Linq;
using SharpCompress.Common.Zip;
namespace SharpCompress.Archive.Zip
namespace SharpCompress.Archives.Zip
{
public class ZipArchiveEntry : ZipEntry, IArchiveEntry
{
@@ -19,18 +19,12 @@ namespace SharpCompress.Archive.Zip
#region IArchiveEntry Members
public IArchive Archive { get; private set; }
public IArchive Archive { get; }
public bool IsComplete
{
get { return true; }
}
public bool IsComplete { get { return true; } }
#endregion
public string Comment
{
get { return (Parts.Single() as SeekableZipFilePart).Comment; }
}
public string Comment { get { return (Parts.Single() as SeekableZipFilePart).Comment; } }
}
}

View File

@@ -0,0 +1,68 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Archives.Zip
{
internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
{
private readonly bool closeStream;
private readonly Stream stream;
private bool isDisposed;
internal ZipWritableArchiveEntry(ZipArchive archive, Stream stream, string path, long size,
DateTime? lastModified, bool closeStream)
: base(archive, null)
{
this.stream = stream;
Key = path;
Size = size;
LastModifiedTime = lastModified;
this.closeStream = closeStream;
}
public override long Crc { get { return 0; } }
public override string Key { get; }
public override long CompressedSize { get { return 0; } }
public override long Size { get; }
public override DateTime? LastModifiedTime { get; }
public override DateTime? CreatedTime { get { return null; } }
public override DateTime? LastAccessedTime { get { return null; } }
public override DateTime? ArchivedTime { get { return null; } }
public override bool IsEncrypted { get { return false; } }
public override bool IsDirectory { get { return false; } }
public override bool IsSplit { get { return false; } }
internal override IEnumerable<FilePart> Parts { get { throw new NotImplementedException(); } }
Stream IWritableArchiveEntry.Stream { get { return stream; } }
public override Stream OpenEntryStream()
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return new NonDisposingStream(stream);
}
internal override void Close()
{
if (closeStream && !isDisposed)
{
stream.Dispose();
isDisposed = true;
}
}
}
}

View File

@@ -2,12 +2,8 @@
using System.Reflection;
using System.Runtime.CompilerServices;
[assembly: AssemblyTitle("SharpCompress")]
[assembly: AssemblyProduct("SharpCompress")]
[assembly: InternalsVisibleTo("SharpCompress.Test")]
[assembly: InternalsVisibleTo("SharpCompress.Test.Portable")]
[assembly: CLSCompliant(true)]
[assembly: CLSCompliant(true)]

View File

@@ -6,6 +6,6 @@
Zip,
Tar,
SevenZip,
GZip,
GZip
}
}

View File

@@ -1,30 +0,0 @@
using SharpCompress.Compressor.Deflate;
namespace SharpCompress.Common
{
/// <summary>
/// Detailed compression properties when saving.
/// </summary>
public class CompressionInfo
{
public CompressionInfo()
{
DeflateCompressionLevel = CompressionLevel.Default;
}
/// <summary>
/// The algorithm to use. Must be valid for the format type.
/// </summary>
public CompressionType Type { get; set; }
/// <summary>
/// When CompressionType.Deflate is used, this property is referenced. Defaults to CompressionLevel.Default.
/// </summary>
public CompressionLevel DeflateCompressionLevel { get; set; }
public static implicit operator CompressionInfo(CompressionType compressionType)
{
return new CompressionInfo() {Type = compressionType};
}
}
}
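For reference, the deleted CompressionInfo class above carried an implicit conversion from CompressionType; a minimal sketch of what callers relied on, using only members shown in the removed file:
// Implicit conversion from the enum...
CompressionInfo info = CompressionType.Deflate;
// ...is equivalent to constructing it explicitly; DeflateCompressionLevel stays at CompressionLevel.Default.
CompressionInfo sameThing = new CompressionInfo { Type = CompressionType.Deflate };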

View File

@@ -11,6 +11,7 @@
LZMA,
BCJ,
BCJ2,
Unknown,
LZip,
Unknown
}
}

View File

@@ -70,16 +70,11 @@ namespace SharpCompress.Common
internal virtual void Close()
{
}
/// <summary>
/// Entry file attribute.
/// </summary>
public virtual int? Attrib
{
get { throw new NotImplementedException(); }
}
public virtual int? Attrib { get { throw new NotImplementedException(); } }
}
}

View File

@@ -1,19 +1,19 @@
using System;
using System.IO;
using SharpCompress.Reader;
using SharpCompress.Readers;
namespace SharpCompress.Common
{
public class EntryStream : Stream
{
public IReader Reader { get; private set; }
private Stream stream;
public IReader Reader { get; }
private readonly Stream stream;
private bool completed;
private bool isDisposed;
internal EntryStream(IReader reader, Stream stream)
{
this.Reader = reader;
Reader = reader;
this.stream = stream;
}
@@ -44,36 +44,20 @@ namespace SharpCompress.Common
stream.Dispose();
}
public override bool CanRead
{
get { return true; }
}
public override bool CanRead { get { return true; } }
public override bool CanSeek
{
get { return false; }
}
public override bool CanSeek { get { return false; } }
public override bool CanWrite
{
get { return false; }
}
public override bool CanWrite { get { return false; } }
public override void Flush()
{
throw new NotSupportedException();
}
public override long Length
{
get { throw new NotSupportedException(); }
}
public override long Length { get { throw new NotSupportedException(); } }
public override long Position
{
get { throw new NotSupportedException(); }
set { throw new NotSupportedException(); }
}
public override long Position { get { throw new NotSupportedException(); } set { throw new NotSupportedException(); } }
public override int Read(byte[] buffer, int offset, int count)
{

View File

@@ -48,7 +48,7 @@ namespace SharpCompress.Common
{
return ((bitField & flag) == flag);
}
/// <summary>
/// Returns true if the flag is set on the specified bit field.
/// Currently only works with 32-bit bitfields.
@@ -75,7 +75,6 @@ namespace SharpCompress.Common
return ((bitField & flag) == flag);
}
/// <summary>
/// Sets a bit-field to either on or off for the specified flag.
/// </summary>
@@ -83,9 +82,9 @@ namespace SharpCompress.Common
/// <param name="flag">Flag to change</param>
/// <param name="on">bool</param>
/// <returns>The flagged variable with the flag changed</returns>
public static long SetFlag(long bitField, long flag, bool @on)
public static long SetFlag(long bitField, long flag, bool on)
{
if (@on)
if (on)
{
return bitField | flag;
}
@@ -100,10 +99,10 @@ namespace SharpCompress.Common
/// <param name="flag">Flag to change</param>
/// <param name="on">bool</param>
/// <returns>The flagged variable with the flag changed</returns>
public static long SetFlag<T>(T bitField, T flag, bool @on)
public static long SetFlag<T>(T bitField, T flag, bool on)
where T : struct
{
return SetFlag(Convert.ToInt64(bitField), Convert.ToInt64(flag), @on);
return SetFlag(Convert.ToInt64(bitField), Convert.ToInt64(flag), on);
}
}
}
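A minimal usage sketch for the SetFlag helper above (the enclosing static class is not named in this hunk, so FlagUtility is a placeholder; only the on == true branch is visible in the diff, the else branch presumably clears the bit):
long flags = 0;
flags = FlagUtility.SetFlag(flags, 0x4, true);   // bitField | flag -> 0x4
flags = FlagUtility.SetFlag(flags, 0x4, false);  // presumably clears the bit -> 0x0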

View File

@@ -13,70 +13,31 @@ namespace SharpCompress.Common.GZip
this.filePart = filePart;
}
public override CompressionType CompressionType
{
get { return CompressionType.GZip; }
}
public override CompressionType CompressionType { get { return CompressionType.GZip; } }
public override long Crc
{
get { return 0; }
}
public override long Crc { get { return 0; } }
public override string Key
{
get { return filePart.FilePartName; }
}
public override string Key { get { return filePart.FilePartName; } }
public override long CompressedSize
{
get { return 0; }
}
public override long CompressedSize { get { return 0; } }
public override long Size
{
get { return 0; }
}
public override long Size { get { return 0; } }
public override DateTime? LastModifiedTime
{
get { return filePart.DateModified; }
}
public override DateTime? LastModifiedTime { get { return filePart.DateModified; } }
public override DateTime? CreatedTime
{
get { return null; }
}
public override DateTime? CreatedTime { get { return null; } }
public override DateTime? LastAccessedTime
{
get { return null; }
}
public override DateTime? LastAccessedTime { get { return null; } }
public override DateTime? ArchivedTime
{
get { return null; }
}
public override DateTime? ArchivedTime { get { return null; } }
public override bool IsEncrypted
{
get { return false; }
}
public override bool IsEncrypted { get { return false; } }
public override bool IsDirectory
{
get { return false; }
}
public override bool IsDirectory { get { return false; } }
public override bool IsSplit
{
get { return false; }
}
public override bool IsSplit { get { return false; } }
internal override IEnumerable<FilePart> Parts
{
get { return filePart.AsEnumerable<FilePart>(); }
}
internal override IEnumerable<FilePart> Parts { get { return filePart.AsEnumerable<FilePart>(); } }
internal static IEnumerable<GZipEntry> GetEntries(Stream stream)
{

View File

@@ -1,9 +1,10 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressor;
using SharpCompress.Compressor.Deflate;
using SharpCompress.Converter;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Converters;
namespace SharpCompress.Common.GZip
{
@@ -20,10 +21,7 @@ namespace SharpCompress.Common.GZip
internal DateTime? DateModified { get; private set; }
internal override string FilePartName
{
get { return name; }
}
internal override string FilePartName { get { return name; } }
internal override Stream GetCompressedStream()
{
@@ -43,13 +41,19 @@ namespace SharpCompress.Common.GZip
// workitem 8501: handle edge case (decompress empty stream)
if (n == 0)
{
return;
}
if (n != 10)
{
throw new ZlibException("Not a valid GZIP stream.");
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
throw new ZlibException("Bad GZIP header.");
}
Int32 timet = DataConverter.LittleEndian.GetInt32(header, 4);
DateModified = TarHeader.Epoch.AddSeconds(timet);
@@ -58,7 +62,7 @@ namespace SharpCompress.Common.GZip
// read and discard extra field
n = stream.Read(header, 0, 2); // 2-byte length field
Int16 extraLength = (Int16) (header[0] + header[1]*256);
Int16 extraLength = (Int16)(header[0] + header[1] * 256);
byte[] extra = new byte[extraLength];
n = stream.Read(extra, 0, extra.Length);
if (n != extraLength)
@@ -67,18 +71,23 @@ namespace SharpCompress.Common.GZip
}
}
if ((header[3] & 0x08) == 0x08)
{
name = ReadZeroTerminatedString(stream);
}
if ((header[3] & 0x10) == 0x010)
{
ReadZeroTerminatedString(stream);
}
if ((header[3] & 0x02) == 0x02)
{
stream.ReadByte(); // CRC16, ignore
}
}
private static string ReadZeroTerminatedString(Stream stream)
{
byte[] buf1 = new byte[1];
var list = new System.Collections.Generic.List<byte>();
var list = new List<byte>();
bool done = false;
do
{
@@ -96,7 +105,8 @@ namespace SharpCompress.Common.GZip
{
list.Add(buf1[0]);
}
} while (!done);
}
while (!done);
byte[] a = list.ToArray();
return ArchiveEncoding.Default.GetString(a, 0, a.Length);
}

View File

@@ -1,29 +1,25 @@
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common.GZip
{
public class GZipVolume : Volume
{
public GZipVolume(Stream stream, Options options)
public GZipVolume(Stream stream, ReaderOptions options)
: base(stream, options)
{
}
#if !NO_FILE
public GZipVolume(FileInfo fileInfo, Options options)
public GZipVolume(FileInfo fileInfo, ReaderOptions options)
: base(fileInfo.OpenRead(), options)
{
options.LeaveStreamOpen = false;
}
#endif
public override bool IsFirstVolume
{
get { return true; }
}
public override bool IsFirstVolume { get { return true; } }
public override bool IsMultiVolume
{
get { return true; }
}
public override bool IsMultiVolume { get { return true; } }
}
}

View File

@@ -1,14 +1,16 @@
#if !NO_FILE
#if !NO_FILE
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common
{
internal static class IEntryExtensions
{
internal static void PreserveExtractionOptions(this IEntry entry, string destinationFileName,
ExtractOptions options)
ExtractionOptions options)
{
if (options.HasFlag(ExtractOptions.PreserveFileTime) || options.HasFlag(ExtractOptions.PreserveAttributes))
if (options.PreserveFileTime || options.PreserveAttributes)
{
FileInfo nf = new FileInfo(destinationFileName);
if (!nf.Exists)
@@ -17,7 +19,7 @@ namespace SharpCompress.Common
}
// update file time to original packed time
if (options.HasFlag(ExtractOptions.PreserveFileTime))
if (options.PreserveFileTime)
{
if (entry.CreatedTime.HasValue)
{
@@ -35,7 +37,7 @@ namespace SharpCompress.Common
}
}
if (options.HasFlag(ExtractOptions.PreserveAttributes))
if (options.PreserveAttributes)
{
if (entry.Attrib.HasValue)
{
@@ -46,4 +48,4 @@ namespace SharpCompress.Common
}
}
}
#endif
#endif
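The hunk above replaces the ExtractOptions flags enum with an ExtractionOptions object exposing boolean properties. A minimal sketch of the new shape, assuming PreserveFileTime and PreserveAttributes are settable booleans as the checks above imply:
var extractionOptions = new ExtractionOptions
{
    PreserveFileTime = true,    // restore packed timestamps after extraction
    PreserveAttributes = true   // restore file attributes after extraction
};
// the internal PreserveExtractionOptions helper above then receives these options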

View File

@@ -1,4 +1,5 @@
using System;
#if !NO_FILE
using System.IO;
#endif

View File

@@ -1,23 +0,0 @@
using System;
namespace SharpCompress.Common
{
[Flags]
public enum Options
{
/// <summary>
/// No options specified
/// </summary>
None = 0,
/// <summary>
/// SharpCompress will keep the supplied streams open
/// </summary>
KeepStreamsOpen = 1,
/// <summary>
/// Look for RarArchive (Check for self-extracting archives or cases where RarArchive isn't at the start of the file)
/// </summary>
LookForHeader = 2,
}
}
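The Options flags enum above is removed in this change; elsewhere in the diff its flags become properties on the new options objects, for example in RarHeaderFactory further down:
// Before (flags enum):                        // After (properties):
// options.HasFlag(Options.KeepStreamsOpen)    ->  options.LeaveStreamOpen
// options.HasFlag(Options.LookForHeader)      ->  options.LookForHeader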

View File

@@ -0,0 +1,10 @@
namespace SharpCompress.Common
{
public class OptionsBase
{
/// <summary>
/// SharpCompress will keep the supplied streams open. Default is true.
/// </summary>
public bool LeaveStreamOpen { get; set; } = true;
}
}
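The new OptionsBase above is the root of the reworked options hierarchy; later hunks pass a ReaderOptions into volumes and header factories. A minimal sketch, assuming ReaderOptions derives from OptionsBase and exposes the settable Password and LookForHeader properties referenced below:
var readerOptions = new ReaderOptions
{
    LeaveStreamOpen = false,   // from OptionsBase; default is true
    Password = "secret",       // read as Options.Password in RarHeaderFactory below
    LookForHeader = true       // read as Options.LookForHeader below
};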

View File

@@ -17,10 +17,7 @@ namespace SharpCompress.Common.Rar.Headers
}
}
internal ArchiveFlags ArchiveHeaderFlags
{
get { return (ArchiveFlags) base.Flags; }
}
internal ArchiveFlags ArchiveHeaderFlags { get { return (ArchiveFlags)Flags; } }
internal short HighPosAv { get; private set; }
@@ -28,9 +25,6 @@ namespace SharpCompress.Common.Rar.Headers
internal byte EncryptionVersion { get; private set; }
public bool HasPassword
{
get { return ArchiveHeaderFlags.HasFlag(ArchiveFlags.PASSWORD); }
}
public bool HasPassword { get { return ArchiveHeaderFlags.HasFlag(ArchiveFlags.PASSWORD); } }
}
}

View File

@@ -16,10 +16,7 @@ namespace SharpCompress.Common.Rar.Headers
}
}
internal EndArchiveFlags EndArchiveFlags
{
get { return (EndArchiveFlags) base.Flags; }
}
internal EndArchiveFlags EndArchiveFlags { get { return (EndArchiveFlags)Flags; } }
internal int? ArchiveCRC { get; private set; }

View File

@@ -52,50 +52,50 @@ namespace SharpCompress.Common.Rar.Headers
switch (HeaderType)
{
case HeaderType.FileHeader:
{
if (FileFlags.HasFlag(FileFlags.UNICODE))
{
if (FileFlags.HasFlag(FileFlags.UNICODE))
int length = 0;
while (length < fileNameBytes.Length
&& fileNameBytes[length] != 0)
{
int length = 0;
while (length < fileNameBytes.Length
&& fileNameBytes[length] != 0)
{
length++;
}
if (length != nameSize)
{
length++;
FileName = FileNameDecoder.Decode(fileNameBytes, length);
}
else
{
FileName = DecodeDefault(fileNameBytes);
}
length++;
}
if (length != nameSize)
{
length++;
FileName = FileNameDecoder.Decode(fileNameBytes, length);
}
else
{
FileName = DecodeDefault(fileNameBytes);
}
FileName = ConvertPath(FileName, HostOS);
}
else
{
FileName = DecodeDefault(fileNameBytes);
}
FileName = ConvertPath(FileName, HostOS);
}
break;
case HeaderType.NewSubHeader:
{
int datasize = HeaderSize - NEWLHD_SIZE - nameSize;
if (FileFlags.HasFlag(FileFlags.SALT))
{
int datasize = HeaderSize - NEWLHD_SIZE - nameSize;
if (FileFlags.HasFlag(FileFlags.SALT))
{
datasize -= SALT_SIZE;
}
if (datasize > 0)
{
SubData = reader.ReadBytes(datasize);
}
if (NewSubHeaderType.SUBHEAD_TYPE_RR.Equals(fileNameBytes))
{
RecoverySectors = SubData[8] + (SubData[9] << 8)
+ (SubData[10] << 16) + (SubData[11] << 24);
}
datasize -= SALT_SIZE;
}
if (datasize > 0)
{
SubData = reader.ReadBytes(datasize);
}
if (NewSubHeaderType.SUBHEAD_TYPE_RR.Equals(fileNameBytes))
{
RecoverySectors = SubData[8] + (SubData[9] << 8)
+ (SubData[10] << 16) + (SubData[11] << 24);
}
}
break;
}
@@ -155,6 +155,7 @@ namespace SharpCompress.Common.Rar.Headers
byte b = reader.ReadByte();
nanosecondHundreds |= (((uint)b) << ((j + 3 - count) * 8));
}
//10^-7 to 10^-3
return time.Value.AddMilliseconds(nanosecondHundreds * Math.Pow(10, -4));
}
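(For context: nanosecondHundreds counts 100 ns units, i.e. 10^-7 s; multiplying by 10^-4 converts that count to milliseconds, 10^-3 s, so 10,000 units add exactly 1 ms.)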
@@ -207,10 +208,7 @@ namespace SharpCompress.Common.Rar.Headers
internal int FileAttributes { get; private set; }
internal FileFlags FileFlags
{
get { return (FileFlags)base.Flags; }
}
internal FileFlags FileFlags { get { return (FileFlags)Flags; } }
internal long CompressedSize { get; private set; }
internal long UncompressedSize { get; private set; }

View File

@@ -32,19 +32,19 @@ namespace SharpCompress.Common.Rar.Headers
switch (flags >> 6)
{
case 0:
buf.Append((char) (GetChar(name, encPos++)));
buf.Append((char)(GetChar(name, encPos++)));
++decPos;
break;
case 1:
buf.Append((char) (GetChar(name, encPos++) + (highByte << 8)));
buf.Append((char)(GetChar(name, encPos++) + (highByte << 8)));
++decPos;
break;
case 2:
low = GetChar(name, encPos);
high = GetChar(name, encPos + 1);
buf.Append((char) ((high << 8) + low));
buf.Append((char)((high << 8) + low));
++decPos;
encPos += 2;
break;
@@ -57,14 +57,14 @@ namespace SharpCompress.Common.Rar.Headers
for (length = (length & 0x7f) + 2; length > 0 && decPos < name.Length; length--, decPos++)
{
low = (GetChar(name, decPos) + correction) & 0xff;
buf.Append((char) ((highByte << 8) + low));
buf.Append((char)((highByte << 8) + low));
}
}
else
{
for (length += 2; length > 0 && decPos < name.Length; length--, decPos++)
{
buf.Append((char) (GetChar(name, decPos)));
buf.Append((char)(GetChar(name, decPos)));
}
}
break;

View File

@@ -13,12 +13,12 @@ namespace SharpCompress.Common.Rar.Headers
ProtectHeader = 0x78,
SignHeader = 0x79,
NewSubHeader = 0x7a,
EndArchiveHeader = 0x7b,
EndArchiveHeader = 0x7b
}
internal enum HeaderFlags : short
{
LONG_BLOCK = -0x8000,
LONG_BLOCK = -0x8000
}
[Flags]
@@ -33,7 +33,7 @@ namespace SharpCompress.Common.Rar.Headers
PROTECT = 0x0040,
PASSWORD = 0x0080,
FIRSTVOLUME = 0x0100,
ENCRYPTVER = 0x0200,
ENCRYPTVER = 0x0200
}
internal enum HostOS
@@ -70,16 +70,15 @@ namespace SharpCompress.Common.Rar.Headers
SALT = 0x0400,
VERSION = 0x0800,
EXTTIME = 0x1000,
EXTFLAGS = 0x2000,
EXTFLAGS = 0x2000
}
[Flags]
internal enum EndArchiveFlags
{
EARC_NEXT_VOLUME = 0x0001,
EARC_DATACRC = 0x0002,
EARC_REVSPACE = 0x0004,
EARC_VOLNUMBER = 0x0008,
EARC_VOLNUMBER = 0x0008
}
}

View File

@@ -20,14 +20,14 @@ namespace SharpCompress.Common.Rar.Headers
//internal static final NewSubHeaderType SUBHEAD_TYPE_BEOSEA = new NewSubHeaderType(new byte[]{'E','A','B','E'});
private byte[] bytes;
private readonly byte[] bytes;
private NewSubHeaderType(params char[] chars)
{
bytes = new byte[chars.Length];
for (int i = 0; i < chars.Length; ++i)
{
bytes[i] = (byte) chars[i];
bytes[i] = (byte)chars[i];
}
}

View File

@@ -19,4 +19,4 @@ namespace SharpCompress.Common.Rar.Headers
internal uint TotalBlocks { get; private set; }
internal byte[] Mark { get; private set; }
}
}
}

View File

@@ -35,6 +35,7 @@ namespace SharpCompress.Common.Rar.Headers
return null;
}
}
protected virtual void ReadFromReader(MarkingBinaryReader reader)
{
HeadCRC = reader.ReadInt16();

View File

@@ -2,6 +2,7 @@ using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common.Rar.Headers
{
@@ -9,21 +10,19 @@ namespace SharpCompress.Common.Rar.Headers
{
private const int MAX_SFX_SIZE = 0x80000 - 16; //archive.cpp line 136
internal RarHeaderFactory(StreamingMode mode, Options options, string password = null)
internal RarHeaderFactory(StreamingMode mode, ReaderOptions options)
{
StreamingMode = mode;
Options = options;
Password = password;
}
private Options Options { get; set; }
public string Password { get; private set; }
internal StreamingMode StreamingMode { get; private set; }
private ReaderOptions Options { get; }
internal StreamingMode StreamingMode { get; }
internal bool IsEncrypted { get; private set; }
internal IEnumerable<RarHeader> ReadHeaders(Stream stream)
{
if (Options.HasFlag(Options.LookForHeader))
if (Options.LookForHeader)
{
stream = CheckSFX(stream);
}
@@ -91,7 +90,7 @@ namespace SharpCompress.Common.Rar.Headers
}
catch (Exception e)
{
if (!Options.HasFlag(Options.KeepStreamsOpen))
if (!Options.LeaveStreamOpen)
{
#if NET35
reader.Close();
@@ -114,15 +113,14 @@ namespace SharpCompress.Common.Rar.Headers
return rewindableStream;
}
private RarHeader ReadNextHeader(Stream stream)
{
#if !NO_CRYPTO
var reader = new RarCryptoBinaryReader(stream, Password);
var reader = new RarCryptoBinaryReader(stream, Options.Password);
if (IsEncrypted)
{
if (Password == null)
if (Options.Password == null)
{
throw new CryptographicException("Encrypted Rar archive has no password specified.");
}
@@ -143,109 +141,109 @@ namespace SharpCompress.Common.Rar.Headers
switch (header.HeaderType)
{
case HeaderType.ArchiveHeader:
{
var ah = header.PromoteHeader<ArchiveHeader>(reader);
IsEncrypted = ah.HasPassword;
return ah;
}
{
var ah = header.PromoteHeader<ArchiveHeader>(reader);
IsEncrypted = ah.HasPassword;
return ah;
}
case HeaderType.MarkHeader:
{
return header.PromoteHeader<MarkHeader>(reader);
}
{
return header.PromoteHeader<MarkHeader>(reader);
}
case HeaderType.ProtectHeader:
{
ProtectHeader ph = header.PromoteHeader<ProtectHeader>(reader);
// skip the recovery record data, we do not use it.
switch (StreamingMode)
{
case StreamingMode.Seekable:
{
reader.BaseStream.Position += ph.DataSize;
}
break;
case StreamingMode.Streaming:
{
reader.BaseStream.Skip(ph.DataSize);
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
{
ProtectHeader ph = header.PromoteHeader<ProtectHeader>(reader);
return ph;
// skip the recovery record data, we do not use it.
switch (StreamingMode)
{
case StreamingMode.Seekable:
{
reader.BaseStream.Position += ph.DataSize;
}
break;
case StreamingMode.Streaming:
{
reader.BaseStream.Skip(ph.DataSize);
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
return ph;
}
case HeaderType.NewSubHeader:
{
FileHeader fh = header.PromoteHeader<FileHeader>(reader);
switch (StreamingMode)
{
FileHeader fh = header.PromoteHeader<FileHeader>(reader);
switch (StreamingMode)
case StreamingMode.Seekable:
{
case StreamingMode.Seekable:
{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:
{
//skip the data because it's useless?
reader.BaseStream.Skip(fh.CompressedSize);
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:
{
//skip the data because it's useless?
reader.BaseStream.Skip(fh.CompressedSize);
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
return fh;
}
return fh;
}
case HeaderType.FileHeader:
{
FileHeader fh = header.PromoteHeader<FileHeader>(reader);
switch (StreamingMode)
{
FileHeader fh = header.PromoteHeader<FileHeader>(reader);
switch (StreamingMode)
case StreamingMode.Seekable:
{
case StreamingMode.Seekable:
{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:
{
var ms = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
if (fh.Salt == null)
{
fh.PackedStream = ms;
}
else
{
#if !NO_CRYPTO
fh.PackedStream = new RarCryptoWrapper(ms, Password, fh.Salt);
#else
throw new NotSupportedException("RarCrypto not supported");
#endif
}
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:
{
var ms = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
if (fh.Salt == null)
{
fh.PackedStream = ms;
}
else
{
#if !NO_CRYPTO
fh.PackedStream = new RarCryptoWrapper(ms, Options.Password, fh.Salt);
#else
throw new NotSupportedException("RarCrypto not supported");
#endif
}
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
return fh;
}
return fh;
}
case HeaderType.EndArchiveHeader:
{
return header.PromoteHeader<EndArchiveHeader>(reader);
}
{
return header.PromoteHeader<EndArchiveHeader>(reader);
}
default:
{
throw new InvalidFormatException("Invalid Rar Header: " + header.HeaderType.ToString());
}
{
throw new InvalidFormatException("Invalid Rar Header: " + header.HeaderType);
}
}
}
}

View File

@@ -1,4 +1,5 @@
#if !NO_CRYPTO
#if !NO_CRYPTO
using System.Collections.Generic;
using System.IO;
using SharpCompress.IO;

View File

@@ -1,3 +1,4 @@
#if !NO_CRYPTO
using System;
using System.Collections.Generic;

View File

@@ -10,71 +10,44 @@ namespace SharpCompress.Common.Rar
/// <summary>
/// The file's 32-bit CRC hash
/// </summary>
public override long Crc
{
get { return FileHeader.FileCRC; }
}
public override long Crc { get { return FileHeader.FileCRC; } }
/// <summary>
/// The path of the file internal to the Rar Archive.
/// </summary>
public override string Key
{
get { return FileHeader.FileName; }
}
public override string Key { get { return FileHeader.FileName; } }
/// <summary>
/// The entry last modified time in the archive, if recorded
/// </summary>
public override DateTime? LastModifiedTime
{
get { return FileHeader.FileLastModifiedTime; }
}
public override DateTime? LastModifiedTime { get { return FileHeader.FileLastModifiedTime; } }
/// <summary>
/// The entry create time in the archive, if recorded
/// </summary>
public override DateTime? CreatedTime
{
get { return FileHeader.FileCreatedTime; }
}
public override DateTime? CreatedTime { get { return FileHeader.FileCreatedTime; } }
/// <summary>
/// The entry last accessed time in the archive, if recorded
/// </summary>
public override DateTime? LastAccessedTime
{
get { return FileHeader.FileLastAccessedTime; }
}
public override DateTime? LastAccessedTime { get { return FileHeader.FileLastAccessedTime; } }
/// <summary>
/// The entry time when archived, if recorded
/// </summary>
public override DateTime? ArchivedTime
{
get { return FileHeader.FileArchivedTime; }
}
public override DateTime? ArchivedTime { get { return FileHeader.FileArchivedTime; } }
/// <summary>
/// Entry is password protected and encrypted and cannot be extracted.
/// </summary>
public override bool IsEncrypted
{
get { return FileHeader.FileFlags.HasFlag(FileFlags.PASSWORD); }
}
public override bool IsEncrypted { get { return FileHeader.FileFlags.HasFlag(FileFlags.PASSWORD); } }
/// <summary>
/// Entry is a directory.
/// </summary>
public override bool IsDirectory
{
get { return FileHeader.FileFlags.HasFlag(FileFlags.DIRECTORY); }
}
public override bool IsDirectory { get { return FileHeader.FileFlags.HasFlag(FileFlags.DIRECTORY); } }
public override bool IsSplit
{
get { return FileHeader.FileFlags.HasFlag(FileFlags.SPLIT_AFTER); }
}
public override bool IsSplit { get { return FileHeader.FileFlags.HasFlag(FileFlags.SPLIT_AFTER); } }
public override string ToString()
{

View File

@@ -74,13 +74,16 @@ namespace SharpCompress.Common.Rar
byte[] aesKey = new byte[CRYPTO_BLOCK_SIZE];
for (int i = 0; i < 4; i++)
{
for (int j = 0; j < 4; j++)
{
aesKey[i*4 + j] = (byte)
(((digest[i*4]*0x1000000) & 0xff000000 |
(uint) ((digest[i*4 + 1]*0x10000) & 0xff0000) |
(uint) ((digest[i*4 + 2]*0x100) & 0xff00) |
(uint) (digest[i*4 + 3] & 0xff)) >> (j*8));
}
}
rijndael.Init(false, new KeyParameter(aesKey));
@@ -100,10 +103,14 @@ namespace SharpCompress.Common.Rar
rijndael.ProcessBlock(cipherText, 0, plainText, 0);
for (int j = 0; j < plainText.Length; j++)
{
decryptedBytes.Add((byte) (plainText[j] ^ aesInitializationVector[j%16])); //32:114, 33:101
}
for (int j = 0; j < aesInitializationVector.Length; j++)
{
aesInitializationVector[j] = cipherText[j];
}
return decryptedBytes.ToArray();
}
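(The loop above is effectively manual CBC decryption: each 16-byte block is run through Rijndael, XORed with the current initialization vector, and the raw ciphertext block then becomes the IV for the next block.)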

View File

@@ -4,6 +4,7 @@ using System.IO;
using System.Linq;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common.Rar
{
@@ -13,20 +14,14 @@ namespace SharpCompress.Common.Rar
public abstract class RarVolume : Volume
{
private readonly RarHeaderFactory headerFactory;
internal RarVolume(StreamingMode mode, Stream stream, string password, Options options)
internal RarVolume(StreamingMode mode, Stream stream, ReaderOptions options)
: base(stream, options)
{
headerFactory = new RarHeaderFactory(mode, options, password);
Password = password;
headerFactory = new RarHeaderFactory(mode, options);
}
internal string Password { get; private set; }
internal StreamingMode Mode
{
get { return headerFactory.StreamingMode; }
}
internal StreamingMode Mode { get { return headerFactory.StreamingMode; } }
internal abstract IEnumerable<RarFilePart> ReadFileParts();
@@ -35,26 +30,26 @@ namespace SharpCompress.Common.Rar
internal IEnumerable<RarFilePart> GetVolumeFileParts()
{
MarkHeader previousMarkHeader = null;
foreach (RarHeader header in headerFactory.ReadHeaders(this.Stream))
foreach (RarHeader header in headerFactory.ReadHeaders(Stream))
{
switch (header.HeaderType)
{
case HeaderType.ArchiveHeader:
{
ArchiveHeader = header as ArchiveHeader;
}
{
ArchiveHeader = header as ArchiveHeader;
}
break;
case HeaderType.MarkHeader:
{
previousMarkHeader = header as MarkHeader;
}
{
previousMarkHeader = header as MarkHeader;
}
break;
case HeaderType.FileHeader:
{
FileHeader fh = header as FileHeader;
RarFilePart fp = CreateFilePart(fh, previousMarkHeader);
yield return fp;
}
{
FileHeader fh = header as FileHeader;
RarFilePart fp = CreateFilePart(fh, previousMarkHeader);
yield return fp;
}
break;
}
}
@@ -70,6 +65,7 @@ namespace SharpCompress.Common.Rar
{
throw new InvalidOperationException("ArchiveHeader should never been null in a streaming read.");
}
//we only want to load the archive header to avoid overhead but have to do the nasty thing and reset the stream
GetVolumeFileParts().First();
Stream.Position = 0;

View File

@@ -8,6 +8,7 @@ namespace SharpCompress.Common
{
Item = entry;
}
public T Item { get; private set; }
}
}
}

View File

@@ -1,8 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Compressor.LZMA;
using SharpCompress.Compressor.LZMA.Utilites;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.LZMA.Utilites;
namespace SharpCompress.Common.SevenZip
{
@@ -80,15 +80,19 @@ namespace SharpCompress.Common.SevenZip
{
// v3.13 incorrectly worked with empty folders
// v4.07: Loop for skipping empty folders
for (; ; )
for (;;)
{
if (folderIndex >= Folders.Count)
{
throw new InvalidOperationException();
}
FolderStartFileIndex.Add(i); // check it
if (NumUnpackStreamsVector[folderIndex] != 0)
{
break;
}
folderIndex++;
}
@@ -97,7 +101,9 @@ namespace SharpCompress.Common.SevenZip
FileIndexToFolderIndexMap.Add(folderIndex);
if (emptyStream)
{
continue;
}
indexInFolder++;
@@ -128,7 +134,9 @@ namespace SharpCompress.Common.SevenZip
long size = 0;
for (int i = 0; i < folder.PackStreams.Count; i++)
{
size += PackSizes[packStreamIndex + i];
}
return size;
}
@@ -139,7 +147,9 @@ namespace SharpCompress.Common.SevenZip
long folderStartPackPos = GetFolderStreamPos(folder, 0);
List<long> packSizes = new List<long>();
for (int j = 0; j < folder.PackStreams.Count; j++)
{
packSizes.Add(PackSizes[packStreamIndex + j]);
}
return DecoderStreamHelper.CreateDecoderStream(stream, folderStartPackPos, packSizes.ToArray(), folder, pw);
}
@@ -153,8 +163,12 @@ namespace SharpCompress.Common.SevenZip
{
int folderIndex = FileIndexToFolderIndexMap[fileIndex];
if (folderIndex != -1)
{
if (FolderStartFileIndex[folderIndex] == fileIndex)
{
return GetFolderFullPackSize(folderIndex);
}
}
return 0;
}
}

View File

@@ -1,9 +1,10 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using SharpCompress.Compressor.LZMA;
using SharpCompress.Compressor.LZMA.Utilites;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
namespace SharpCompress.Common.SevenZip
@@ -17,7 +18,7 @@ namespace SharpCompress.Common.SevenZip
internal long _streamEnding;
internal byte[] _header;
private Dictionary<int, Stream> _cachedStreams = new Dictionary<int, Stream>();
private readonly Dictionary<int, Stream> _cachedStreams = new Dictionary<int, Stream>();
internal void AddByteStream(byte[] buffer, int offset, int length)
{
@@ -66,7 +67,9 @@ namespace SharpCompress.Common.SevenZip
{
ulong id = _currentReader.ReadNumber();
if (id > 25)
{
return null;
}
#if DEBUG
Log.WriteLine("ReadId: {0}", (BlockType)id);
#endif
@@ -85,13 +88,17 @@ namespace SharpCompress.Common.SevenZip
private void WaitAttribute(BlockType attribute)
{
for (; ; )
for (;;)
{
BlockType? type = ReadId();
if (type == attribute)
{
return;
}
if (type == BlockType.End)
{
throw new InvalidOperationException();
}
SkipData();
}
}
@@ -99,7 +106,9 @@ namespace SharpCompress.Common.SevenZip
private void ReadArchiveProperties()
{
while (ReadId() != BlockType.End)
{
SkipData();
}
}
#endregion
@@ -122,7 +131,9 @@ namespace SharpCompress.Common.SevenZip
}
if ((data & mask) != 0)
{
bits.SetBit(i);
}
mask >>= 1;
}
@@ -134,7 +145,9 @@ namespace SharpCompress.Common.SevenZip
{
byte allTrue = ReadByte();
if (allTrue != 0)
{
return new BitVector(length, true);
}
return ReadBitVector(length);
}
@@ -150,9 +163,13 @@ namespace SharpCompress.Common.SevenZip
for (int i = 0; i < numFiles; i++)
{
if (defined[i])
{
action(i, checked((long)ReadUInt64()));
}
else
{
action(i, null);
}
}
}
}
@@ -166,9 +183,10 @@ namespace SharpCompress.Common.SevenZip
private DateTime? TranslateTime(long? time)
{
if (time.HasValue)
{
return TranslateTime(time.Value);
else
return null;
}
return null;
}
private void ReadDateTimeVector(List<byte[]> dataVector, int numFiles, Action<int, DateTime?> action)
@@ -185,9 +203,13 @@ namespace SharpCompress.Common.SevenZip
for (int i = 0; i < numFiles; i++)
{
if (boolVector[i])
{
action(i, ReadUInt32());
}
else
{
action(i, null);
}
}
}
}
@@ -230,10 +252,14 @@ namespace SharpCompress.Common.SevenZip
Log.WriteLine("MethodId: " + String.Join("", Enumerable.Range(0, idSize).Select(x => longID[x].ToString("x2")).ToArray()));
#endif
if (idSize > 8)
{
throw new NotSupportedException();
}
ulong id = 0;
for (int j = 0; j < idSize; j++)
{
id |= (ulong)longID[idSize - 1 - j] << (8 * j);
}
coder.MethodId = new CMethodId(id);
if ((mainByte & 0x10) != 0)
@@ -264,7 +290,9 @@ namespace SharpCompress.Common.SevenZip
}
if ((mainByte & 0x80) != 0)
{
throw new NotSupportedException();
}
numInStreams += coder.NumInStreams;
numOutStreams += coder.NumOutStreams;
@@ -298,9 +326,12 @@ namespace SharpCompress.Common.SevenZip
#endif
if (numInStreams < numBindPairs)
{
throw new NotSupportedException();
}
int numPackStreams = numInStreams - numBindPairs;
//folder.PackStreams.Reserve(numPackStreams);
if (numPackStreams == 1)
{
@@ -317,7 +348,9 @@ namespace SharpCompress.Common.SevenZip
}
if (folder.PackStreams.Count != 1)
{
throw new NotSupportedException();
}
}
else
{
@@ -417,11 +450,13 @@ namespace SharpCompress.Common.SevenZip
#endif
BlockType? type;
for (; ; )
for (;;)
{
type = ReadId();
if (type == BlockType.End)
{
break;
}
if (type == BlockType.CRC)
{
packCRCs = ReadHashDigests(numPackStreams);
@@ -434,7 +469,9 @@ namespace SharpCompress.Common.SevenZip
{
packCRCs = new List<uint?>(numPackStreams);
for (int i = 0; i < numPackStreams; i++)
{
packCRCs.Add(null);
}
}
}
finally
@@ -462,13 +499,14 @@ namespace SharpCompress.Common.SevenZip
using (CStreamSwitch streamSwitch = new CStreamSwitch())
{
streamSwitch.Set(this, dataVector);
//folders.Clear();
//folders.Reserve(numFolders);
folders = new List<CFolder>(numFolders);
int index = 0;
for (int i = 0; i < numFolders; i++)
{
var f = new CFolder { FirstPackStreamId = index };
var f = new CFolder {FirstPackStreamId = index};
folders.Add(f);
GetNextFolderItem(f);
index += f.PackStreams.Count;
@@ -499,17 +537,21 @@ namespace SharpCompress.Common.SevenZip
#endif
}
for (; ; )
for (;;)
{
BlockType? type = ReadId();
if (type == BlockType.End)
{
return;
}
if (type == BlockType.CRC)
{
List<uint?> crcs = ReadHashDigests(numFolders);
for (int i = 0; i < numFolders; i++)
{
folders[i].UnpackCRC = crcs[i];
}
continue;
}
@@ -536,7 +578,7 @@ namespace SharpCompress.Common.SevenZip
numUnpackStreamsInFolders = null;
BlockType? type;
for (; ; )
for (;;)
{
type = ReadId();
if (type == BlockType.NumUnpackStream)
@@ -559,9 +601,13 @@ namespace SharpCompress.Common.SevenZip
continue;
}
if (type == BlockType.CRC || type == BlockType.Size)
{
break;
}
if (type == BlockType.End)
{
break;
}
SkipData();
}
@@ -569,7 +615,9 @@ namespace SharpCompress.Common.SevenZip
{
numUnpackStreamsInFolders = new List<int>(folders.Count);
for (int i = 0; i < folders.Count; i++)
{
numUnpackStreamsInFolders.Add(1);
}
}
unpackSizes = new List<long>(folders.Count);
@@ -579,7 +627,9 @@ namespace SharpCompress.Common.SevenZip
// v4.07: we check that folder is empty
int numSubstreams = numUnpackStreamsInFolders[i];
if (numSubstreams == 0)
{
continue;
}
#if DEBUG
Log.Write("#{0} StreamSizes:", i);
#endif
@@ -602,7 +652,9 @@ namespace SharpCompress.Common.SevenZip
#endif
}
if (type == BlockType.Size)
{
type = ReadId();
}
int numDigests = 0;
int numDigestsTotal = 0;
@@ -610,13 +662,15 @@ namespace SharpCompress.Common.SevenZip
{
int numSubstreams = numUnpackStreamsInFolders[i];
if (numSubstreams != 1 || !folders[i].UnpackCRCDefined)
{
numDigests += numSubstreams;
}
numDigestsTotal += numSubstreams;
}
digests = null;
for (; ; )
for (;;)
{
if (type == BlockType.CRC)
{
@@ -636,12 +690,16 @@ namespace SharpCompress.Common.SevenZip
else
{
for (int j = 0; j < numSubstreams; j++, digestIndex++)
{
digests.Add(digests2[digestIndex]);
}
}
}
if (digestIndex != numDigests || numDigestsTotal != digests.Count)
System.Diagnostics.Debugger.Break();
{
Debugger.Break();
}
}
else if (type == BlockType.End)
{
@@ -649,7 +707,9 @@ namespace SharpCompress.Common.SevenZip
{
digests = new List<uint?>(numDigestsTotal);
for (int i = 0; i < numDigestsTotal; i++)
{
digests.Add(null);
}
}
return;
}
@@ -693,7 +753,7 @@ namespace SharpCompress.Common.SevenZip
unpackSizes = null;
digests = null;
for (; ; )
for (;;)
{
switch (ReadId())
{
@@ -768,12 +828,18 @@ namespace SharpCompress.Common.SevenZip
byte[] data = new byte[unpackSize];
outStream.ReadExact(data, 0, data.Length);
if (outStream.ReadByte() >= 0)
{
throw new InvalidOperationException("Decoded stream is longer than expected.");
}
dataVector.Add(data);
if (folder.UnpackCRCDefined)
{
if (CRC.Finish(CRC.Update(CRC.kInitCRC, data, 0, unpackSize)) != folder.UnpackCRC)
{
throw new InvalidOperationException("Decoded stream does not match expected CRC.");
}
}
}
return dataVector;
}
@@ -842,10 +908,14 @@ namespace SharpCompress.Common.SevenZip
db.Files.Clear();
if (type == BlockType.End)
{
return;
}
if (type != BlockType.FilesInfo)
{
throw new InvalidOperationException();
}
int numFiles = ReadNum();
#if DEBUG
@@ -853,18 +923,22 @@ namespace SharpCompress.Common.SevenZip
#endif
db.Files = new List<CFileItem>(numFiles);
for (int i = 0; i < numFiles; i++)
{
db.Files.Add(new CFileItem());
}
BitVector emptyStreamVector = new BitVector(numFiles);
BitVector emptyFileVector = null;
BitVector antiFileVector = null;
int numEmptyStreams = 0;
for (; ; )
for (;;)
{
type = ReadId();
if (type == BlockType.End)
{
break;
}
long size = checked((long)ReadNumber()); // TODO: throw invalid data on negative
int oldPos = _currentReader.Offset;
@@ -894,32 +968,34 @@ namespace SharpCompress.Common.SevenZip
Log.Write("WinAttributes:");
#endif
ReadAttributeVector(dataVector, numFiles, delegate(int i, uint? attr)
{
// Some third party implementations established an unofficial extension
// of the 7z archive format by placing posix file attributes in the high
// bits of the windows file attributes. This makes use of the fact that
// the official implementation does not perform checks on this value.
//
// Newer versions of the official 7z GUI client will try to parse this
// extension, thus acknowledging the unofficial use of these bits.
//
// For us it is safe to just discard the upper bits if they are set and
// keep the windows attributes from the lower bits (which should be set
// properly even if posix file attributes are present, in order to be
// compatible with older 7z archive readers)
//
// Note that the 15th bit is used by some implementations to indicate
// presence of the extension, but not all implementations do that so
// we can't trust that bit and must ignore it.
//
if (attr.HasValue && (attr.Value >> 16) != 0)
{
// Some third party implementations established an unofficial extension
// of the 7z archive format by placing posix file attributes in the high
// bits of the windows file attributes. This makes use of the fact that
// the official implementation does not perform checks on this value.
//
// Newer versions of the official 7z GUI client will try to parse this
// extension, thus acknowledging the unofficial use of these bits.
//
// For us it is safe to just discard the upper bits if they are set and
// keep the windows attributes from the lower bits (which should be set
// properly even if posix file attributes are present, in order to be
// compatible with older 7z archive readers)
//
// Note that the 15th bit is used by some implementations to indicate
// presence of the extension, but not all implementations do that so
// we can't trust that bit and must ignore it.
//
if (attr.HasValue && (attr.Value >> 16) != 0)
attr = attr.Value & 0x7FFFu;
attr = attr.Value & 0x7FFFu;
}
db.Files[i].Attrib = attr;
db.Files[i].Attrib = attr;
#if DEBUG
Log.Write(" " + (attr.HasValue ? attr.Value.ToString("x8") : "n/a"));
Log.Write(" " + (attr.HasValue ? attr.Value.ToString("x8") : "n/a"));
#endif
});
});
#if DEBUG
Log.WriteLine();
#endif
@@ -958,7 +1034,9 @@ namespace SharpCompress.Common.SevenZip
#if DEBUG
Log.Write("EmptyFile: ");
for (int i = 0; i < numEmptyStreams; i++)
{
Log.Write(emptyFileVector[i] ? "x" : ".");
}
Log.WriteLine();
#endif
break;
@@ -967,7 +1045,9 @@ namespace SharpCompress.Common.SevenZip
#if DEBUG
Log.Write("Anti: ");
for (int i = 0; i < numEmptyStreams; i++)
{
Log.Write(antiFileVector[i] ? "x" : ".");
}
Log.WriteLine();
#endif
break;
@@ -976,12 +1056,12 @@ namespace SharpCompress.Common.SevenZip
Log.Write("StartPos:");
#endif
ReadNumberVector(dataVector, numFiles, delegate(int i, long? startPos)
{
db.Files[i].StartPos = startPos;
{
db.Files[i].StartPos = startPos;
#if DEBUG
Log.Write(" " + (startPos.HasValue ? startPos.Value.ToString() : "n/a"));
Log.Write(" " + (startPos.HasValue ? startPos.Value.ToString() : "n/a"));
#endif
});
});
#if DEBUG
Log.WriteLine();
#endif
@@ -991,12 +1071,12 @@ namespace SharpCompress.Common.SevenZip
Log.Write("CTime:");
#endif
ReadDateTimeVector(dataVector, numFiles, delegate(int i, DateTime? time)
{
db.Files[i].CTime = time;
{
db.Files[i].CTime = time;
#if DEBUG
Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
#endif
});
});
#if DEBUG
Log.WriteLine();
#endif
@@ -1006,12 +1086,12 @@ namespace SharpCompress.Common.SevenZip
Log.Write("ATime:");
#endif
ReadDateTimeVector(dataVector, numFiles, delegate(int i, DateTime? time)
{
db.Files[i].ATime = time;
{
db.Files[i].ATime = time;
#if DEBUG
Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
#endif
});
});
#if DEBUG
Log.WriteLine();
#endif
@@ -1021,12 +1101,12 @@ namespace SharpCompress.Common.SevenZip
Log.Write("MTime:");
#endif
ReadDateTimeVector(dataVector, numFiles, delegate(int i, DateTime? time)
{
db.Files[i].MTime = time;
{
db.Files[i].MTime = time;
#if DEBUG
Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
#endif
});
});
#if DEBUG
Log.WriteLine();
#endif
@@ -1036,8 +1116,12 @@ namespace SharpCompress.Common.SevenZip
Log.Write("Dummy: " + size);
#endif
for (long j = 0; j < size; j++)
{
if (ReadByte() != 0)
{
throw new InvalidOperationException();
}
}
break;
default:
SkipData(size);
@@ -1047,7 +1131,9 @@ namespace SharpCompress.Common.SevenZip
// since 0.3 record sizes must be correct
bool checkRecordsSize = (db.MajorVersion > 0 || db.MinorVersion > 2);
if (checkRecordsSize && _currentReader.Offset - oldPos != size)
{
throw new InvalidOperationException();
}
}
int emptyFileIndex = 0;
@@ -1095,11 +1181,13 @@ namespace SharpCompress.Common.SevenZip
// TODO: Check Signature!
_header = new byte[0x20];
for (int offset = 0; offset < 0x20; )
for (int offset = 0; offset < 0x20;)
{
int delta = stream.Read(_header, offset, 0x20 - offset);
if (delta == 0)
{
throw new EndOfStreamException();
}
offset += delta;
}
@@ -1109,10 +1197,14 @@ namespace SharpCompress.Common.SevenZip
public void Close()
{
if (_stream != null)
{
_stream.Dispose();
}
foreach (var stream in _cachedStreams.Values)
{
stream.Dispose();
}
_cachedStreams.Clear();
}
@@ -1126,7 +1218,9 @@ namespace SharpCompress.Common.SevenZip
db.MinorVersion = _header[7];
if (db.MajorVersion != 0)
{
throw new InvalidOperationException();
}
uint crcFromArchive = DataReader.Get32(_header, 8);
long nextHeaderOffset = (long)DataReader.Get64(_header, 0xC);
@@ -1140,7 +1234,9 @@ namespace SharpCompress.Common.SevenZip
crc = CRC.Finish(crc);
if (crc != crcFromArchive)
{
throw new InvalidOperationException();
}
db.StartPositionAfterHeader = _streamOrigin + 0x20;
@@ -1151,12 +1247,15 @@ namespace SharpCompress.Common.SevenZip
return db;
}
if (nextHeaderOffset < 0 || nextHeaderSize < 0 || nextHeaderSize > Int32.MaxValue)
{
throw new InvalidOperationException();
}
if (nextHeaderOffset > _streamEnding - db.StartPositionAfterHeader)
{
throw new IndexOutOfRangeException();
}
_stream.Seek(nextHeaderOffset, SeekOrigin.Current);
@@ -1164,7 +1263,9 @@ namespace SharpCompress.Common.SevenZip
_stream.ReadExact(header, 0, header.Length);
if (CRC.Finish(CRC.Update(CRC.kInitCRC, header, 0, header.Length)) != nextHeaderCrc)
{
throw new InvalidOperationException();
}
using (CStreamSwitch streamSwitch = new CStreamSwitch())
{
@@ -1174,7 +1275,9 @@ namespace SharpCompress.Common.SevenZip
if (type != BlockType.Header)
{
if (type != BlockType.EncodedHeader)
{
throw new InvalidOperationException();
}
var dataVector = ReadAndDecodePackedStreams(db.StartPositionAfterHeader, pass);
@@ -1186,12 +1289,16 @@ namespace SharpCompress.Common.SevenZip
}
if (dataVector.Count != 1)
{
throw new InvalidOperationException();
}
streamSwitch.Set(this, dataVector[0]);
if (ReadId() != BlockType.Header)
{
throw new InvalidOperationException();
}
}
ReadHeader(db, pass);
@@ -1211,55 +1318,41 @@ namespace SharpCompress.Common.SevenZip
FileIndex = fileIndex;
FolderIndex = folderIndex;
if (fileIndex != -1)
{
ExtractStatuses.Add(true);
}
}
}
private class FolderUnpackStream : Stream
{
private ArchiveDatabase _db;
private int _startIndex;
private List<bool> _extractStatuses;
private readonly ArchiveDatabase _db;
private readonly int _startIndex;
private readonly List<bool> _extractStatuses;
public FolderUnpackStream(ArchiveDatabase db, int p, int startIndex, List<bool> list)
{
this._db = db;
this._startIndex = startIndex;
this._extractStatuses = list;
_db = db;
_startIndex = startIndex;
_extractStatuses = list;
}
#region Stream
public override bool CanRead
{
get { return true; }
}
public override bool CanRead { get { return true; } }
public override bool CanSeek
{
get { return false; }
}
public override bool CanSeek { get { return false; } }
public override bool CanWrite
{
get { return false; }
}
public override bool CanWrite { get { return false; } }
public override void Flush()
{
throw new NotSupportedException();
}
public override long Length
{
get { throw new NotSupportedException(); }
}
public override long Length { get { throw new NotSupportedException(); } }
public override long Position
{
get { throw new NotSupportedException(); }
set { throw new NotSupportedException(); }
}
public override long Position { get { throw new NotSupportedException(); } set { throw new NotSupportedException(); } }
public override int Read(byte[] buffer, int offset, int count)
{
@@ -1298,9 +1391,13 @@ namespace SharpCompress.Common.SevenZip
Log.WriteLine(_db.Files[index].Name);
#endif
if (_db.Files[index].CrcDefined)
{
_stream = new CrcCheckStream(_db.Files[index].Crc.Value);
}
else
{
_stream = new MemoryStream();
}
_rem = _db.Files[index].Size;
}
@@ -1312,7 +1409,9 @@ namespace SharpCompress.Common.SevenZip
{
int write = count;
if (write > _rem)
{
write = (int)_rem;
}
_stream.Write(buffer, offset, write);
count -= write;
_rem -= write;
@@ -1331,7 +1430,7 @@ namespace SharpCompress.Common.SevenZip
if (_currentIndex == _extractStatuses.Count)
{
// we support partial extracting
System.Diagnostics.Debugger.Break();
Debugger.Break();
throw new NotSupportedException();
}
OpenFile();
@@ -1352,7 +1451,9 @@ namespace SharpCompress.Common.SevenZip
long folderStartPackPos = _db.GetFolderStreamPos(folderInfo, 0);
List<long> packSizes = new List<long>();
for (int j = 0; j < folderInfo.PackStreams.Count; j++)
{
packSizes.Add(_db.PackSizes[packStreamIndex + j]);
}
s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes.ToArray(), folderInfo,
pw);
@@ -1367,12 +1468,16 @@ namespace SharpCompress.Common.SevenZip
int numFilesInFolder = _db.NumUnpackStreamsVector[folderIndex];
int firstFileIndex = _db.FolderStartFileIndex[folderIndex];
if (firstFileIndex > fileIndex || fileIndex - firstFileIndex >= numFilesInFolder)
{
throw new InvalidOperationException();
}
int skipCount = fileIndex - firstFileIndex;
long skipSize = 0;
for (int i = 0; i < skipCount; i++)
{
skipSize += _db.Files[firstFileIndex + i].Size;
}
Stream s = GetCachedDecoderStream(_db, folderIndex, pw);
s.Position = skipSize;
@@ -1384,12 +1489,18 @@ namespace SharpCompress.Common.SevenZip
int numItems;
bool allFilesMode = (indices == null);
if (allFilesMode)
{
numItems = _db.Files.Count;
}
else
{
numItems = indices.Length;
}
if (numItems == 0)
{
return;
}
List<CExtractFolderInfo> extractFolderInfoVector = new List<CExtractFolderInfo>();
for (int i = 0; i < numItems; i++)
@@ -1404,27 +1515,37 @@ namespace SharpCompress.Common.SevenZip
}
if (extractFolderInfoVector.Count == 0 || folderIndex != extractFolderInfoVector.Last().FolderIndex)
{
extractFolderInfoVector.Add(new CExtractFolderInfo(-1, folderIndex));
}
CExtractFolderInfo efi = extractFolderInfoVector.Last();
int startIndex = _db.FolderStartFileIndex[folderIndex];
for (int index = efi.ExtractStatuses.Count; index <= fileIndex - startIndex; index++)
{
efi.ExtractStatuses.Add(index == fileIndex - startIndex);
}
}
foreach (CExtractFolderInfo efi in extractFolderInfoVector)
{
int startIndex;
if (efi.FileIndex != -1)
{
startIndex = efi.FileIndex;
}
else
{
startIndex = _db.FolderStartFileIndex[efi.FolderIndex];
}
var outStream = new FolderUnpackStream(_db, 0, startIndex, efi.ExtractStatuses);
if (efi.FileIndex != -1)
{
continue;
}
int folderIndex = efi.FolderIndex;
CFolder folderInfo = _db.Folders[folderIndex];
@@ -1434,18 +1555,22 @@ namespace SharpCompress.Common.SevenZip
List<long> packSizes = new List<long>();
for (int j = 0; j < folderInfo.PackStreams.Count; j++)
{
packSizes.Add(_db.PackSizes[packStreamIndex + j]);
}
// TODO: If the decoding fails the last file may be extracted incompletely. Delete it?
Stream s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes.ToArray(),
folderInfo, pw);
byte[] buffer = new byte[4 << 10];
for (; ; )
for (;;)
{
int processed = s.Read(buffer, 0, buffer.Length);
if (processed == 0)
{
break;
}
outStream.Write(buffer, 0, processed);
}
}

View File

@@ -12,19 +12,13 @@ namespace SharpCompress.Common.SevenZip
public bool HasStream { get; internal set; }
public bool IsDir { get; internal set; }
public bool CrcDefined
{
get { return Crc != null; }
}
public bool CrcDefined { get { return Crc != null; } }
public bool AttribDefined
{
get { return Attrib != null; }
}
public bool AttribDefined { get { return Attrib != null; } }
public void SetAttrib(uint attrib)
{
this.Attrib = attrib;
Attrib = attrib;
}
public DateTime? CTime { get; internal set; }

View File

@@ -1,6 +1,6 @@
using System;
using System.Collections.Generic;
using SharpCompress.Compressor.LZMA;
using SharpCompress.Compressors.LZMA;
namespace SharpCompress.Common.SevenZip
{
@@ -13,19 +13,22 @@ namespace SharpCompress.Common.SevenZip
internal List<long> UnpackSizes = new List<long>();
internal uint? UnpackCRC;
internal bool UnpackCRCDefined
{
get { return UnpackCRC != null; }
}
internal bool UnpackCRCDefined { get { return UnpackCRC != null; } }
public long GetUnpackSize()
{
if (UnpackSizes.Count == 0)
{
return 0;
}
for (int i = UnpackSizes.Count - 1; i >= 0; i--)
{
if (FindBindPairForOutStream(i) < 0)
{
return UnpackSizes[i];
}
}
throw new Exception();
}
@@ -34,7 +37,9 @@ namespace SharpCompress.Common.SevenZip
{
int count = 0;
for (int i = 0; i < Coders.Count; i++)
{
count += Coders[i].NumOutStreams;
}
return count;
}
@@ -42,8 +47,12 @@ namespace SharpCompress.Common.SevenZip
public int FindBindPairForInStream(int inStreamIndex)
{
for (int i = 0; i < BindPairs.Count; i++)
{
if (BindPairs[i].InIndex == inStreamIndex)
{
return i;
}
}
return -1;
}
@@ -51,8 +60,12 @@ namespace SharpCompress.Common.SevenZip
public int FindBindPairForOutStream(int outStreamIndex)
{
for (int i = 0; i < BindPairs.Count; i++)
{
if (BindPairs[i].OutIndex == outStreamIndex)
{
return i;
}
}
return -1;
}
@@ -60,8 +73,12 @@ namespace SharpCompress.Common.SevenZip
public int FindPackStreamArrayIndex(int inStreamIndex)
{
for (int i = 0; i < PackStreams.Count; i++)
{
if (PackStreams[i] == inStreamIndex)
{
return i;
}
}
return -1;
}
@@ -69,8 +86,12 @@ namespace SharpCompress.Common.SevenZip
public bool IsEncrypted()
{
for (int i = Coders.Count - 1; i >= 0; i--)
{
if (Coders[i].MethodId == CMethodId.kAES)
{
return true;
}
}
return false;
}
@@ -82,25 +103,39 @@ namespace SharpCompress.Common.SevenZip
const int kNumBindsMax = 32;
if (Coders.Count > kNumCodersMax || BindPairs.Count > kNumBindsMax)
{
return false;
}
{
var v = new BitVector(BindPairs.Count + PackStreams.Count);
for (int i = 0; i < BindPairs.Count; i++)
{
if (v.GetAndSet(BindPairs[i].InIndex))
{
return false;
}
}
for (int i = 0; i < PackStreams.Count; i++)
{
if (v.GetAndSet(PackStreams[i]))
{
return false;
}
}
}
{
var v = new BitVector(UnpackSizes.Count);
for (int i = 0; i < BindPairs.Count; i++)
{
if (v.GetAndSet(BindPairs[i].OutIndex))
{
return false;
}
}
}
uint[] mask = new uint[kMaskSize];
@@ -112,9 +147,13 @@ namespace SharpCompress.Common.SevenZip
{
CCoderInfo coder = Coders[i];
for (int j = 0; j < coder.NumInStreams; j++)
{
inStreamToCoder.Add(i);
}
for (int j = 0; j < coder.NumOutStreams; j++)
{
outStreamToCoder.Add(i);
}
}
for (int i = 0; i < BindPairs.Count; i++)
@@ -125,13 +164,23 @@ namespace SharpCompress.Common.SevenZip
}
for (int i = 0; i < kMaskSize; i++)
{
for (int j = 0; j < kMaskSize; j++)
{
if (((1u << j) & mask[i]) != 0)
{
mask[i] |= mask[j];
}
}
}
for (int i = 0; i < kMaskSize; i++)
{
if (((1u << i) & mask[i]) != 0)
{
return false;
}
}
return true;
}

View File

@@ -16,7 +16,7 @@
public CMethodId(ulong id)
{
this.Id = id;
Id = id;
}
public override int GetHashCode()
@@ -26,7 +26,7 @@
public override bool Equals(object obj)
{
return obj is CMethodId && (CMethodId) obj == this;
return obj is CMethodId && (CMethodId)obj == this;
}
public bool Equals(CMethodId other)
@@ -48,7 +48,9 @@
{
int bytes = 0;
for (ulong value = Id; value != 0; value >>= 8)
{
bytes++;
}
return bytes;
}
}

View File

@@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Compressor.LZMA;
using SharpCompress.Compressors.LZMA;
namespace SharpCompress.Common.SevenZip
{
@@ -47,7 +46,9 @@ namespace SharpCompress.Common.SevenZip
{
int dataIndex = archive.ReadNum();
if (dataIndex < 0 || dataIndex >= dataVector.Count)
{
throw new InvalidOperationException();
}
#if DEBUG
Log.WriteLine("[switch to stream {0}]", dataIndex);

View File

@@ -1,7 +1,7 @@
using System;
using System.IO;
using System.Text;
using SharpCompress.Compressor.LZMA;
using SharpCompress.Compressors.LZMA;
namespace SharpCompress.Common.SevenZip
{
@@ -11,31 +11,30 @@ namespace SharpCompress.Common.SevenZip
public static uint Get32(byte[] buffer, int offset)
{
return (uint) buffer[offset]
+ ((uint) buffer[offset + 1] << 8)
+ ((uint) buffer[offset + 2] << 16)
+ ((uint) buffer[offset + 3] << 24);
return buffer[offset]
+ ((uint)buffer[offset + 1] << 8)
+ ((uint)buffer[offset + 2] << 16)
+ ((uint)buffer[offset + 3] << 24);
}
public static ulong Get64(byte[] buffer, int offset)
{
return (ulong) buffer[offset]
+ ((ulong) buffer[offset + 1] << 8)
+ ((ulong) buffer[offset + 2] << 16)
+ ((ulong) buffer[offset + 3] << 24)
+ ((ulong) buffer[offset + 4] << 32)
+ ((ulong) buffer[offset + 5] << 40)
+ ((ulong) buffer[offset + 6] << 48)
+ ((ulong) buffer[offset + 7] << 56);
return buffer[offset]
+ ((ulong)buffer[offset + 1] << 8)
+ ((ulong)buffer[offset + 2] << 16)
+ ((ulong)buffer[offset + 3] << 24)
+ ((ulong)buffer[offset + 4] << 32)
+ ((ulong)buffer[offset + 5] << 40)
+ ((ulong)buffer[offset + 6] << 48)
+ ((ulong)buffer[offset + 7] << 56);
}
#endregion
#region Variables
private byte[] _buffer;
private int _offset;
private int _ending;
private readonly byte[] _buffer;
private readonly int _ending;
#endregion
@@ -44,38 +43,43 @@ namespace SharpCompress.Common.SevenZip
public DataReader(byte[] buffer, int offset, int length)
{
_buffer = buffer;
_offset = offset;
Offset = offset;
_ending = offset + length;
}
public int Offset
{
get { return _offset; }
}
public int Offset { get; private set; }
public Byte ReadByte()
{
if (_offset >= _ending)
if (Offset >= _ending)
{
throw new EndOfStreamException();
}
return _buffer[_offset++];
return _buffer[Offset++];
}
public void ReadBytes(byte[] buffer, int offset, int length)
{
if (length > _ending - _offset)
if (length > _ending - Offset)
{
throw new EndOfStreamException();
}
while (length-- > 0)
buffer[offset++] = _buffer[_offset++];
{
buffer[offset++] = _buffer[Offset++];
}
}
public void SkipData(long size)
{
if (size > _ending - _offset)
if (size > _ending - Offset)
{
throw new EndOfStreamException();
}
_offset += (int) size;
Offset += (int)size;
#if DEBUG
Log.WriteLine("SkipData {0}", size);
#endif
@@ -83,15 +87,17 @@ namespace SharpCompress.Common.SevenZip
public void SkipData()
{
SkipData(checked((long) ReadNumber()));
SkipData(checked((long)ReadNumber()));
}
public ulong ReadNumber()
{
if (_offset >= _ending)
if (Offset >= _ending)
{
throw new EndOfStreamException();
}
byte firstByte = _buffer[_offset++];
byte firstByte = _buffer[Offset++];
byte mask = 0x80;
ulong value = 0;
@@ -100,14 +106,16 @@ namespace SharpCompress.Common.SevenZip
if ((firstByte & mask) == 0)
{
ulong highPart = firstByte & (mask - 1u);
value += highPart << (i*8);
value += highPart << (i * 8);
return value;
}
if (_offset >= _ending)
if (Offset >= _ending)
{
throw new EndOfStreamException();
}
value |= (ulong) _buffer[_offset++] << (8*i);
value |= (ulong)_buffer[Offset++] << (8 * i);
mask >>= 1;
}
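ReadNumber above decodes 7-Zip's variable-length integer format: each leading 1-bit in the first byte means one more little-endian byte follows, and the remaining low bits of the first byte supply the high part of the value. A minimal worked example, using the DataReader constructor shown above in this file:
// 0x81 = 1000_0001b: one extra byte follows, low 6 bits (0x01) are the high part
// value = 0x2C + (0x01 << 8) = 0x12C = 300
ulong n = new DataReader(new byte[] { 0x81, 0x2C }, 0, 2).ReadNumber();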
@@ -118,48 +126,58 @@ namespace SharpCompress.Common.SevenZip
{
ulong value = ReadNumber();
if (value > Int32.MaxValue)
{
throw new NotSupportedException();
}
return (int) value;
return (int)value;
}
public uint ReadUInt32()
{
if (_offset + 4 > _ending)
if (Offset + 4 > _ending)
{
throw new EndOfStreamException();
}
uint res = Get32(_buffer, _offset);
_offset += 4;
uint res = Get32(_buffer, Offset);
Offset += 4;
return res;
}
public ulong ReadUInt64()
{
if (_offset + 8 > _ending)
if (Offset + 8 > _ending)
{
throw new EndOfStreamException();
}
ulong res = Get64(_buffer, _offset);
_offset += 8;
ulong res = Get64(_buffer, Offset);
Offset += 8;
return res;
}
public string ReadString()
{
int ending = _offset;
int ending = Offset;
for (;;)
{
if (ending + 2 > _ending)
{
throw new EndOfStreamException();
}
if (_buffer[ending] == 0 && _buffer[ending + 1] == 0)
{
break;
}
ending += 2;
}
string str = Encoding.Unicode.GetString(_buffer, _offset, ending - _offset);
_offset = ending + 2;
string str = Encoding.Unicode.GetString(_buffer, Offset, ending - Offset);
Offset = ending + 2;
return str;
}
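ReadNumber above implements 7-Zip's variable-length integer encoding: the leading set bits of the first byte say how many extra little-endian bytes follow, and the remaining low bits of that first byte become the most significant part of the value. A self-contained decoder following the same loop, with one worked example (0x92 0x34 decodes to 0x1234); this is an illustration, not the library's code:
using System;

static class SevenZipNumberDemo
{
    // Decode a 7-Zip style variable-length number from data starting at offset.
    static ulong ReadNumber(byte[] data, ref int offset)
    {
        byte firstByte = data[offset++];
        byte mask = 0x80;
        ulong value = 0;
        for (int i = 0; i < 8; i++)
        {
            if ((firstByte & mask) == 0)
            {
                // The remaining low bits of the first byte are the high part of the value.
                ulong highPart = firstByte & (mask - 1u);
                return value + (highPart << (i * 8));
            }
            // One more little-endian byte contributes bits 8*i .. 8*i+7.
            value |= (ulong)data[offset++] << (8 * i);
            mask >>= 1;
        }
        return value;
    }

    static void Main()
    {
        int pos = 0;
        Console.WriteLine(ReadNumber(new byte[] { 0x92, 0x34 }, ref pos).ToString("X")); // prints 1234
    }
}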

View File

@@ -7,79 +7,37 @@ namespace SharpCompress.Common.SevenZip
{
internal SevenZipEntry(SevenZipFilePart filePart)
{
this.FilePart = filePart;
FilePart = filePart;
}
internal SevenZipFilePart FilePart { get; private set; }
internal SevenZipFilePart FilePart { get; }
public override CompressionType CompressionType
{
get { return FilePart.CompressionType; }
}
public override CompressionType CompressionType { get { return FilePart.CompressionType; } }
public override long Crc
{
get { return FilePart.Header.Crc ?? 0; }
}
public override long Crc { get { return FilePart.Header.Crc ?? 0; } }
public override string Key
{
get { return FilePart.Header.Name; }
}
public override string Key { get { return FilePart.Header.Name; } }
public override long CompressedSize
{
get { return 0; }
}
public override long CompressedSize { get { return 0; } }
public override long Size
{
get { return (long) FilePart.Header.Size; }
}
public override long Size { get { return FilePart.Header.Size; } }
public override DateTime? LastModifiedTime
{
get { return FilePart.Header.MTime; }
}
public override DateTime? LastModifiedTime { get { return FilePart.Header.MTime; } }
public override DateTime? CreatedTime
{
get { return null; }
}
public override DateTime? CreatedTime { get { return null; } }
public override DateTime? LastAccessedTime
{
get { return null; }
}
public override DateTime? LastAccessedTime { get { return null; } }
public override DateTime? ArchivedTime
{
get { return null; }
}
public override DateTime? ArchivedTime { get { return null; } }
public override bool IsEncrypted
{
get { return false; }
}
public override bool IsEncrypted { get { return false; } }
public override bool IsDirectory
{
get { return FilePart.Header.IsDir; }
}
public override bool IsDirectory { get { return FilePart.Header.IsDir; } }
public override bool IsSplit
{
get { return false; }
}
public override bool IsSplit { get { return false; } }
public override int? Attrib
{
get { return (int) FilePart.Header.Attrib; }
}
public override int? Attrib { get { return (int)FilePart.Header.Attrib; } }
internal override IEnumerable<FilePart> Parts
{
get { return FilePart.AsEnumerable<FilePart>(); }
}
internal override IEnumerable<FilePart> Parts { get { return FilePart.AsEnumerable<FilePart>(); } }
}
}
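The SevenZipEntry changes are mechanical: multi-line getters collapse to single-line bodies, and { get; private set; } properties that are only assigned in the constructor become C# 6 get-only auto-properties. A minimal illustration of the pattern (the class and names here are made up for the example):
internal class EntryInfo
{
    internal EntryInfo(string key)
    {
        Key = key;                                          // get-only auto-property: assignable only here
    }

    internal string Key { get; }                            // was: { get; private set; }
    internal bool IsEncrypted { get { return false; } }     // single-line getter body
}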

View File

@@ -8,8 +8,8 @@ namespace SharpCompress.Common.SevenZip
internal class SevenZipFilePart : FilePart
{
private CompressionType? type;
private Stream stream;
private ArchiveDatabase database;
private readonly Stream stream;
private readonly ArchiveDatabase database;
internal SevenZipFilePart(Stream stream, ArchiveDatabase database, int index, CFileItem fileEntry)
{
@@ -24,14 +24,11 @@ namespace SharpCompress.Common.SevenZip
}
internal Stream BaseStream { get; private set; }
internal CFileItem Header { get; private set; }
internal CFolder Folder { get; private set; }
internal int Index { get; private set; }
internal CFileItem Header { get; }
internal CFolder Folder { get; }
internal int Index { get; }
internal override string FilePartName
{
get { return Header.Name; }
}
internal override string FilePartName { get { return Header.Name; } }
internal override Stream GetRawStream()
{
@@ -90,17 +87,17 @@ namespace SharpCompress.Common.SevenZip
{
case k_LZMA:
case k_LZMA2:
{
return CompressionType.LZMA;
}
{
return CompressionType.LZMA;
}
case k_PPMD:
{
return CompressionType.PPMd;
}
{
return CompressionType.PPMd;
}
case k_BZip2:
{
return CompressionType.BZip2;
}
{
return CompressionType.BZip2;
}
default:
throw new NotImplementedException();
}

View File

@@ -1,11 +1,13 @@
using System.IO;
using SharpCompress.Archives;
using SharpCompress.Readers;
namespace SharpCompress.Common.SevenZip
{
public class SevenZipVolume : Volume
{
public SevenZipVolume(Stream stream, Options options)
: base(stream, options)
public SevenZipVolume(Stream stream, ReaderOptions readerFactoryOptions)
: base(stream, readerFactoryOptions)
{
}
}

View File

@@ -3,17 +3,17 @@
internal enum EntryType : byte
{
File = 0,
OldFile = (byte) '0',
HardLink = (byte) '1',
SymLink = (byte) '2',
CharDevice = (byte) '3',
BlockDevice = (byte) '4',
Directory = (byte) '5',
Fifo = (byte) '6',
LongLink = (byte) 'K',
LongName = (byte) 'L',
SparseFile = (byte) 'S',
VolumeHeader = (byte) 'V',
GlobalExtendedHeader = (byte) 'g',
OldFile = (byte)'0',
HardLink = (byte)'1',
SymLink = (byte)'2',
CharDevice = (byte)'3',
BlockDevice = (byte)'4',
Directory = (byte)'5',
Fifo = (byte)'6',
LongLink = (byte)'K',
LongName = (byte)'L',
SparseFile = (byte)'S',
VolumeHeader = (byte)'V',
GlobalExtendedHeader = (byte)'g'
}
}
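The enum values are the raw typeflag characters from the ustar header, so a header byte maps straight onto EntryType with a cast. A hypothetical fragment, assuming a 512-byte header record already read into headerBlock and access to the EntryType enum above (the typeflag sits at offset 156 in the ustar layout):
// Hypothetical helper: map the ustar typeflag byte (header offset 156) to EntryType.
static EntryType TypeOf(byte[] headerBlock)
{
    return (EntryType)headerBlock[156];
}
// e.g. TypeOf(headerBlock) == EntryType.Directory when that byte is (byte)'5'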

View File

@@ -1,7 +1,7 @@
using System;
using System.IO;
using System.Text;
using SharpCompress.Converter;
using SharpCompress.Converters;
namespace SharpCompress.Common.Tar.Headers
{
@@ -10,6 +10,7 @@ namespace SharpCompress.Common.Tar.Headers
internal static readonly DateTime Epoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
internal string Name { get; set; }
//internal int Mode { get; set; }
//internal int UserId { get; set; }
//internal string UserName { get; set; }
@@ -77,7 +78,9 @@ namespace SharpCompress.Common.Tar.Headers
// pad to multiple of BlockSize bytes, and make sure a terminating null is added
int numPaddingBytes = BlockSize - (nameBytes.Length % BlockSize);
if (numPaddingBytes == 0)
{
numPaddingBytes = BlockSize;
}
output.Write(new byte[numPaddingBytes], 0, numPaddingBytes);
}
@@ -131,11 +134,15 @@ namespace SharpCompress.Common.Tar.Headers
private string ReadLongName(BinaryReader reader, byte[] buffer)
{
var size = ReadSize(buffer);
var nameLength = (int) size;
var nameLength = (int)size;
var nameBytes = reader.ReadBytes(nameLength);
var remainingBytesToRead = BlockSize - (nameLength%BlockSize);
var remainingBytesToRead = BlockSize - (nameLength % BlockSize);
// Read the rest of the block and discard the data
if (remainingBytesToRead < BlockSize) reader.ReadBytes(remainingBytesToRead);
if (remainingBytesToRead < BlockSize)
{
reader.ReadBytes(remainingBytesToRead);
}
return ArchiveEncoding.Default.GetString(nameBytes, 0, nameBytes.Length).TrimNulls();
}
@@ -157,9 +164,9 @@ namespace SharpCompress.Common.Tar.Headers
{
byte[] buffer = reader.ReadBytes(BlockSize);
if (buffer.Length < BlockSize)
if (buffer.Length != 0 && buffer.Length < BlockSize)
{
throw new InvalidOperationException();
throw new InvalidOperationException("Buffer is invalid size");
}
return buffer;
}
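The long-name handling above revolves around tar's 512-byte record size: a GNU 'L' entry stores the name as data padded to a whole number of records, and when the name exactly fills a record an entire extra record of zeros is still written so a terminating null is guaranteed. A small arithmetic check of that padding rule (a sketch, assuming BlockSize is 512 as in the rest of the tar code):
static class TarPaddingDemo
{
    const int BlockSize = 512;

    static int PaddingFor(int nameLength)
    {
        int padding = BlockSize - (nameLength % BlockSize);
        return padding == 0 ? BlockSize : padding; // a full extra block when the name exactly fills one
    }

    static void Main()
    {
        System.Console.WriteLine(PaddingFor(100)); // 412 -> name + padding fill one 512-byte block
        System.Console.WriteLine(PaddingFor(512)); // 512 -> a whole extra block just for the terminating null
    }
}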

View File

@@ -9,78 +9,38 @@ namespace SharpCompress.Common.Tar
public class TarEntry : Entry
{
private readonly TarFilePart filePart;
private readonly CompressionType type;
internal TarEntry(TarFilePart filePart, CompressionType type)
{
this.filePart = filePart;
this.type = type;
CompressionType = type;
}
public override CompressionType CompressionType
{
get { return type; }
}
public override CompressionType CompressionType { get; }
public override long Crc
{
get { return 0; }
}
public override long Crc { get { return 0; } }
public override string Key
{
get { return filePart.Header.Name; }
}
public override string Key { get { return filePart.Header.Name; } }
public override long CompressedSize
{
get { return filePart.Header.Size; }
}
public override long CompressedSize { get { return filePart.Header.Size; } }
public override long Size
{
get { return filePart.Header.Size; }
}
public override long Size { get { return filePart.Header.Size; } }
public override DateTime? LastModifiedTime
{
get { return filePart.Header.LastModifiedTime; }
}
public override DateTime? LastModifiedTime { get { return filePart.Header.LastModifiedTime; } }
public override DateTime? CreatedTime
{
get { return null; }
}
public override DateTime? CreatedTime { get { return null; } }
public override DateTime? LastAccessedTime
{
get { return null; }
}
public override DateTime? LastAccessedTime { get { return null; } }
public override DateTime? ArchivedTime
{
get { return null; }
}
public override DateTime? ArchivedTime { get { return null; } }
public override bool IsEncrypted
{
get { return false; }
}
public override bool IsEncrypted { get { return false; } }
public override bool IsDirectory
{
get { return filePart.Header.EntryType == EntryType.Directory; }
}
public override bool IsDirectory { get { return filePart.Header.EntryType == EntryType.Directory; } }
public override bool IsSplit
{
get { return false; }
}
public override bool IsSplit { get { return false; } }
internal override IEnumerable<FilePart> Parts
{
get { return filePart.AsEnumerable<FilePart>(); }
}
internal override IEnumerable<FilePart> Parts { get { return filePart.AsEnumerable<FilePart>(); } }
internal static IEnumerable<TarEntry> GetEntries(StreamingMode mode, Stream stream,
CompressionType compressionType)

View File

@@ -14,12 +14,9 @@ namespace SharpCompress.Common.Tar
Header = header;
}
internal TarHeader Header { get; private set; }
internal TarHeader Header { get; }
internal override string FilePartName
{
get { return Header.Name; }
}
internal override string FilePartName { get { return Header.Name; } }
internal override Stream GetCompressedStream()
{

View File

@@ -23,21 +23,22 @@ namespace SharpCompress.Common.Tar
switch (mode)
{
case StreamingMode.Seekable:
{
header.DataStartPosition = reader.BaseStream.Position;
//skip to nearest 512
reader.BaseStream.Position += PadTo512(header.Size);
}
{
header.DataStartPosition = reader.BaseStream.Position;
//skip to nearest 512
reader.BaseStream.Position += PadTo512(header.Size);
}
break;
case StreamingMode.Streaming:
{
header.PackedStream = new TarReadOnlySubStream(stream, header.Size);
}
{
header.PackedStream = new TarReadOnlySubStream(stream, header.Size);
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
}
catch
@@ -50,7 +51,7 @@ namespace SharpCompress.Common.Tar
private static long PadTo512(long size)
{
int zeros = (int) (size%512);
int zeros = (int)(size % 512);
if (zeros == 0)
{
return size;
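PadTo512 (shown truncated by the diff) rounds an entry's data size up to the next 512-byte record boundary so a seekable reader can jump straight to the following header. A hedged sketch of that rounding, not necessarily the library's exact body:
static class TarSkipDemo
{
    // Bytes to advance past an entry of `size` data bytes so the stream lands on a 512-byte boundary.
    static long PadTo512(long size)
    {
        long remainder = size % 512;
        return remainder == 0 ? size : size + (512 - remainder);
    }
    // PadTo512(1000) == 1024; PadTo512(1024) == 1024
}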

View File

@@ -1,4 +1,5 @@
using System.IO;
using System;
using System.IO;
namespace SharpCompress.Common.Tar
{
@@ -39,38 +40,22 @@ namespace SharpCompress.Common.Tar
private long BytesLeftToRead { get; set; }
public Stream Stream { get; private set; }
public Stream Stream { get; }
public override bool CanRead
{
get { return true; }
}
public override bool CanRead { get { return true; } }
public override bool CanSeek
{
get { return false; }
}
public override bool CanSeek { get { return false; } }
public override bool CanWrite
{
get { return false; }
}
public override bool CanWrite { get { return false; } }
public override void Flush()
{
throw new System.NotSupportedException();
throw new NotSupportedException();
}
public override long Length
{
get { throw new System.NotSupportedException(); }
}
public override long Length { get { throw new NotSupportedException(); } }
public override long Position
{
get { throw new System.NotSupportedException(); }
set { throw new System.NotSupportedException(); }
}
public override long Position { get { throw new NotSupportedException(); } set { throw new NotSupportedException(); } }
public override int Read(byte[] buffer, int offset, int count)
{
@@ -89,17 +74,17 @@ namespace SharpCompress.Common.Tar
public override long Seek(long offset, SeekOrigin origin)
{
throw new System.NotSupportedException();
throw new NotSupportedException();
}
public override void SetLength(long value)
{
throw new System.NotSupportedException();
throw new NotSupportedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
throw new System.NotSupportedException();
throw new NotSupportedException();
}
}
}
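TarReadOnlySubStream is a forward-only window over the archive stream: it exposes exactly one entry's bytes and refuses seeking, writing, and length queries. The Read override itself is elided by this diff; a minimal sketch of how such a bounded read typically works, assuming the BytesLeftToRead counter declared above (an illustration, not necessarily the library's exact implementation):
public override int Read(byte[] buffer, int offset, int count)
{
    if (BytesLeftToRead < count)
    {
        count = (int)BytesLeftToRead;   // never hand out more bytes than the entry owns
    }
    int read = Stream.Read(buffer, offset, count);
    if (read > 0)
    {
        BytesLeftToRead -= read;        // shrink the remaining window by what was actually read
    }
    return read;                        // 0 once the entry's bytes are exhausted
}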

View File

@@ -1,11 +1,12 @@
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common.Tar
{
public class TarVolume : Volume
{
public TarVolume(Stream stream, Options options)
: base(stream, options)
public TarVolume(Stream stream, ReaderOptions readerOptions)
: base(stream, readerOptions)
{
}
}

View File

@@ -1,5 +1,6 @@
using System.IO;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common
{
@@ -7,41 +8,32 @@ namespace SharpCompress.Common
{
private readonly Stream actualStream;
internal Volume(Stream stream, Options options)
internal Volume(Stream stream, ReaderOptions readerOptions)
{
actualStream = stream;
Options = options;
ReaderOptions = readerOptions;
}
internal Stream Stream
{
get { return new NonDisposingStream(actualStream); }
}
internal Stream Stream { get { return new NonDisposingStream(actualStream); } }
internal Options Options { get; private set; }
protected ReaderOptions ReaderOptions { get; }
/// <summary>
/// RarArchive is the first volume of a multi-part archive.
/// Only Rar 3.0 format and higher
/// </summary>
public virtual bool IsFirstVolume
{
get { return true; }
}
public virtual bool IsFirstVolume { get { return true; } }
/// <summary>
/// RarArchive is part of a multi-part archive.
/// </summary>
public virtual bool IsMultiVolume
{
get { return true; }
}
public virtual bool IsMultiVolume { get { return true; } }
private bool disposed;
public void Dispose()
{
if (!Options.HasFlag(Options.KeepStreamsOpen) && !disposed)
if (!ReaderOptions.LeaveStreamOpen && !disposed)
{
actualStream.Dispose();
disposed = true;
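With the options rework, stream lifetime is governed by ReaderOptions.LeaveStreamOpen instead of the old Options.KeepStreamsOpen flag, and Volume.Dispose honours it. A hedged usage sketch, assuming a ReaderFactory.Open overload that accepts a ReaderOptions and a settable LeaveStreamOpen property, as the rest of this change set suggests; the exact factory signature may differ:
using System.IO;
using SharpCompress.Readers;

static class LeaveOpenDemo
{
    static void Main()
    {
        using (var stream = File.OpenRead("archive.tar"))
        {
            var options = new ReaderOptions { LeaveStreamOpen = true };
            using (var reader = ReaderFactory.Open(stream, options))
            {
                while (reader.MoveToNextEntry()) { /* inspect or extract entries */ }
            }
            // stream is still open here: Volume.Dispose respected LeaveStreamOpen
        }
    }
}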

View File

@@ -14,8 +14,8 @@ namespace SharpCompress.Common.Zip.Headers
{
Version = reader.ReadUInt16();
VersionNeededToExtract = reader.ReadUInt16();
Flags = (HeaderFlags) reader.ReadUInt16();
CompressionMethod = (ZipCompressionMethod) reader.ReadUInt16();
Flags = (HeaderFlags)reader.ReadUInt16();
CompressionMethod = (ZipCompressionMethod)reader.ReadUInt16();
LastModifiedTime = reader.ReadUInt16();
LastModifiedDate = reader.ReadUInt16();
Crc = reader.ReadUInt32();
@@ -47,8 +47,8 @@ namespace SharpCompress.Common.Zip.Headers
{
writer.Write(Version);
writer.Write(VersionNeededToExtract);
writer.Write((ushort) Flags);
writer.Write((ushort) CompressionMethod);
writer.Write((ushort)Flags);
writer.Write((ushort)CompressionMethod);
writer.Write(LastModifiedTime);
writer.Write(LastModifiedDate);
writer.Write(Crc);
@@ -56,10 +56,11 @@ namespace SharpCompress.Common.Zip.Headers
writer.Write(UncompressedSize);
byte[] nameBytes = EncodeString(Name);
writer.Write((ushort) nameBytes.Length);
writer.Write((ushort)nameBytes.Length);
//writer.Write((ushort)Extra.Length);
writer.Write((ushort) 0);
writer.Write((ushort) Comment.Length);
writer.Write((ushort)0);
writer.Write((ushort)Comment.Length);
writer.Write(DiskNumberStart);
writer.Write(InternalFileAttributes);
@@ -67,6 +68,7 @@ namespace SharpCompress.Common.Zip.Headers
writer.Write(RelativeOffsetOfEntryHeader);
writer.Write(nameBytes);
// writer.Write(Extra);
writer.Write(Comment);
}

View File

@@ -13,8 +13,8 @@ namespace SharpCompress.Common.Zip.Headers
internal override void Read(BinaryReader reader)
{
Version = reader.ReadUInt16();
Flags = (HeaderFlags) reader.ReadUInt16();
CompressionMethod = (ZipCompressionMethod) reader.ReadUInt16();
Flags = (HeaderFlags)reader.ReadUInt16();
CompressionMethod = (ZipCompressionMethod)reader.ReadUInt16();
LastModifiedTime = reader.ReadUInt16();
LastModifiedDate = reader.ReadUInt16();
Crc = reader.ReadUInt32();
@@ -28,17 +28,17 @@ namespace SharpCompress.Common.Zip.Headers
LoadExtra(extra);
var unicodePathExtra = Extra.FirstOrDefault(u => u.Type == ExtraDataType.UnicodePathExtraField);
if (unicodePathExtra!=null)
if (unicodePathExtra != null)
{
Name = ((ExtraUnicodePathExtraField) unicodePathExtra).UnicodeName;
Name = ((ExtraUnicodePathExtraField)unicodePathExtra).UnicodeName;
}
}
internal override void Write(BinaryWriter writer)
{
writer.Write(Version);
writer.Write((ushort) Flags);
writer.Write((ushort) CompressionMethod);
writer.Write((ushort)Flags);
writer.Write((ushort)CompressionMethod);
writer.Write(LastModifiedTime);
writer.Write(LastModifiedDate);
writer.Write(Crc);
@@ -47,8 +47,9 @@ namespace SharpCompress.Common.Zip.Headers
byte[] nameBytes = EncodeString(Name);
writer.Write((ushort) nameBytes.Length);
writer.Write((ushort) 0);
writer.Write((ushort)nameBytes.Length);
writer.Write((ushort)0);
//if (Extra != null)
//{
// writer.Write(Extra);

View File

@@ -8,6 +8,7 @@ namespace SharpCompress.Common.Zip.Headers
WinZipAes = 0x9901,
NotImplementedExtraData = 0xFFFF,
// Third Party Mappings
// -Info-ZIP Unicode Path Extra Field
UnicodePathExtraField = 0x7075
@@ -22,17 +23,14 @@ namespace SharpCompress.Common.Zip.Headers
internal class ExtraUnicodePathExtraField : ExtraData
{
internal byte Version
{
get { return this.DataBytes[0]; }
}
internal byte Version { get { return DataBytes[0]; } }
internal byte[] NameCRC32
{
get
{
var crc = new byte[4];
Buffer.BlockCopy(this.DataBytes, 1, crc, 0, 4);
Buffer.BlockCopy(DataBytes, 1, crc, 0, 4);
return crc;
}
}
@@ -42,8 +40,8 @@ namespace SharpCompress.Common.Zip.Headers
get
{
// PathNamelength = dataLength - Version(1 byte) - NameCRC32(4 bytes)
var length = this.Length - 5;
var nameStr = Encoding.UTF8.GetString(this.DataBytes, 5, length);
var length = Length - 5;
var nameStr = Encoding.UTF8.GetString(DataBytes, 5, length);
return nameStr;
}
}
@@ -51,25 +49,25 @@ namespace SharpCompress.Common.Zip.Headers
internal static class LocalEntryHeaderExtraFactory
{
internal static ExtraData Create(ExtraDataType type,ushort length, byte[] extraData)
internal static ExtraData Create(ExtraDataType type, ushort length, byte[] extraData)
{
switch (type)
{
case ExtraDataType.UnicodePathExtraField:
return new ExtraUnicodePathExtraField()
{
Type = type,
Length = length,
DataBytes = extraData
};
return new ExtraUnicodePathExtraField
{
Type = type,
Length = length,
DataBytes = extraData
};
default:
return new ExtraData
{
Type = type,
Length = length,
DataBytes = extraData
};
{
Type = type,
Length = length,
DataBytes = extraData
};
}
}
}
}
}
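ExtraUnicodePathExtraField slices its DataBytes according to the Info-ZIP layout: one version byte, a 4-byte CRC-32 of the original (non-Unicode) name, then the UTF-8 name. A standalone parse of a raw payload with that layout (illustration only; the class and method names are made up):
using System;
using System.Text;

static class UnicodePathFieldDemo
{
    // Payload layout: [0] version, [1..4] CRC-32 of the header's non-Unicode name, [5..] UTF-8 name.
    static string ParseName(byte[] payload)
    {
        // payload[0] is the version byte
        var crc = new byte[4];
        Buffer.BlockCopy(payload, 1, crc, 0, 4);   // CRC-32 of the original name bytes
        return Encoding.UTF8.GetString(payload, 5, payload.Length - 5);
    }
}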

View File

@@ -1,4 +1,5 @@
using System;
using System.IO;
namespace SharpCompress.Common.Zip.Headers
{
@@ -9,12 +10,12 @@ namespace SharpCompress.Common.Zip.Headers
{
}
internal override void Read(System.IO.BinaryReader reader)
internal override void Read(BinaryReader reader)
{
throw new NotImplementedException();
}
internal override void Write(System.IO.BinaryWriter writer)
internal override void Write(BinaryWriter writer)
{
throw new NotImplementedException();
}

View File

@@ -2,7 +2,7 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using SharpCompress.Converter;
using SharpCompress.Converters;
namespace SharpCompress.Common.Zip.Headers
{
@@ -19,13 +19,14 @@ namespace SharpCompress.Common.Zip.Headers
get
{
if (Name.EndsWith("/"))
{
return true;
}
//.NET Framework 4.5 : System.IO.Compression::CreateFromDirectory() probably writes backslashes to headers
return CompressedSize == 0
&& UncompressedSize == 0
&& Name.EndsWith("\\");
&& UncompressedSize == 0
&& Name.EndsWith("\\");
}
}
@@ -77,10 +78,10 @@ namespace SharpCompress.Common.Zip.Headers
protected void LoadExtra(byte[] extra)
{
for (int i = 0; i < extra.Length-4;)
for (int i = 0; i < extra.Length - 4;)
{
ExtraDataType type = (ExtraDataType)DataConverter.LittleEndian.GetUInt16(extra, i);
if (!Enum.IsDefined(typeof (ExtraDataType), type))
if (!Enum.IsDefined(typeof(ExtraDataType), type))
{
type = ExtraDataType.NotImplementedExtraData;
}
@@ -88,7 +89,7 @@ namespace SharpCompress.Common.Zip.Headers
ushort length = DataConverter.LittleEndian.GetUInt16(extra, i + 2);
byte[] data = new byte[length];
Buffer.BlockCopy(extra, i + 4, data, 0, length);
Extra.Add(LocalEntryHeaderExtraFactory.Create(type,length,data));
Extra.Add(LocalEntryHeaderExtraFactory.Create(type, length, data));
i += length + 4;
}
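LoadExtra walks the ZIP extra field as a sequence of little-endian TLV records: a 2-byte id, a 2-byte data length, then the data. A standalone walk over such a block, using BitConverter in place of the library's DataConverter (a sketch; it assumes a little-endian platform, which BitConverter is on virtually all .NET targets):
using System;
using System.Collections.Generic;

static class ExtraFieldDemo
{
    // Split a raw ZIP "extra" block into (id, data) records.
    static IEnumerable<(ushort Id, byte[] Data)> Records(byte[] extra)
    {
        for (int i = 0; i + 4 <= extra.Length; )
        {
            ushort id = BitConverter.ToUInt16(extra, i);
            ushort length = BitConverter.ToUInt16(extra, i + 2);
            var data = new byte[length];
            Buffer.BlockCopy(extra, i + 4, data, 0, length);
            yield return (id, data);
            i += 4 + length;
        }
    }
}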

View File

@@ -6,6 +6,6 @@
LocalEntry,
DirectoryEntry,
DirectoryEnd,
Split,
Split
}
}

View File

@@ -9,7 +9,6 @@ namespace SharpCompress.Common.Zip
Decrypt
}
internal class PkwareTraditionalCryptoStream : Stream
{
private readonly PkwareTraditionalEncryptionData encryptor;
@@ -24,40 +23,27 @@ namespace SharpCompress.Common.Zip
this.mode = mode;
}
public override bool CanRead { get { return (mode == CryptoMode.Decrypt); } }
public override bool CanRead
{
get { return (mode == CryptoMode.Decrypt); }
}
public override bool CanSeek { get { return false; } }
public override bool CanSeek
{
get { return false; }
}
public override bool CanWrite { get { return (mode == CryptoMode.Encrypt); } }
public override bool CanWrite
{
get { return (mode == CryptoMode.Encrypt); }
}
public override long Length { get { throw new NotSupportedException(); } }
public override long Length
{
get { throw new NotSupportedException(); }
}
public override long Position
{
get { throw new NotSupportedException(); }
set { throw new NotSupportedException(); }
}
public override long Position { get { throw new NotSupportedException(); } set { throw new NotSupportedException(); } }
public override int Read(byte[] buffer, int offset, int count)
{
if (mode == CryptoMode.Encrypt)
{
throw new NotSupportedException("This stream does not encrypt via Read()");
}
if (buffer == null)
{
throw new ArgumentNullException("buffer");
}
byte[] temp = new byte[count];
int readBytes = stream.Read(temp, 0, count);
@@ -69,7 +55,9 @@ namespace SharpCompress.Common.Zip
public override void Write(byte[] buffer, int offset, int count)
{
if (mode == CryptoMode.Decrypt)
{
throw new NotSupportedException("This stream does not Decrypt via Write()");
}
if (count == 0)
{

View File

@@ -1,7 +1,7 @@
using System;
using System.Text;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressor.Deflate;
using SharpCompress.Compressors.Deflate;
namespace SharpCompress.Common.Zip
{
@@ -9,18 +9,19 @@ namespace SharpCompress.Common.Zip
{
private static readonly CRC32 crc32 = new CRC32();
private readonly UInt32[] _Keys = {0x12345678, 0x23456789, 0x34567890};
private readonly string password;
private PkwareTraditionalEncryptionData(string password)
{
Initialize(password);
this.password = password;
}
private byte MagicByte
{
get
{
ushort t = (ushort) ((ushort) (_Keys[2] & 0xFFFF) | 2);
return (byte) ((t*(t ^ 1)) >> 8);
ushort t = (ushort)((ushort)(_Keys[2] & 0xFFFF) | 2);
return (byte)((t * (t ^ 1)) >> 8);
}
}
@@ -28,14 +29,15 @@ namespace SharpCompress.Common.Zip
byte[] encryptionHeader)
{
var encryptor = new PkwareTraditionalEncryptionData(password);
encryptor.InitializeKeys();
byte[] plainTextHeader = encryptor.Decrypt(encryptionHeader, encryptionHeader.Length);
if (plainTextHeader[11] != (byte) ((header.Crc >> 24) & 0xff))
if (plainTextHeader[11] != (byte)((header.Crc >> 24) & 0xff))
{
if (!FlagUtility.HasFlag(header.Flags, HeaderFlags.UsePostDataDescriptor))
{
throw new CryptographicException("The password did not match.");
}
if (plainTextHeader[11] != (byte) ((header.LastModifiedTime >> 8) & 0xff))
if (plainTextHeader[11] != (byte)((header.LastModifiedTime >> 8) & 0xff))
{
throw new CryptographicException("The password did not match.");
}
@@ -43,17 +45,18 @@ namespace SharpCompress.Common.Zip
return encryptor;
}
public byte[] Decrypt(byte[] cipherText, int length)
{
if (length > cipherText.Length)
{
throw new ArgumentOutOfRangeException("length",
"Bad length during Decryption: the length parameter must be smaller than or equal to the size of the destination array.");
}
var plainText = new byte[length];
for (int i = 0; i < length; i++)
{
var C = (byte) (cipherText[i] ^ MagicByte);
var C = (byte)(cipherText[i] ^ MagicByte);
UpdateKeys(C);
plainText[i] = C;
}
@@ -63,27 +66,33 @@ namespace SharpCompress.Common.Zip
public byte[] Encrypt(byte[] plainText, int length)
{
if (plainText == null)
{
throw new ArgumentNullException("plaintext");
}
if (length > plainText.Length)
{
throw new ArgumentOutOfRangeException("length",
"Bad length during Encryption: The length parameter must be smaller than or equal to the size of the destination array.");
}
var cipherText = new byte[length];
for (int i = 0; i < length; i++)
{
byte C = plainText[i];
cipherText[i] = (byte) (plainText[i] ^ MagicByte);
cipherText[i] = (byte)(plainText[i] ^ MagicByte);
UpdateKeys(C);
}
return cipherText;
}
private void Initialize(string password)
internal void InitializeKeys()
{
byte[] p = StringToByteArray(password);
for (int i = 0; i < password.Length; i++)
{
UpdateKeys(p[i]);
}
}
internal static byte[] StringToByteArray(string value, Encoding encoding)
@@ -99,10 +108,10 @@ namespace SharpCompress.Common.Zip
private void UpdateKeys(byte byteValue)
{
_Keys[0] = (UInt32) crc32.ComputeCrc32((int) _Keys[0], byteValue);
_Keys[1] = _Keys[1] + (byte) _Keys[0];
_Keys[1] = _Keys[1]*0x08088405 + 1;
_Keys[2] = (UInt32) crc32.ComputeCrc32((int) _Keys[2], (byte) (_Keys[1] >> 24));
_Keys[0] = (UInt32)crc32.ComputeCrc32((int)_Keys[0], byteValue);
_Keys[1] = _Keys[1] + (byte)_Keys[0];
_Keys[1] = _Keys[1] * 0x08088405 + 1;
_Keys[2] = (UInt32)crc32.ComputeCrc32((int)_Keys[2], (byte)(_Keys[1] >> 24));
}
}
}
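This class is the classic PKWARE "ZipCrypto" scheme: three running keys seeded with fixed constants, updated with a CRC-32 step and a linear-congruential multiply (0x08088405), and a keystream byte derived from key 2. The diff moves key seeding out of the constructor into InitializeKeys() so the stateful keys can be reset before each entry rather than only once. A self-contained sketch of the same key schedule, consistent with UpdateKeys and MagicByte above; the table-based CRC-32 stands in for the library's CRC32 class, and the ASCII password cast is a simplification:
using System;

static class ZipCryptoSketch
{
    static readonly uint[] CrcTable = BuildTable();
    static uint[] _keys;

    static uint[] BuildTable()
    {
        var table = new uint[256];
        for (uint n = 0; n < 256; n++)
        {
            uint c = n;
            for (int k = 0; k < 8; k++)
            {
                c = (c & 1) != 0 ? 0xEDB88320u ^ (c >> 1) : c >> 1;
            }
            table[n] = c;
        }
        return table;
    }

    static uint Crc32(uint crc, byte b) => CrcTable[(crc ^ b) & 0xFF] ^ (crc >> 8);

    static void InitializeKeys(string password)
    {
        _keys = new uint[] { 0x12345678, 0x23456789, 0x34567890 };
        foreach (char c in password)
        {
            UpdateKeys((byte)c); // the library encodes the password via its ArchiveEncoding; plain cast here for brevity
        }
    }

    static void UpdateKeys(byte b)
    {
        _keys[0] = Crc32(_keys[0], b);
        _keys[1] = (_keys[1] + (byte)_keys[0]) * 0x08088405u + 1;
        _keys[2] = Crc32(_keys[2], (byte)(_keys[1] >> 24));
    }

    static byte KeystreamByte()
    {
        ushort t = (ushort)((ushort)(_keys[2] & 0xFFFF) | 2);
        return (byte)((t * (t ^ 1)) >> 8);
    }

    static byte[] Decrypt(byte[] cipher, string password)
    {
        InitializeKeys(password);
        var plain = new byte[cipher.Length];
        for (int i = 0; i < cipher.Length; i++)
        {
            plain[i] = (byte)(cipher[i] ^ KeystreamByte()); // XOR with keystream first
            UpdateKeys(plain[i]);                           // then feed the plaintext byte back into the keys
        }
        return plain;
    }
}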

View File

@@ -24,10 +24,7 @@ namespace SharpCompress.Common.Zip
return base.GetCompressedStream();
}
internal string Comment
{
get { return (Header as DirectoryEntryHeader).Comment; }
}
internal string Comment { get { return (Header as DirectoryEntryHeader).Comment; } }
private void LoadLocalHeader()
{

View File

@@ -35,9 +35,10 @@ namespace SharpCompress.Common.Zip
if (iterationCount > MAX_ITERATIONS_FOR_DIRECTORY_HEADER)
{
throw new ArchiveException(
"Could not find Zip file Directory at the end of the file. File may be corrupted.");
"Could not find Zip file Directory at the end of the file. File may be corrupted.");
}
} while (signature != DIRECTORY_END_HEADER_BYTES);
}
while (signature != DIRECTORY_END_HEADER_BYTES);
var entry = new DirectoryEndHeader();
entry.Read(reader);
@@ -55,6 +56,7 @@ namespace SharpCompress.Common.Zip
{
yield break;
}
//entry could be zero bytes so we need to know that.
directoryEntryHeader.HasData = directoryEntryHeader.CompressedSize != 0;
yield return directoryEntryHeader;
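SeekableZipHeaderFactory locates the central directory by working backwards from the end of the file until it finds the "end of central directory" signature (PK\x05\x06), giving up after MAX_ITERATIONS_FOR_DIRECTORY_HEADER so a corrupt file cannot loop forever. A hedged, standalone sketch of that backwards scan; 0x06054B50 is the signature read as a little-endian uint, and the library's actual stepping and limit logic may differ:
using System;
using System.IO;

static class EocdScanDemo
{
    const uint DirectoryEndSignature = 0x06054B50; // "PK\x05\x06" as a little-endian uint

    // Returns the stream position of the EOCD record, or -1 if not found within maxBytes.
    static long FindDirectoryEnd(Stream stream, int maxBytes = 65557) // 22-byte record + up to 64 KiB comment
    {
        var reader = new BinaryReader(stream);
        long position = stream.Length - 4;
        long limit = Math.Max(0, stream.Length - maxBytes);
        while (position >= limit)
        {
            stream.Position = position;
            if (reader.ReadUInt32() == DirectoryEndSignature)
            {
                return position;
            }
            position--; // step back one byte and try again
        }
        return -1;
    }
}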

View File

@@ -1,6 +1,6 @@
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressor.Deflate;
using SharpCompress.Compressors.Deflate;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip

Some files were not shown because too many files have changed in this diff.