Compare commits


674 Commits

Author SHA1 Message Date
Adam Hathcock
acf66c5195 Merge pull request #440 from adamhathcock/release-023
Bump release for 0.23 and update dependencies
2019-02-27 12:57:30 +00:00
Adam Hathcock
880c9fa97a Bump release and update dependencies 2019-02-27 12:55:16 +00:00
Adam Hathcock
e5c111f2be Merge pull request #436 from Numpsy/populate_zip_volume_comment
Changes to populate ZipArchive.ZipVolume.Comment
2019-01-14 08:14:03 +00:00
Richard Webb
4e9cd064dd Unit test to show reading of a Zip volume/archive comment 2019-01-13 21:05:55 +00:00
Richard Webb
12a6d3977e Return the DirectoryEndHeader from SeekableZipHeaderFactory.ReadSeekable so that it can be used by ZipArchive 2018-12-14 22:44:44 +00:00
Adam Hathcock
a95bbaf820 Merge pull request #423 from markfinal/tarsymlink
Tar symlink support
2018-11-05 08:31:48 +00:00
Mark Final
70bafa653b Tar symlink extraction
Extended SharpCompress.Common.ExtractionOptions with a delegate to write symbolic links. If the delegate is null and a symbolic link is encountered, an exception is thrown.
Removed Mono.Posix.NETStandard from the library, but added it to the .NET Core 2.1 test application.
Extended the test to implement the delegate.
2018-11-03 09:45:12 +00:00
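A minimal sketch of wiring up such a symlink-writing delegate: the `WriteSymbolicLink` property name and its (sourcePath, targetPath) signature follow this PR's description but should be treated as assumptions, and `File.CreateSymbolicLink` (.NET 6+) stands in here for the Mono.Posix.NETStandard call used by the test application.

```csharp
using System.IO;
using SharpCompress.Common;

// Sketch only: delegate name/signature assumed from the PR description above.
var options = new ExtractionOptions
{
    ExtractFullPath = true,
    Overwrite = true,
    // Called when a symlink entry is extracted; without it an exception is thrown.
    WriteSymbolicLink = (sourcePath, targetPath) =>
        File.CreateSymbolicLink(sourcePath, targetPath)   // .NET 6+ stand-in for Mono.Posix
};
```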
Mark Final
3f4338489c Removed unnecessary code 2018-11-01 21:57:49 +00:00
Mark Final
d91e58f2cc Refs #248. Refs #132. Added a test case of a tar containing symlinks
This is a source archive of the MoltenVK project from GitHub, which is my use-case for SharpCompress.
I added a test case in the project, which should extract the tar and validate each symlink target against what the tar says it ought to be.
2018-11-01 21:51:14 +00:00
Mark Final
192b9c1e8b Ref #248. Ref #132. Tar reader support for symlinks for .NET standard 2 and Posix platforms
Extracts linkname from the tar header, and exposes this on IEntry as the LinkTarget (string) property. If an entry is not a symlink, then that property is null.

Uses the Mono.Posix.NETStandard NuGet package to create symlinks. However, this is only applicable to .NET Standard 2.0+; so far, I have been unable to find a package that works for older versions.

Also, not sure what to do on Windows.
2018-11-01 21:48:51 +00:00
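A short sketch of consuming the new LinkTarget property while reading a tar; the archive path is illustrative.

```csharp
using System.IO;
using SharpCompress.Readers;

// List symlink entries: LinkTarget is null for anything that is not a symlink.
using (Stream stream = File.OpenRead("example-source.tar"))   // illustrative path
using (var reader = ReaderFactory.Open(stream))
{
    while (reader.MoveToNextEntry())
    {
        if (reader.Entry.LinkTarget != null)
        {
            System.Console.WriteLine($"{reader.Entry.Key} -> {reader.Entry.LinkTarget}");
        }
    }
}
```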
Adam Hathcock
0941239454 Merge pull request #417 from KyotoFox/fix-entrystream-flush
Allow Flush on EntryStream
2018-10-04 12:48:08 +01:00
Knut Ørland
53ad00cdc4 Use soft tabs 2018-10-04 13:13:14 +02:00
Knut Ørland
6dd5da48f7 Added test that calls EntryStream.Flush() 2018-10-04 13:08:53 +02:00
Knut Ørland
efae8328a9 Don't throw an exception when flushing an EntryStream
From Microsoft docs: “In a class derived from Stream that doesn't
support writing, Flush is typically implemented as an empty method to
ensure full compatibility with other Stream types since it's valid to
flush a read-only stream.”
2018-10-04 13:05:36 +02:00
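A sketch of the pattern the quoted guidance describes: a read-only wrapper stream whose Flush is an empty method rather than a throw. This is the general pattern, not SharpCompress's actual EntryStream code.

```csharp
using System;
using System.IO;

// Read-only wrapper: Flush is a no-op so callers may flush it safely.
public sealed class ReadOnlyWrapperStream : Stream
{
    private readonly Stream _inner;
    public ReadOnlyWrapperStream(Stream inner) { _inner = inner; }

    public override bool CanRead => true;
    public override bool CanSeek => false;
    public override bool CanWrite => false;
    public override long Length => _inner.Length;
    public override long Position
    {
        get => _inner.Position;
        set => throw new NotSupportedException();
    }

    public override void Flush() { /* valid to flush a read-only stream: do nothing */ }
    public override int Read(byte[] buffer, int offset, int count) => _inner.Read(buffer, offset, count);
    public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
    public override void SetLength(long value) => throw new NotSupportedException();
    public override void Write(byte[] buffer, int offset, int count) => throw new NotSupportedException();
}
```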
Adam Hathcock
f1facc51de Merge pull request #409 from G-Research/RecogniseEmptyTarArchives
Recognise empty tar archives.
2018-09-25 13:20:59 +01:00
Adam Hathcock
a471ca6a76 Use Cake tool on circle. Update test packages 2018-08-31 09:27:04 +01:00
Elliot Prior
83f6690576 Recognise empty tar archives.
Currently, when ArchiveFactory.Open is called on an empty tar archive, it throws because it cannot determine the stream type. This fix allows it to recognise empty tar files by checking whether the filename is empty, the size is zero and the entry type is defined. A test is added that tries opening an empty archive.
2018-08-16 10:25:47 +01:00
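An illustrative sketch of the heuristic described in this commit; the field names are assumptions, since the real check lives in SharpCompress's tar detection code.

```csharp
// Hypothetical helper: an "empty" leading tar header has no filename and zero
// size, but a defined entry type, which is enough to classify the file as tar.
static bool LooksLikeEmptyTarEntry(string name, long size, byte? entryType)
{
    return string.IsNullOrEmpty(name) && size == 0 && entryType.HasValue;
}
```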
Adam Hathcock
1850ea67f6 Merge pull request #408 from majoreffort/master
Test and fix for #407
2018-07-24 09:42:03 +01:00
majoreffort
2fd6178aa9 Fixed length in TarHeader#WriteStringBytes 2018-07-23 19:58:37 +02:00
majoreffort
ec044e6f42 Added Tar test for 100 char filename issue. 2018-07-23 19:48:01 +02:00
Adam Hathcock
bd96279649 Merge pull request #404 from MattKotsenas/bugfix/idisposable
Enable parallel test execution
2018-07-12 19:53:50 +01:00
Matt Kotsenas
f7ad595945 Enable test parallelization and remove garbage collection workaround
Now that the sources of file locking are fixed, enable test parallelization
and remove the forced garbage collection workaround.

Lastly, remove the `IsLocked` check because it doesn't work in a
parallel test running world - the file may be locked due to another test
running.
2018-07-12 10:33:19 -07:00
Matt Kotsenas
93c0b91de9 Refactor TestSharpCompressWithEmptyStream
Refactor `TestSharpCompressWithEmptyStream` so it asserts that the files
and bytes are the same.
2018-07-12 10:32:08 -07:00
Matt Kotsenas
138038b08f Move RarReaderTests over to use ReaderFactory 2018-07-12 10:32:08 -07:00
- Refactor `RarReaderTests` to use `ReaderFactory`
- Update `ReaderTests.Read` to support Rar tests
2018-07-12 10:32:08 -07:00
Matt Kotsenas
e9a6fed607 FIXUP decouple UseReader from VerifyFiles 2018-07-11 16:53:34 -07:00
Matt Kotsenas
87a1440382 Decouple UseReader from VerifyFiles 2018-07-11 16:49:49 -07:00
Matt Kotsenas
3c2f4ebe9b Combine ForwardOnlyStream and NonSeekableStream
Delete `NonSeekableStream` used in Zip64 tests in favor
of `ForwardOnlyStream` used in Mocks.

Additionally, delete the `ForwardOnlyStream.ReadByte` implementation
as the implementation on the base Stream is sufficient.
2018-07-11 16:42:03 -07:00
Matt Kotsenas
933ffe7828 Remove unused code from ArchiveTests 2018-07-11 16:33:46 -07:00
Matt Kotsenas
7d20ba5243 Simplify RarHeaderTests 2018-07-11 16:21:19 -07:00
Matt Kotsenas
44dc36af48 Update ReaderTests base class to validate Dispose
Update the `ReaderTests` base class to validate that `Dispose` is
called appropriately in both the close and the leave open cases.
2018-07-11 16:21:19 -07:00
Matt Kotsenas
98558c5ba9 Refactor TestStream constructor
Refactor the `TestStream` constructor so by default it defers to
the underlying Stream
2018-07-11 16:21:19 -07:00
Matt Kotsenas
6c25322465 Follow best-practices for Dispose in Volume and ForwardOnlyStream
Update `Volume` and `ForwardOnlyStream` to follow the project's
general pattern and best-practices for `Dispose`
2018-07-11 16:21:19 -07:00
Matt Kotsenas
6d1d62fd32 Delete AppendingStream
`AppendingStream` is unused, so rather than refactor it, just delete it.
2018-07-11 16:21:19 -07:00
Matt Kotsenas
ee4ae661d7 Refactor ListeningStream
Refactor `ListeningStream`:

- Override of `WriteByte` was redundant and removed
- Make `Dispose` delegate to base class
2018-07-11 16:21:19 -07:00
Matt Kotsenas
0473ec1626 Open test archives as read
Update `RarHeaderFactoryTests` and `GZipArchiveTests` to open the test
readers as `FileAccess.Read` and `FileShare.Read` to prevent issues with
multiple tests trying to obtain exclusive access to files.
2018-07-11 16:21:19 -07:00
Matt Kotsenas
c6cf0d40ee Simplify ReaderTests
The `IEnumerable<string>` version of `ReaderTests` is unused, so delete
it to simplify the code.
2018-07-11 16:21:19 -07:00
Matt Kotsenas
4cd80e96f3 Simplify GZip bad compression test 2018-07-11 16:21:19 -07:00
Matt Kotsenas
16524717ba Fix Stream leak in ArchiveFactory
`ArchiveFactory.Open` has two overloads that take `string` or
`FileInfo` (string delegates to FileInfo). Both of these implementations
open a `Stream` with the default `ReaderOptions`, which leaves the
stream open, resulting in a leak.

The fix is to set `LeaveOpen` to `false` if no options were provided.
Note that if a user was providing options and `LeaveOpen` was set to
`true`, the code did and will still leak.
2018-07-11 16:21:19 -07:00
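A sketch of the fix's logic under stated assumptions (the option is named `LeaveStreamOpen` in recent versions, `LeaveOpen` in this message); it is not the library's actual implementation.

```csharp
using System.IO;
using SharpCompress.Archives;
using SharpCompress.Readers;

// If the caller supplied no options, the factory owns the stream it opened,
// so it must not leave it open when the archive is disposed.
static IArchive OpenOwningStream(FileInfo fileInfo, ReaderOptions options = null)
{
    options = options ?? new ReaderOptions { LeaveStreamOpen = false };  // assumed option name
    return ArchiveFactory.Open(fileInfo.OpenRead(), options);
}
```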
Matt Kotsenas
cab1ce3d0c Update sub-streams to uniformly inherit from NonDisposingStream
Update the sub-stream classes to all inherit from `NonDisposingStream`.
This allows them to correctly implement the `Dispose` pattern, and delegate
the actual disposal to `NonDisposingStream`.

In doing so, we need to remove some redundant overrides from
`NonDisposingStream`, otherwise `BufferedSubStream` would use the
overrides inherited from `NonDisposingStream` instead of the ones
inherited from `Stream` (i.e. delegate `ReadByte` to `Read`).
2018-07-11 16:17:49 -07:00
Matt Kotsenas
6c2e5e1164 Cleanup NonDisposingStream for reuse
- Remove the duplicate `GC.SuppressFinalize` call
(already called in `System.IO.Stream`)
- Improve the `ThrowOnDispose` error message
2018-07-11 12:19:34 -07:00
Matt Kotsenas
c2bf540057 Close verification streams in TestBase.CompareArchivesByPath 2018-07-11 12:12:30 -07:00
Matt Kotsenas
a35c66e166 Move RewindableStreamTest.cs to the Streams/ folder 2018-07-10 12:07:33 -07:00
Matt Kotsenas
084c5e2686 Rename StreamTests.cs --> LzmaStreamTests.cs 2018-07-10 12:07:32 -07:00
Matt Kotsenas
6ae715b153 Move the TestStream and ForwardOnlyStream to Mocks folder
Move the `TestStream` and `ForwardOnlyStream` to Mocks/ to separate them
from the test classes.
2018-07-10 12:07:32 -07:00
Adam Hathcock
9c8692806a Merge pull request #403 from MattKotsenas/bugfix/parallel-tests
Fix and re-enable tests
2018-07-10 20:01:20 +01:00
Matt Kotsenas
2d87351d2a Add tests back to AppVeyor 2018-07-10 11:52:00 -07:00
Matt Kotsenas
3114afde0e Add workaround for in-use files
The `TestBase` is not always able to delete the scratch folder in
`Dispose()` because sometimes the files are still in use.

This problem appears to be leaked file handles (likely due to incorrect
handling of `IDisposable`). To avoid the problem for now, force a
garbage collection prior to deleting the scratch folder.
2018-07-10 11:49:38 -07:00
Matt Kotsenas
7b338511cc Create unique scratch path per test
Tests fail in Visual Studio because they try to reuse the same scratch
working space, and each test is responsible for resetting the space. To
simplify the test code:

1. Make `TestBase` `IDisposable` and have it create the scratch space
2. Remove `ResetScratch()` as it is now handled by the base class
3. Add a unique ID to each scratch space folder to prevent collisions
2018-07-10 11:46:44 -07:00
Adam Hathcock
09c27681e1 Merge pull request #402 from a764578566/master
File search supports LINQ patterns
2018-07-10 13:21:09 +01:00
zhoujr
4ebc1f82b7 File search supports LINQ patterns 2018-07-10 19:58:59 +08:00
Adam Hathcock
4640ca497a Merge pull request #400 from MattKotsenas/feature/avoid-exception-in-readerfactory
Avoid throwing NotSupportedException in ReaderFactory hot path
2018-07-10 08:47:13 +01:00
Matt Kotsenas
bebccaae28 Avoid throwing NotSupportedException in ReaderFactory hot path
`ReaderFactory.Open()` calls `ZipArchive.IsZipFile()` to determine if
the `Stream` is a zip archive, which calls into
`ZipHeaderFactory.ReadHeader()`, which throws a `NotSupportedException`
when the `Stream` is not a zip archive.

To be clear, this exception is caught and `IsZipFile()` returns `false`,
but when called in a hot-path, these exceptions can become expensive.

To address this issue, `ReadHeader` now returns `null` in the default
case instead of throwing. All callsites were already checking for and
handling `null`, so there are no behavior changes.
2018-07-09 18:44:46 -07:00
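A self-contained sketch of the return-null-instead-of-throw pattern this commit applies; the magic numbers are the standard zip header signatures, but the method itself is illustrative rather than the library's ReadHeader.

```csharp
// Unknown signatures yield null instead of NotSupportedException, so format
// probing (e.g. "is this a zip?") stays cheap on hot paths.
static string ClassifyZipSignature(uint signature)
{
    switch (signature)
    {
        case 0x04034b50: return "LocalEntry";      // PK\x03\x04
        case 0x02014b50: return "DirectoryEntry";  // PK\x01\x02
        case 0x06054b50: return "DirectoryEnd";    // PK\x05\x06
        default: return null;                      // previously: throw
    }
}

static bool LooksLikeZip(uint firstSignature) => ClassifyZipSignature(firstSignature) != null;
```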
Adam Hathcock
7ee53373c6 Remove tests as AppVeyor can’t handle them at the moment 2018-07-09 09:05:10 +01:00
Adam Hathcock
d577fe1ac6 Merge pull request #385 from twirpx/master
Fixed EFS flag handling
2018-07-09 08:48:34 +01:00
Adam Hathcock
9f976aaf78 Merge branch 'master' into master 2018-07-09 08:48:26 +01:00
Adam Hathcock
8a7d7e366f Merge pull request #396 from andreas-eriksson/Rar5IsEncrypted
Correctly set IsEncrypted for entries in Rar5.
2018-07-09 08:48:12 +01:00
Adam Hathcock
540ab1c6fa Merge branch 'master' into master 2018-07-09 08:47:32 +01:00
Adam Hathcock
6792afbdb1 Merge branch 'master' into Rar5IsEncrypted 2018-07-09 08:44:32 +01:00
Adam Hathcock
e5a7185671 Mark for 0.22 2018-07-09 08:42:45 +01:00
Adam Hathcock
cdaf453b2d Update dependencies and tests to .NET Core 2.1 2018-07-09 08:39:37 +01:00
Andreas Eriksson
f9cc80e1de Correctly set IsEncrypted for entries in Rar5. 2018-06-29 15:51:40 +02:00
Adam Hathcock
7beff9e83c Merge pull request #395 from adamhathcock/zip-slip-readers
Zip slip for Readers
2018-06-28 11:56:44 +01:00
Adam Hathcock
8f49f1b6f8 Merge remote-tracking branch 'origin/master' into zip-slip-readers 2018-06-28 11:52:43 +01:00
Adam Hathcock
7e336a0247 Slip in new SDK 2018-06-28 11:51:17 +01:00
Adam Hathcock
e37e8bdadc Move path handling for extraction to be common
Reader and Archive now share more extraction logic
2018-06-28 11:46:51 +01:00
Adam Hathcock
40bd61b16b Merge pull request #389 from frankyjuang/patch-1
Fix comment
2018-06-08 08:59:52 +01:00
Juang, Yi-Lin
87fbb45099 Fix comment 2018-06-08 11:27:43 +08:00
twirpx
e822f9a95c Tests fixed to explicitly use code page 866 encoding, because several tests use a file with a Russian filename 2018-05-30 22:17:27 +05:00
twirpx
8a5a9159e1 Fixed DirectoryEntryHeader Name/Comment decoding in case of EFS flags set 2018-05-30 21:47:31 +05:00
twirpx
73b3c6b419 Merge branch 'master' of https://github.com/adamhathcock/sharpcompress 2018-05-30 20:28:15 +05:00
Adam Hathcock
f9bd7ebdb0 Merge pull request #384 from MrJul/perf-readbyte
Implemented ReadByte/WriteByte on streams to improve performance
2018-05-28 09:21:28 +01:00
Julien Lebosquain
540618c062 Implemented ReadByte/WriteByte on streams to improve performance 2018-05-27 16:31:44 +02:00
Adam Hathcock
9e96dec8c9 Merge pull request #383 from itn3000/add-filename-encoding-example
add example for custom file encoding
2018-05-23 09:14:46 +01:00
itn3000
7b7af612ba add example for custom file encoding 2018-05-23 09:46:36 +09:00
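A hedged sketch of forcing an entry-name encoding (for example code page 866, as in the Russian-filename tests above); the `ArchiveEncoding`/`Forced` option names follow the encoding commits listed nearby but should be checked against your installed SharpCompress version, and the archive path is illustrative.

```csharp
using System.IO;
using System.Text;
using SharpCompress.Common;
using SharpCompress.Readers;

Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);  // needed for code page 866 on .NET Core

var options = new ReaderOptions
{
    ArchiveEncoding = new ArchiveEncoding { Forced = Encoding.GetEncoding(866) }  // assumed shape
};

using (var stream = File.OpenRead("cyrillic-names.zip"))   // illustrative path
using (var reader = ReaderFactory.Open(stream, options))
{
    while (reader.MoveToNextEntry())
    {
        System.Console.WriteLine(reader.Entry.Key);
    }
}
```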
Adam Hathcock
3a747ba87e Update USAGE with new stream handling 2018-05-16 08:51:33 +01:00
Adam Hathcock
87e57e3a9a Mark for 0.21.1 2018-05-15 09:14:56 +01:00
Adam Hathcock
785d0dcebf Merge pull request #381 from adamhathcock/issue-380
Allow forced encoding to override default encoding
2018-05-15 09:13:16 +01:00
Adam Hathcock
2314776f55 Also check for CustomDecoder 2018-05-15 08:28:11 +01:00
Adam Hathcock
473f5d8189 Make GetDecoder use GetEncoding for forced 2018-05-14 16:20:57 +01:00
Adam Hathcock
be971cb6f7 Allow forced encoding to override default encoding 2018-05-14 16:08:31 +01:00
Adam Hathcock
3f94c1a50d Remove lingering uses of non disposing stream 2018-05-08 14:10:49 +01:00
Adam Hathcock
2919ec293a mark for 0.21 2018-05-06 09:39:36 +01:00
Adam Hathcock
19d25152e5 Merge pull request #378 from adamhathcock/fix_crypto_namespace
Fix namespaces to not interfere with bouncy castle users
2018-05-06 09:20:17 +01:00
Adam Hathcock
747180203c Rider isn’t quite good enough in refactoring 2018-05-06 09:12:18 +01:00
Adam Hathcock
9f89a0844d Fix namespaces to not interfere with bouncy castle users 2018-05-06 09:09:45 +01:00
Adam Hathcock
c64282e915 more naming 2018-05-06 09:07:06 +01:00
Adam Hathcock
c44a80bab2 Merge remote-tracking branch 'origin/master' 2018-05-06 08:59:23 +01:00
Adam Hathcock
8dfc4a2ffb more variable naming 2018-05-06 08:59:00 +01:00
Adam Hathcock
c341c626a5 variable naming clean up 2018-05-06 08:49:32 +01:00
Adam Hathcock
173a0fe659 Some naming clean up 2018-05-05 19:35:58 +01:00
Adam Hathcock
5fdae1cf82 Make readonly and fix visibility 2018-05-05 19:23:34 +01:00
Adam Hathcock
9e892ab397 Merge pull request #376 from leezer3/master
Fix broken link in usage.md
2018-05-05 16:18:17 +01:00
Christopher Lees
e95559b4fc Fix broken link in usage.md 2018-05-05 15:08:56 +01:00
Adam Hathcock
18475cc86d Use proper xunit single threading 2018-05-05 09:38:56 +01:00
Adam Hathcock
88b59600cd Merge pull request #369 from adamhathcock/leaveOpen
Rework LeaveOpen to be consistent
2018-05-05 09:27:32 +01:00
Adam Hathcock
9a9d64bcbe Merge branch 'master' into leaveOpen
# Conflicts:
#	src/SharpCompress/Compressors/LZMA/LZipStream.cs
2018-05-05 09:25:26 +01:00
Adam Hathcock
4f3408ec25 Merge pull request #375 from adamhathcock/issue_360
Fixes lzip stream disposal
2018-05-05 09:20:35 +01:00
Adam Hathcock
e9d0fb85ac Merge branch 'master' into leaveOpen 2018-05-05 09:19:38 +01:00
Adam Hathcock
1ce37ef7a8 Fixes lzip stream disposal 2018-05-05 09:18:01 +01:00
Adam Hathcock
ecad356e30 Merge pull request #363 from sridhar6668/sridhar6668/support_extended_ascii
ZipArchive Reader: Uses IBM PC character encoding to decode filename …
2018-05-05 09:12:22 +01:00
Adam Hathcock
fafd8da91d Merge branch 'master' into leaveOpen 2018-05-05 09:10:19 +01:00
Adam Hathcock
2fb31d4b84 Merge branch 'master' into sridhar6668/support_extended_ascii 2018-05-05 09:09:09 +01:00
Adam Hathcock
8b478451ac Evil zip is a windows only test because of paths 2018-05-05 09:05:32 +01:00
Adam Hathcock
42b1205fb4 Merge pull request #374 from odinn1984/feat/fail_on_outside_target_files
fix: prevent extracting archived files outside of target path
2018-05-02 22:51:02 +01:00
odinn1986
80ceb1c375 fix: prevent extracting archived files outside of target path
This PR is meant to fix an arbitrary file write vulnerability that can be
exploited using a specially crafted zip archive holding path-traversal
filenames. When such a filename gets concatenated to the target extraction
directory, the final path ends up outside of the target folder.

A sample malicious zip file named Zip.Evil.zip was used; extracting it
resulted in the creation of C:/Temp/evil.txt outside of the intended
target directory.

There are various possible ways to avoid this issue; some involve checking
for .. (dot dot) sequences in the filename, but the best solution in our
opinion is to check whether the final target filename starts with the target
folder (after both are resolved to their absolute paths).

Stay secure,
Snyk Team
2018-05-02 23:12:33 +03:00
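A minimal sketch of the check described above, using a hypothetical helper name: both paths are resolved to absolute form and the destination must remain inside the target folder.

```csharp
using System;
using System.IO;

static string GetSafeDestinationPath(string targetDirectory, string entryKey)
{
    // Ensure the target ends with a separator so "C:\Temp\evil" is not treated
    // as being inside "C:\Temp\e".
    string destDir = Path.GetFullPath(targetDirectory);
    if (!destDir.EndsWith(Path.DirectorySeparatorChar.ToString()))
    {
        destDir += Path.DirectorySeparatorChar;
    }

    string destPath = Path.GetFullPath(Path.Combine(destDir, entryKey));
    if (!destPath.StartsWith(destDir, StringComparison.Ordinal))
    {
        throw new InvalidOperationException($"Entry '{entryKey}' would extract outside of '{targetDirectory}'.");
    }
    return destPath;
}
```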
Adam Hathcock
501407c3fe Change flag name to be closer to spec 2018-04-29 16:33:15 +01:00
Adam Hathcock
abddabf18e Proper fixes for all platforms 2018-04-29 16:27:26 +01:00
Adam Hathcock
91d753cbdb Merge branch 'master' into sridhar6668/support_extended_ascii 2018-04-29 15:12:02 +01:00
Adam Hathcock
259acd0694 misc additions 2018-04-29 15:09:26 +01:00
Adam Hathcock
33f7258ea2 Merge branch 'master' into leaveOpen
# Conflicts:
#	src/SharpCompress/Common/Rar/Headers/RarHeaderFactory.cs
#	src/SharpCompress/Readers/Rar/RarReader.cs
2018-04-29 14:47:08 +01:00
Adam Hathcock
1ea7bb57e5 Merge branch 'master' into sridhar6668/support_extended_ascii 2018-04-29 11:39:09 +01:00
Adam Hathcock
3e60e796fb Merge pull request #340 from adamhathcock/rar5
Rar5 Feature
2018-04-29 11:36:49 +01:00
Adam Hathcock
d9c178cbee Fix all platform support 2018-04-29 11:33:49 +01:00
Adam Hathcock
031b3c55f6 Fix solid support. I did it wrong 2018-04-29 11:12:28 +01:00
Adam Hathcock
b43d2c3d95 Disabled decryption tests 2018-04-29 10:55:51 +01:00
Adam Hathcock
d865120480 ArchiveCryptHeader renamed 2018-04-29 10:13:46 +01:00
Adam Hathcock
15534f466a Add basic rar5 crypt header 2018-04-28 18:20:40 +01:00
Adam Hathcock
9d63dcb8d6 Uncommit some tests 2018-04-28 18:14:47 +01:00
Adam Hathcock
6efe30bd6e Merge branch 'master' into rar5
# Conflicts:
#	.gitignore
2018-04-28 18:09:10 +01:00
Adam Hathcock
52dd9f0609 Merge pull request #371 from adamhathcock/Issue-370
Expose stream length.  Clean up entry stream
2018-04-26 11:33:23 +01:00
Adam Hathcock
bee7f43880 Expose stream length. Clean up entry stream 2018-04-26 09:46:01 +01:00
Adam Hathcock
d38276e8cf Fix solid and some other tests 2018-04-23 10:29:46 +01:00
Adam Hathcock
f3daaeb200 Try to use both for Rarv5 support 2018-04-23 09:39:50 +01:00
Adam Hathcock
9b152a40a9 Merge branch 'master' into rar5 2018-04-22 11:35:33 +01:00
Adam Hathcock
89ae8ca526 Rejigger read only substream 2018-04-22 11:32:47 +01:00
Adam Hathcock
68a5e474a6 More testing of file handling 2018-04-22 11:19:11 +01:00
Adam Hathcock
bf58742ddf rework of leave stream open for readers 2018-04-22 11:09:03 +01:00
Adam Hathcock
f18e5b75bb Archives set up correctly 2018-04-22 10:06:30 +01:00
Adam Hathcock
e919c99b14 First pass of removing explicit leaveOpen on streams. 2018-04-22 10:02:18 +01:00
Adam Hathcock
b960f2e5ba Minor build updates 2018-04-22 09:17:03 +01:00
srperias@microsoft.com
5d8728d592 Decode without setting the default Encoding type 2018-03-28 13:12:54 -07:00
srperias@microsoft.com
04ba6c2d73 ZipArchive Reader: Uses IBM PC character encoding to decode filename and comment if the general purpose bit 11 is not set in the header 2018-03-27 13:54:16 -07:00
Adam Hathcock
0cab9bd4b4 Mark for 0.20.0 2018-03-24 07:42:20 +00:00
Adam Hathcock
279d305013 Merge pull request #359 from prettierci-commits/prettierci-master-1521104105
PrettierCI master Sync
2018-03-15 08:56:49 +00:00
PrettierCI
750c1fb069 Sync with Prettier 2018-03-15 08:55:06 +00:00
Adam Hathcock
359a6042cd Merge pull request #352 from adamhathcock/cake-026
Cake 0.26
2018-03-01 15:40:30 +00:00
Adam Hathcock
e27d2ec660 Remove netcoreapp1.x testing 2018-03-01 15:35:55 +00:00
Adam Hathcock
da56bfc01f Merge pull request #354 from frabar666/deflate64-decompress
Support Deflate64 decompression
2018-03-01 09:14:06 +00:00
frabar666
6e2c7d2857 support Deflate64 decompression 2018-02-27 23:31:11 +01:00
Adam Hathcock
5481609554 Build with new cake 2018-02-27 08:52:55 +00:00
Frederik Carlier
a62f4df0b1 Implement entry.ToString(), let it return entry.Key (#351) 2018-02-16 13:43:23 +00:00
Adam Hathcock
f893c1272c Merge pull request #337 from 4ybaka/issue-323-tar-archive-finalization
Added ability to leave tar archive open after stream is closed
2018-01-14 19:52:08 +00:00
Dmitry
e701f5277e Merge branch 'master' into issue-323-tar-archive-finalization 2018-01-13 00:47:04 +01:00
Dmitry Nesterov
f85fd1f6a4 Added ability to leave tar archive open after stream is closed 2018-01-13 00:44:42 +01:00
Dmitry Nesterov
8f7ea420b3 Revert "Added ability to leave tar archive open after stream is closed"
This reverts commit 9092ecf331.
2018-01-13 00:41:35 +01:00
Adam Hathcock
b39f389a67 Merge branch 'master' into rar5 2018-01-10 14:58:40 +00:00
Adam Hathcock
d8c8dabb52 Merge pull request #336 from diontools/ImproveStreamSkipping
Utility.Skip uses seek
2018-01-10 11:23:24 +00:00
Dmitry Nesterov
9092ecf331 Added ability to leave tar archive open after stream is closed 2018-01-04 22:57:32 +01:00
diontools
2fd9fe96ad Utility.Skip uses seek 2018-01-03 00:23:34 +09:00
coderb
554153e6a0 rar5: port old algos 2017-12-20 19:10:06 -05:00
coderb
3c29122dfe rar5: porting old algos 2017-12-20 19:04:41 -05:00
coderb
c42dc646ae rar5: reporting older algos 2017-12-20 18:51:48 -05:00
coderb
edd6206a03 rar5: fix checkin 2017-12-20 18:32:36 -05:00
coderb
a1e7f55b95 rar5: unpack working!!! 2017-12-20 18:32:12 -05:00
coderb
218823e9b2 rar5: unpack bugfix 2017-12-20 18:13:04 -05:00
coderb
3e8f52689e unrar5: unpack wip 2017-12-20 16:01:55 -05:00
coderb
0219fc3ea9 unrar5: unpack wip 2017-12-20 14:58:49 -05:00
coderb
66cb2ab662 unrar5: unpack wip 2017-12-20 14:28:22 -05:00
coderb
91c85f0aa6 unrar5: unpack wip 2017-12-20 14:22:47 -05:00
coderb
fd9790cc36 unrar5: unpack wip 2017-12-20 14:15:20 -05:00
coderb
0d40883176 unrar5: unpack wip 2017-12-20 11:19:30 -05:00
coderb
2d0319f779 rar5: unpack wip 2017-12-20 02:11:45 -05:00
coderb
aaeaa44c0b rar5: unpack wip 2017-12-20 01:55:54 -05:00
coderb
9fc77db073 rar5: unpack wip 2017-12-20 01:39:13 -05:00
coderb
bdaa060003 rar5: unpack wip 2017-12-20 01:21:53 -05:00
coderb
b5cb9901b1 rar5: wip 2017-12-20 00:29:07 -05:00
coderb
fb4d503c9a rar5: wip 2017-12-20 00:17:47 -05:00
coderb
c3c2fcf4d0 rar5: unpack wip 2017-12-19 21:41:36 -05:00
coderb
f2d2d94d1b rar5: unpack wip 2017-12-19 14:03:53 -05:00
coderb
11c1b45335 rar5: unpack wip 2017-12-19 13:52:57 -05:00
coderb
b8a308012f rar5: unpack work 2017-12-18 11:01:37 -05:00
coderb
122a732206 unrar5: pull in fast memset code 2017-12-18 10:18:17 -05:00
coderb
59e4383209 rar5: unpack wip 2017-12-18 10:02:34 -05:00
coderb
22b008f723 rar5: unpack wip 2017-12-18 09:51:24 -05:00
coderb
c770094425 rar5: fileheader fixes 2017-12-18 09:46:18 -05:00
coderb
093a5fdf31 rar5: oops 2017-12-18 09:25:55 -05:00
coderb
f811835f02 rar5: rename Entry.IsSplit -> IsSplitAfter, misc wip 2017-12-18 09:20:20 -05:00
coderb
5dda13b800 rar5: reorg code for side by side implementations 2017-12-18 08:23:18 -05:00
coderb
a20306afe0 rar5: port wip 2017-12-17 21:23:32 -05:00
coderb
476b1df323 rar5: clean port unrarsrc 5.5.8 wip 2017-12-17 20:59:34 -05:00
coderb
aec5a913da rar5: checkin missing file 2017-12-17 20:54:54 -05:00
coderb
e13795651d rar: import unrarsrc-5.5.8 for reference
this will allow us to diff against a newer version of unrarsrc if we wish to update our port
2017-12-17 17:52:00 -05:00
coderb
cc175da0d8 rar5: unpack wip 2017-12-17 17:08:53 -05:00
coderb
a2369fb531 rar5: uppercase method names 2017-12-17 12:08:09 -05:00
coderb
c5b8a444f2 rar5: unpack tighten up access modifiers 2017-12-17 12:03:41 -05:00
coderb
f450608073 rar5: unpack use partial class instead of inheritance 2017-12-17 11:46:17 -05:00
coderb
0e57537167 rar5: unpack wip 2017-12-17 11:36:33 -05:00
coderb
1445b0a48b rar5: refactor unpack classes 2017-12-17 11:33:31 -05:00
coderb
d268cc7685 rar5: wip unpack50() 2017-12-17 11:16:42 -05:00
coderb
7969bbaac4 rar5: working on decompression 2017-12-17 11:04:13 -05:00
coderb
b9e89ca64b rar5: wip 2017-12-17 10:40:41 -05:00
coderb
f802b41665 rar5: wip 2017-12-17 10:08:21 -05:00
coderb
f0eac57bb9 rar5: more cleanup 2017-12-17 09:14:08 -05:00
coderb
c9d3563f31 rar5: some cleanup 2017-12-17 09:00:11 -05:00
coderb
8563179592 rar5: wip 2017-12-17 08:35:41 -05:00
coderb
1c49ff63e2 rar5: replicate rar test cases 2017-12-17 08:09:41 -05:00
coderb
5f121c5da4 rar5 wip 2017-12-17 07:39:02 -05:00
coderb
72f52359e6 rar5: wip 2017-12-17 01:58:40 -05:00
coderb
9f549b98da rar5: support ArchiveHeader, additional rar5 header implementation 2017-12-16 20:39:53 -05:00
coderb
c346a4ca94 rar5: change rar MarkHeader detection logic to support rar5 2017-12-16 17:35:07 -05:00
coderb
d334b54846 rar5: add test archives 2017-12-16 12:54:32 -05:00
Adam Hathcock
02f68b793c Mark for 0.19.2 2017-12-16 09:08:17 +00:00
Adam Hathcock
57b9133a0f Change namespace and visibility to avoid collisions (#333) 2017-12-16 09:05:21 +00:00
Adam Hathcock
815f5e09e8 Mark for 0.19.1 2017-12-15 14:46:14 +00:00
Adam Hathcock
5bdf01ee59 Absorb arraypool from CoreFX (#331) 2017-12-15 14:45:02 +00:00
Adam Hathcock
bd9417e74c Mark for 0.19 2017-12-12 11:17:57 +00:00
Adam Hathcock
694e869162 Use arraypool for transfer/skip (#326)
* Use arraypool for transfer/skip

* Merge fixes

* Remove redundant constant
2017-12-08 13:58:38 +00:00
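A sketch of the ArrayPool-based skip this commit describes: rent a scratch buffer, read-and-discard, and always return the buffer. A minimal illustration, not the library's Utility code.

```csharp
using System;
using System.Buffers;
using System.IO;

static void SkipBytes(Stream stream, long count)
{
    byte[] buffer = ArrayPool<byte>.Shared.Rent(81920);
    try
    {
        while (count > 0)
        {
            int read = stream.Read(buffer, 0, (int)Math.Min(buffer.Length, count));
            if (read <= 0)
            {
                break;   // unexpected end of stream
            }
            count -= read;
        }
    }
    finally
    {
        ArrayPool<byte>.Shared.Return(buffer);
    }
}
```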
Adam Hathcock
45845f8963 Add Circle CI build 2017-12-08 12:03:28 +00:00
Adam Hathcock
a8b6def76a Netcore2 (#302)
* Add netstandard 2.0 target and netcoreapp2.0 tests

* Update xunit

* set tests explicitly to netcore2

* update travis

* Don't say build as netcoreapp1.0

* try adding dotnet 1 too

* Remove .NET Core 1 support

* switch to circle

* update cake

* fix circle build

* try fix file ending test again

* Fix casing on files

* Another casing fix

* Add back netstandard1.0

* Finish adding netstandard 1.0 back

* Add netstandard1.3 back
2017-12-08 12:00:29 +00:00
Sors
a4ebd5fb3d Rar 5 format (#310)
Fix rar 5 format comment
2017-12-04 18:59:49 +00:00
Adam Hathcock
3da3b212fa Create a new MemoryStream to allow proper resizing, as the MemoryStream could be a user-provided buffer. Update xunit (#307) 2017-12-04 18:48:38 +00:00
Martijn Kant
c2528cf93e Mk/add support for extracting password protected LZMA(2) 7z archives (#324)
* Added possibility to decompress a password protected 7z LZMA archive

* Fix tests
2017-12-04 10:55:30 +00:00
coderb
550fecd4d3 bugfix: eliminate spurious rar crc exception when Read() is called with count = 0 (#313) 2017-10-23 11:58:02 +01:00
Adam Hathcock
50b01428b4 Mark for 0.18.2 2017-09-22 09:16:42 +01:00
Thritton
bb59f28b22 Update ArchiveReader.cs (#303)
#227
Added check if argument is in range in method TranslateTime(long? time)
2017-09-19 15:25:10 +01:00
François
7064cda6de Zlib: fix Adler32 implementation (#301) 2017-09-17 22:21:09 +01:00
Adam Hathcock
525c1873e8 Fix merge 2017-09-17 22:16:57 +01:00
François
3d91b4eb5e XZ: fix padding issues (#300)
* XZ: fix variable-length integers decoding

* XZ: fix block and index padding issues

* cleanup in XZStreamTests
2017-09-17 22:14:23 +01:00
François
f20c03180e XZ: fix variable-length integers decoding (#299) 2017-09-17 22:05:20 +01:00
Vladimir Kozlov
08fee76b4e Fixes Double Dispose() of ZipWritingStream #294 https://github.com/adamhathcock/sharpcompress/issues/294 (#295) 2017-09-08 13:25:53 +01:00
twirpx
149f5e4fb5 Minor fixes 2017-08-22 11:46:32 +05:00
Adam Hathcock
0f511c4b2a Mark for 0.18.1 2017-08-17 11:43:34 +01:00
twirpx
42d9dfd117 Fixed bug: Passing default ReaderOptions when creating ZipReader for solid extraction (#287) 2017-08-16 08:19:23 +01:00
twirpx
1793fc949d Fixed bug: Passing default ReaderOptions when creating ZipReader for solid extraction 2017-08-16 08:57:36 +05:00
Adam Hathcock
3983db08ff Use nameof 2017-07-27 11:05:33 -05:00
Adam Hathcock
72114bceea Add release link 2017-07-17 10:22:58 -05:00
Adam Hathcock
c303f96682 mark for 0.18 2017-07-17 10:11:27 -05:00
Adam Hathcock
0e785968c4 Rework usage of WriterOptions for writers since it was inconsistently used. (#271) 2017-07-17 11:05:42 -04:00
Adam Hathcock
15110e18e2 Don't skip ZipReader data twice. (#272)
* Don't skip ZipReader data twice.

* Add archive for a new test
2017-07-17 11:05:21 -04:00
Adam Hathcock
5465af041b Use Skip and ReadFully extension methods where possible. (#276) 2017-07-17 10:55:22 -04:00
Adam Hathcock
310d56fc16 Made ArchiveEncoding a non-static class that is used with options. (#274)
* Made ArchiveEncoding a non-static class that is used with options.

* Revert some formatting.

* Optional string decoder delegate (#278)
2017-07-17 10:53:20 -04:00
eklann
231258ef69 Force encoding (#266)
* Fixing build

* Fixing build

* Fixing build

* Fixed build (seems working now)

* Added support to force specific encoding when reading or writing an archive

* Minor fixed related to force encoding

* Removed obsolete project file not present in master
2017-07-05 10:15:49 -05:00
Sam Bott
16b7e3ffc8 Add XZ tests (#258)
* tests added and converted to xunit

* reordered two assertions
2017-06-11 13:44:00 +01:00
Adam Hathcock
513e59f830 Mark for 0.17.1 2017-06-09 08:28:35 +01:00
Adam Hathcock
b10a1cf2bd Bug on Windows on .NET Core fix (#257)
* Bug on Windows on .NET Core fix: https://github.com/dotnet/corefx/issues/20676

* Add comment
2017-06-09 08:22:47 +01:00
Adam Hathcock
1656edaa29 Add some more details to nuget package 2017-06-01 12:36:01 +01:00
Adam Hathcock
cff49aacba Added explicit tar skip check. Caught skip issue. 2017-06-01 11:25:32 +01:00
Adam Hathcock
19c32aff6c README fixes 2017-06-01 10:56:11 +01:00
Adam Hathcock
db3ec8337f Mark for 0.17 2017-06-01 10:54:50 +01:00
Adam Hathcock
e7bfc40461 Fix Skipping when compressed size is unknown (fallback to decompressing) 2017-06-01 09:26:08 +01:00
Adam Hathcock
3d3ca254ba Zip64 introduced seekable behavior into ZipWriter. The position may … (#252)
* Zip64 introduced seekable behavior into ZipWriter.  The position may not be zero.

* Remove some dead code

* Update formats for zip64

* Make version created by and version needed to extract the same

* Running tests is faster than skipping
2017-05-31 16:55:49 +01:00
Adam Hathcock
b45bc859a4 XZ Format (#247)
* Started integrated XZ format from https://github.com/sambott/XZ.NET

* Add readme line as it was copy/pasted

* Tar used with XZ

* update formats
2017-05-31 16:55:26 +01:00
Adam Hathcock
912d7a8775 Lzip (#245)
* First pass.  Writing isn't implemented on stream.  Tests are busted.

* LZipReader works...no file name :(

* LZipWriter works

* Writing tests are actually correct now.  LZipStream correctly writes trailer now.  lzip command line tool likes it.

* Add recommendation blurb

* Update notes for formats

* LZip isn't an archive format

* Attempting to fix and implement crc32

* LZip writing test passes

* Had to invert crc to check uncompressed data.
2017-05-31 16:51:24 +01:00
Adam Hathcock
16885da1b5 Mark for 0.16.2 2017-05-31 14:47:51 +01:00
Adam Hathcock
26714052eb Merge pull request #249 from adamhathcock/zip_entry_compression_fix
Per entry compression was being written out incorrectly on the centra…
2017-05-31 12:55:37 +01:00
Adam Hathcock
3df763a783 Merge branch 'master' into zip_entry_compression_fix 2017-05-31 11:15:30 +01:00
Adam Hathcock
925842bc4b Merge pull request #251 from dbaumber/Issue-250
Fix for Issue #250: remove extra build flags for .NET 3.5
2017-05-31 10:54:52 +01:00
Dan Baumberger
cead62704e Fix for Issue #250: remove extra build flags for .NET 3.5 so as to
enable WinZipAes for .NET 3.5.
2017-05-30 13:43:48 -07:00
Adam Hathcock
3f24a744c0 Merge branch 'master' into zip_entry_compression_fix 2017-05-30 16:10:41 +01:00
Adam Hathcock
cce97548a2 Merge pull request #212 from kenkendk/remove_unused_code
Removed the unused code to write entries in Zip Headers
2017-05-30 16:09:04 +01:00
Adam Hathcock
9270d7cabf Add cache for dotnet packages 2017-05-30 16:04:55 +01:00
Adam Hathcock
264aa6d366 Merge branch 'master' into remove_unused_code 2017-05-30 15:58:44 +01:00
Adam Hathcock
69fc74e376 Per entry compression was being written out incorrectly on the central directory. Fix for that. 2017-05-30 15:37:41 +01:00
Adam Hathcock
a361d41e68 Fix test namespaces 2017-05-30 15:14:02 +01:00
Adam Hathcock
38766dac99 Wrong logic for skipping tests 2017-05-30 12:50:03 +01:00
Adam Hathcock
c30bc65281 Don't run tests on travis either 2017-05-30 12:46:34 +01:00
Adam Hathcock
296ebd942a Shrink script a bit 2017-05-30 12:37:16 +01:00
Adam Hathcock
afa19f7ad8 Add xplat cake and travis build 2017-05-30 12:35:12 +01:00
Adam Hathcock
a193b2d3b1 Add xplat build 2017-05-29 10:35:55 +01:00
Adam Hathcock
be4a65e572 update readme 2017-05-24 08:52:12 +01:00
Adam Hathcock
6832918e71 Mark for 0.16.1 2017-05-23 16:21:07 +01:00
Adam Hathcock
fd9a3ffbcc Merge commit '18641d4f9b849daea7b6fbb7edad51369534ffa3'
* commit '18641d4f9b849daea7b6fbb7edad51369534ffa3':
  Normalize Rar keys
2017-05-23 16:15:58 +01:00
Adam Hathcock
41added690 Private setter clean up 2017-05-23 16:15:47 +01:00
Adam Hathcock
18641d4f9b Merge pull request #238 from adamhathcock/issue_201
Normalize Rar keys
2017-05-23 16:14:55 +01:00
Adam Hathcock
4d0c5099d4 Merge branch 'master' into issue_201 2017-05-23 16:13:09 +01:00
Adam Hathcock
9d9d491245 Slightly better fix for https://github.com/adamhathcock/sharpcompress/pull/235 2017-05-23 16:10:15 +01:00
Adam Hathcock
7b81d18071 Merge pull request #235 from dbaumber/Issue-230
Issue #230: preserve the compression method when getting a compressed…
2017-05-23 15:50:32 +01:00
Dan Baumberger
7d0acbc988 Merge branch 'Issue-230' of https://github.com/dbaumber/sharpcompress into Issue-230 2017-05-23 07:46:48 -07:00
Dan Baumberger
313c044c41 Added a unit test for the WinZipAes multiple OpenEntryStream() bug. 2017-05-23 07:44:45 -07:00
Dan Baumberger
f6f8adf97e Merge branch 'master' into Issue-230 2017-05-23 07:43:02 -07:00
Adam Hathcock
bc97d325ca Normalize Rar keys 2017-05-22 10:55:15 +01:00
Adam Hathcock
0f2d325f20 oh yeah, appveyor doesn't like the tests 2017-05-22 09:08:16 +01:00
Adam Hathcock
63d5503e12 forgot to actually add tests to script 2017-05-22 09:06:33 +01:00
Adam Hathcock
e53f2cac4a Mark for 0.16.0 2017-05-22 08:58:52 +01:00
Adam Hathcock
3b73464233 Merge pull request #236 from damieng/zip-min-version-of-20
Default zip version to 20 (deflate/encryption), fixes #164
2017-05-22 08:38:18 +01:00
Damien Guard
575f10f766 Default zip version to 20 (deflate/encryption), fixes #164 2017-05-19 16:37:20 -07:00
Dan Baumberger
8d3fc3533b Issue #230: preserve the compression method when getting a compressed stream for encrypted ZIP archives. 2017-05-19 08:36:11 -07:00
Adam Hathcock
60370b8539 don't run appveyor tests 2017-05-19 15:51:06 +01:00
Adam Hathcock
f6db114865 Remove console writelines 2017-05-19 15:47:53 +01:00
Adam Hathcock
1c6c344b6b Tests don't run on appveyor 2017-05-19 15:45:29 +01:00
Adam Hathcock
d0302898e0 Add back net45,net35 and cake 2017-05-19 13:33:12 +01:00
Adam Hathcock
057ac9b001 Enable test 2017-05-19 11:03:31 +01:00
Adam Hathcock
8be931bbcb Doing some resharper clean up 2017-05-19 10:52:49 +01:00
Adam Hathcock
3197ef289c Forgot to hit save 2017-05-19 10:15:19 +01:00
Adam Hathcock
631578c175 Update to next version. Stop Zip64 tests from running all the time and some clean up 2017-05-19 10:10:23 +01:00
Adam Hathcock
f1809163c7 correct gitignore 2017-05-19 09:44:45 +01:00
Adam Hathcock
60e1fe86f2 Fix test running 2017-05-19 09:40:37 +01:00
Adam Hathcock
59d7de5bfc Try again appveyor 2017-05-19 09:36:05 +01:00
Adam Hathcock
6e95c1d84a Drop net35 support as dot net tooling doesn’t support it currently 2017-05-19 09:34:02 +01:00
Adam Hathcock
ee64670755 Move test folder to be tests 2017-05-19 09:19:37 +01:00
Adam Hathcock
3f7d0f5b68 Update test project 2017-05-19 09:14:43 +01:00
Adam Hathcock
e3514c5c4b Don’t attempt to autodeploy 2017-05-19 09:06:18 +01:00
Adam Hathcock
cc3a9cff88 Merge pull request #231 from adamhathcock/VS2017
Vs2017
2017-05-19 09:02:12 +01:00
Adam Hathcock
15e821aa39 Remove unused events 2017-05-19 08:49:44 +01:00
Adam Hathcock
8dd1dbab5f Remove Cake as it’s unnecessary for basic build/test/publish 2017-05-19 08:47:17 +01:00
Adam Hathcock
65ce91ddf6 Update. Only use net35, net standard 1.0 and net standard 1.3 2017-05-19 08:46:27 +01:00
Adam Hathcock
bf55595d6f Merge pull request #226 from gardebring/master
Add new event handler to allow tracking of extraction progress for an individual entry
2017-04-25 13:07:44 +01:00
Anders Gardebring
2aa123ccd7 Remove begin and end events since this can now be tracked via progress instead 2017-04-25 13:21:04 +02:00
Anders Gardebring
0990b06cc9 Create new TransferTo method and pass Entry and IReaderExtractionListener instead of passing an action lambda. 2017-04-25 12:48:56 +02:00
Anders Gardebring
e05f9843ba Use strongly typed ReaderProgress instead of object[] 2017-04-25 12:36:32 +02:00
Anders Gardebring
683d2714d0 Add new event to be able to track progress of extraction of individual entry when extracting an archive. This allows for showing or logging progress of the extraction process, especially useful for large files that might take a long time to extract. 2017-04-24 13:50:45 +02:00
Anders Gardebring
b8ef1ecafc Revert "Add new feature to allow injection of an action into the extraction process. This allows for showing or logging progress of the extraction process, especially useful for large files that might take a long time to extract."
This reverts commit 467fc2d03d.
2017-04-24 10:22:49 +02:00
Anders Gardebring
467fc2d03d Add new feature to allow injection of an action into the extraction process. This allows for showing or logging progress of the extraction process, especially useful for large files that might take a long time to extract. 2017-04-20 11:45:53 +02:00
Adam Hathcock
58b4fe4f28 Merge pull request #220 from coderb/master
verify RAR crc on header and file data
2017-04-07 11:56:06 +01:00
Brien Oberstein
97d5e0aac4 verify rar CRC on header and file data 2017-04-04 12:20:06 -04:00
Adam Hathcock
356c977cff Merge pull request #215 from mnadareski/master
Removed restriction on 7zip file entries
2017-03-17 09:20:59 +00:00
Matt Nadareski
99d6062376 Removed restriction on 7zip file entries 2017-03-16 15:55:20 -07:00
Adam Hathcock
f8538403e4 Merge pull request #211 from kenkendk/add_zip64
Add zip64
2017-03-13 10:23:26 +00:00
Kenneth Skovhede
ba12019bc7 Removed the unused code to write entries in Zip Headers 2017-03-11 08:05:49 +01:00
Kenneth Skovhede
726b9c80f6 Fixed compiling the unittest 2017-03-11 01:05:58 +01:00
Kenneth Skovhede
2894711c51 Added a test suite to verify zip64 write support is working, and can be read in both Archive and Stream mode 2017-03-11 00:54:06 +01:00
Kenneth Skovhede
85280f6f4f Changed the logic to throw exceptions when sizes exceed the zip archive limits and zip64 is not enabled.
This changes the logic such that archives larger than 4 GiB are still automatically written correctly (only the central header is special).
Archives with individual streams larger than 4 GiB must set the zip64 flag, either on the archive or the individual streams.
2017-03-11 00:53:42 +01:00
Kenneth Skovhede
d7f4c0ee32 Fixed an error in the zip64 central end of header: the signature + length (12 bytes) are not included in the reported length. 2017-03-10 23:10:06 +01:00
Kenneth Skovhede
1263c0d976 Added support for writing zip64 headers 2017-03-09 23:56:42 +01:00
Kenneth Skovhede
cd3cbd2b32 Support for writing zip64 headers in the unused code 2017-03-09 23:18:57 +01:00
Adam Hathcock
b3a4fed8be Mark for 0.15.2 2017-03-09 11:02:44 +00:00
Adam Hathcock
d0b4af6666 Merge pull request #210 from kenkendk/fix_invalid_headers
Fix invalid headers
2017-03-09 10:41:18 +00:00
Kenneth Skovhede
81ab5c189d Fixed writing correct headers in zip archives 2017-03-09 11:34:24 +01:00
Kenneth Skovhede
6ef3be4b5c Fixed writing correct headers in zip archives 2017-03-09 11:32:20 +01:00
Adam Hathcock
9f90a1d651 Mark for 0.15.1 2017-01-25 09:31:01 +00:00
Adam Hathcock
ce9a3fd1ef Add file ordering fix for OS X 2017-01-25 09:29:13 +00:00
Adam Hathcock
7c6f05058e Merge pull request #206 from markryd/zip64-extraction
Zip64 extending information and ZipReader
2017-01-25 09:03:43 +00:00
Mark Rydstrom
a8c3a7439e Add support for zip64 to ZipReader 2017-01-25 17:05:48 +10:00
Mark Rydstrom
839b3ab0cf Add support for zip64 extended information field 2017-01-25 16:51:15 +10:00
Adam Hathcock
44d54db80e Fix some path issues on OS X when running tests. 2017-01-24 17:36:51 +00:00
Adam Hathcock
a67d7bc429 Mark for 0.15 2017-01-24 17:25:19 +00:00
Adam Hathcock
079a818c6c Merge pull request #205 from markryd/zip64-extraction
Add zip64 support for ZipArchive extraction
2017-01-24 16:56:42 +00:00
Mark Rydstrom
6be6ef0b5c Add zip64 support for ZipArchive extraction 2017-01-24 13:04:03 +10:00
Adam Hathcock
8e51d9d646 0.14.1 2016-11-30 14:26:18 +00:00
Adam Hathcock
ea206f4f02 Merge pull request #199 from adamhathcock/Issue-198
Gzip entry can't be read multiple times
2016-11-25 09:33:56 +00:00
Adam Hathcock
f175a2a252 Merge branch 'master' into Issue-198 2016-11-25 09:21:44 +00:00
Adam Hathcock
3f7e559b86 Merge pull request #200 from ITnent/bug/Issue-197
Open branch, to fix multiple crashes on repeated zip archives reading…
2016-11-25 09:21:34 +00:00
Vladimir Demidov
2959b4d701 Modified check integrity condition for the encrypted file. 2016-11-24 20:41:08 +03:00
Vladimir Demidov
031286c5eb Fixed defects after review. 2016-11-24 18:01:49 +03:00
Vladimir Demidov
e181fa8c4a Restored original tabs. 2016-11-24 17:11:43 +03:00
Vladimir Demidov
7b035bec5d Fixed some issues after review. 2016-11-24 16:21:02 +03:00
Vladimir Demidov
f39d2bf53a Opened branch to fix multiple crashes on repeated reading of zip archives. Added fix. 2016-11-24 15:14:29 +03:00
Adam Hathcock
7c8e407182 Merge branch 'master' into Issue-198 2016-11-21 12:21:29 +00:00
Adam Hathcock
a09136d46b Merge pull request #195 from jskeet/strong-naming
Strong-name both the main and test projects
2016-11-21 12:06:13 +00:00
Adam Hathcock
5fe1363ee1 Gzip entry can't be read multiple times https://github.com/adamhathcock/sharpcompress/issues/198 2016-11-21 12:04:35 +00:00
Jon Skeet
b41823fc10 Strong-name both the main and test projects
It's not clear whether SharpCompress.Test.Portable (as referenced
in AssemblyInfo.cs) still exists, but build.ps1 certainly works.
2016-11-15 18:42:56 +00:00
Adam Hathcock
0a64fe28b0 Oops, removed too much from project.json 2016-10-14 09:03:15 +01:00
Adam Hathcock
e320ccfa9a 0.14.0 2016-10-14 08:59:19 +01:00
Adam Hathcock
9628ff9456 Merge pull request #191 from jskeet/lzip
Initial read-only support for LZip
2016-10-14 08:50:32 +01:00
Jon Skeet
d540f78cfc Initial read-only support for LZip
LZip has no notion of filenames, so an LzipReader wouldn't make very much sense;
I've just implemented the stream and hooked it into tar support.
2016-10-12 15:08:56 +01:00
Adam Hathcock
66420cd299 Merge pull request #189 from ziaa/master
Remove unbalanced parentheses in code samples
2016-10-08 18:25:30 +01:00
Seyed Zia Azimi
dd0594471f Remove unbalanced parentheses in samples 2016-10-07 19:33:41 +03:30
Adam Hathcock
844ba228ee Make 0.13.1 2016-10-03 13:44:19 +01:00
Adam Hathcock
7efc701b32 Merge pull request #188 from adamhathcock/fix_nulls
Fix null password on ReaderFactory.  Fix null options on SevenZipArchive
2016-10-03 13:41:55 +01:00
Adam Hathcock
d7e29f7c4d Fix occasionally failing test 2016-10-03 13:37:04 +01:00
Adam Hathcock
f26ba91386 Fix null password on ReaderFactory. Fix null options on SevenZipArchive 2016-10-03 13:32:53 +01:00
Adam Hathcock
c73ac2039c Merge pull request #185 from adamhathcock/ppmd_allocation_zipwriter
Make PpmdProperties lazy to avoid unnecessary allocations.
2016-10-03 13:04:14 +01:00
Adam Hathcock
671f9cd0cb Empty commit to kick build 2016-10-03 12:58:23 +01:00
Adam Hathcock
131b5b9714 Can't use Lazy on .NET 3.5 :( 2016-10-03 11:20:29 +01:00
Adam Hathcock
74af0889b9 Make PpmdProperties lazy to avoid unnecessary allocations. 2016-10-03 10:16:26 +01:00
Adam Hathcock
e5ee399045 Merge pull request #181 from claunia/patch-1
Update FORMATS.md
2016-09-30 07:08:52 +01:00
deeb7a0f64 Update FORMATS.md
Add ADC to formats list.
2016-09-29 22:53:51 +01:00
Adam Hathcock
5af3bab1dc Merge pull request #180 from adamhathcock/documenting
Add Markdown files to document things.
2016-09-29 11:58:19 +01:00
Adam Hathcock
28be84d315 For all branches 2016-09-29 11:35:54 +01:00
Adam Hathcock
a0528c737d Trying just to build once 2016-09-29 11:34:50 +01:00
Adam Hathcock
b506e488e8 Add build badge 2016-09-29 11:32:31 +01:00
Adam Hathcock
58eb0e08d6 Don't save artifacts for PRs 2016-09-29 11:22:26 +01:00
Adam Hathcock
562701894f Save nupkgs 2016-09-29 11:13:05 +01:00
Adam Hathcock
54a562273b Incomplete refactoring 2016-09-29 11:10:11 +01:00
Adam Hathcock
3f8c9c4cb0 Update for 0.13.0 2016-09-29 11:03:11 +01:00
Adam Hathcock
3e7d28b043 Can I fix tables? 2016-09-29 10:57:49 +01:00
Adam Hathcock
40b10d4a26 Add Markdown files to document things. 2016-09-29 10:55:04 +01:00
Adam Hathcock
f367630a2a Merge pull request #179 from adamhathcock/tar_fix
Allow empty tar header to be read to know there are no more tar heade…
2016-09-28 13:57:09 +01:00
Adam Hathcock
b9e4f00862 Merge branch 'master' into tar_fix 2016-09-28 13:50:45 +01:00
Adam Hathcock
d6e74d6163 Merge pull request #178 from adamhathcock/7zip_deflate
Allow deflate decoder for 7zip
2016-09-28 13:50:35 +01:00
Adam Hathcock
4a4522b842 Merge branch 'master' into 7zip_deflate 2016-09-28 13:44:46 +01:00
Adam Hathcock
710ba4423d Merge branch 'master' into tar_fix 2016-09-28 13:43:21 +01:00
Adam Hathcock
2a5494a804 Merge pull request #174 from adamhathcock/redo_options
Redo options
2016-09-28 13:40:54 +01:00
Adam Hathcock
568909800c Allow empty tar header to be read to know there are no more tar headers to read 2016-09-28 12:00:48 +01:00
Adam Hathcock
7513a608b1 Allow deflate decoder 2016-09-28 11:59:31 +01:00
Adam Hathcock
911e9878bd Merge branch 'master' into redo_options 2016-09-27 13:09:07 +01:00
Adam Hathcock
899d7d6e61 Appveyor (#175)
* First pass of Cake build

* Update Cake but still need cake itself to run on full CLR

* Test out appveyor

* 3.5 build fix

* Build master and PRs differently.  Still scared to auto publish to nuget.
2016-09-27 13:08:42 +01:00
Adam Hathcock
260c0ee776 Add SaveTo overload for zip archives 2016-09-27 11:19:52 +01:00
Adam Hathcock
d71520808d Helps if I rename everything 2016-09-27 11:08:54 +01:00
Adam Hathcock
177fc2a12c Flags were a better idea when I was younger. It's not clear though. 2016-09-27 10:50:36 +01:00
Adam Hathcock
5dafcb02d4 Redo options classes 2016-09-27 10:23:35 +01:00
Adam Hathcock
c4fde80c5e Create proper options objects to remove flags from API 2016-09-27 10:14:08 +01:00
Adam Hathcock
06e3486ec4 Bump version 2016-09-26 11:53:35 +01:00
Adam Hathcock
bd7c783aaf Test fixes 2016-09-26 11:51:35 +01:00
Adam Hathcock
d732e3cfa4 Renamespace for proper pluralization 2016-09-26 11:49:49 +01:00
Adam Hathcock
c24cdc66ed Clean up from clean up 2016-09-26 11:03:15 +01:00
Adam Hathcock
efa6f7a82e Huge Resharper clean up. Fixed up test project.json 2016-09-26 10:55:52 +01:00
ddbbc3b847 Adds support for Apple Data Compression. (#168) 2016-09-12 17:41:31 +01:00
Adam Hathcock
7037161c07 Update README 2016-08-12 12:15:45 +01:00
Adam Hathcock
b0b62fcf91 Try to fix frameworks again by matching JSON.NET 2016-08-12 12:14:22 +01:00
Adam Hathcock
bd8ba7b854 Test with ForwardOnlyStream. RewindableStream shouldn't corrupt a ForwardOnlyStream (#161) 2016-08-12 11:56:49 +01:00
Adam Hathcock
3a52c68270 0.12.3 2016-07-22 16:50:33 +01:00
Adam Hathcock
89fd778bd8 Make all framework assemblies be build targets as a fix https://github.com/NuGet/Home/issues/3103 2016-07-22 16:47:59 +01:00
Tobias Käs
6e3e8343a8 Ignore unofficial extension of file attributes. (#153)
The high bits may contain POSIX file attributes when the archive was written by certain third-party 7z implementations. These must be removed before we can interpret the attributes as Windows (or .NET) file attributes.
2016-07-21 11:06:42 +01:00
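A sketch of the masking idea: keep only the low bits of the attribute field before interpreting it as Windows/.NET attributes. The exact mask used by the fix is not shown here, so 0x7FFF is illustrative.

```csharp
using System.IO;

// Some 7z writers store POSIX mode bits in the high bits of the attribute
// field; strip them before casting to .NET's FileAttributes.
static FileAttributes ToWindowsAttributes(uint rawAttributes)
{
    return (FileAttributes)(rawAttributes & 0x7FFF);   // illustrative mask
}
```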
Tobias Käs
9224237a99 Fix for issue #73 (#154)
7z archives may require alternating reads from multiple substreams, so it is important to seek before reading from the underlying stream. To keep performance at an acceptable level, buffering is necessary, because seeking on every single one-byte read would destroy performance.
2016-07-21 11:06:25 +01:00
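A simplified sketch of the buffered-substream idea under stated assumptions (not the library's BufferedSubStream): each slice remembers its own position in the shared stream, seeks only when its local buffer runs dry, and serves one-byte reads from that buffer.

```csharp
using System.IO;

public sealed class BufferedSliceReader
{
    private readonly Stream _shared;
    private readonly byte[] _buffer = new byte[4096];
    private long _position;       // this slice's position within the shared stream
    private int _bufferCount;
    private int _bufferOffset;

    public BufferedSliceReader(Stream shared, long start)
    {
        _shared = shared;
        _position = start;
    }

    public int ReadByte()
    {
        if (_bufferOffset == _bufferCount)
        {
            // Refill: seek first, because other slices may have moved the shared stream.
            _shared.Seek(_position, SeekOrigin.Begin);
            _bufferCount = _shared.Read(_buffer, 0, _buffer.Length);
            _bufferOffset = 0;
            _position += _bufferCount;
            if (_bufferCount == 0)
            {
                return -1;   // end of stream
            }
        }
        return _buffer[_bufferOffset++];
    }
}
```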
Adam Hathcock
8d16925662 Add Profile259 2016-07-18 14:37:39 +01:00
Adam Hathcock
ef0bf2758e Add Silverlight 5 2016-07-13 13:26:01 +01:00
Adam Hathcock
351a7552b9 0.12.0 2016-07-13 10:01:32 +01:00
Adam Hathcock
9dcc127454 .NET Core RTM 2016-07-13 09:58:13 +01:00
Adam Hathcock
81ff334aae Fix line endings 2016-07-13 09:54:42 +01:00
Pawel Pabich
e76ed60483 Tar long paths (#147)
* Now we can handle long file names

* Refactored code a bit
2016-07-06 09:57:52 +01:00
Adam Hathcock
04d04576bf Merge pull request #84 from adamhathcock/dnx
Using DNX and project.json
2016-06-06 10:04:02 -04:00
Adam Hathcock
dbd713756f Introducing the lock again for tests 2016-06-06 14:59:28 +01:00
Adam Hathcock
6d821dffa6 Everything builds and basically runs 2016-06-06 14:35:33 +01:00
Adam Hathcock
40e559e608 First pass. Doesn't work yet. 2016-05-20 17:31:35 +01:00
Adam Hathcock
b3fe26fc56 Merge branch 'master' into dnx
Conflicts:
	NuGet/sharpcompress.nuspec
	SharpCompress/VersionInfo.cs
	src/SharpCompress/Common/Tar/Headers/TarHeader.cs
	src/SharpCompress/Common/Zip/Headers/ZipFileEntry.cs
	src/SharpCompress/Compressor/BZip2/BZip2Stream.cs
	src/SharpCompress/Reader/ReaderFactory.cs
	src/SharpCompress/Writer/GZip/GZipWriter.cs
	src/SharpCompress/Writer/Tar/TarWriter.cs
	src/SharpCompress/Writer/WriterFactory.cs
	src/SharpCompress/Writer/Zip/ZipCentralDirectoryEntry.cs
	src/SharpCompress/Writer/Zip/ZipWriter.cs
	test/SharpCompress.Test/WriterTests.cs
2016-05-20 17:19:43 +01:00
Adam Hathcock
bb930da2f1 0.11.6 packaging 2016-04-07 09:44:28 +01:00
Adam Hathcock
732e352261 Merge pull request #138 from adamhathcock/leaveopen_writers
Leave open writers
2016-04-06 08:17:09 +01:00
Adam Hathcock
8f2ada3f20 Update README.md 2016-03-31 10:10:20 +01:00
Adam Hathcock
762381fac6 Explicit finish for BZip2 writing 2016-03-31 08:54:23 +01:00
Adam Hathcock
c440fdf88d Give writers leaveOpen option 2016-03-30 20:18:46 +01:00
Adam Hathcock
7f3f6bb135 Merge pull request #136 from zentron/master
Fix for `System.IO.Compression` created empty directory in zip
2016-03-03 13:13:59 +00:00
Rob
cb7853174d Fix for System.IO.Compression created empty directory in zip 2016-03-03 14:40:14 +10:00
Adam Hathcock
ef8575a107 Matching 77b034cbe7 to have 20 as the "version made by" value 2016-02-26 09:22:23 +00:00
Adam Hathcock
6f3f82e0ad Merge pull request #129 from benshoof/tar-globalextendedheader
Support tars with global extended headers
2016-02-18 07:18:55 +00:00
benshoof
7b1609abe2 Support tars with global extended headers 2016-02-17 15:39:17 -09:00
Adam Hathcock
691c44a3b5 Clean up targets 2016-02-13 19:59:05 +00:00
Adam Hathcock
98c629c789 Fix tests and DataConverter compile issue 2016-02-13 19:56:20 +00:00
Adam Hathcock
8367bff9fe Fix up DataConverter...removed pack 2016-02-13 09:42:59 +00:00
Adam Hathcock
ff22cd774e Merge branch 'master' into dnx
Conflicts:
	NuGet/sharpcompress.nuspec
	SharpCompress/Common/Zip/WinzipAesCryptoStream.Portable.cs
	SharpCompress/Common/Zip/WinzipAesEncryptionData.Portable.cs
	SharpCompress/Crypto/PBKDF2.cs
	SharpCompress/SharpCompress.Portable.csproj
	SharpCompress/SharpCompress.PortableTest.csproj
	SharpCompress/SharpCompress.Unsigned.csproj
	SharpCompress/SharpCompress.WindowsStore.csproj
	SharpCompress/SharpCompress.csproj
	src/SharpCompress/Archive/ArchiveFactory.cs
	src/SharpCompress/Archive/Rar/FileInfoRarFilePart.cs
	src/SharpCompress/Archive/Rar/RarArchive.cs
	src/SharpCompress/Archive/Rar/RarArchiveEntry.cs
	src/SharpCompress/Archive/SevenZip/SevenZipArchiveEntry.cs
	src/SharpCompress/Common/GZip/GZipFilePart.cs
	src/SharpCompress/Common/GZip/GZipVolume.cs
	src/SharpCompress/Common/Rar/Headers/MarkHeader.cs
	src/SharpCompress/Common/Rar/Headers/RarHeaderFactory.cs
	src/SharpCompress/Common/SevenZip/ArchiveReader.cs
	src/SharpCompress/Common/SevenZip/DataReader.cs
	src/SharpCompress/Common/Tar/Headers/TarHeader.cs
	src/SharpCompress/Common/Zip/Headers/ZipFileEntry.cs
	src/SharpCompress/Common/Zip/WinzipAesCryptoStream.cs
	src/SharpCompress/Common/Zip/WinzipAesEncryptionData.cs
	src/SharpCompress/Common/Zip/ZipFilePart.cs
	src/SharpCompress/Compressor/Deflate/GZipStream.cs
	src/SharpCompress/Compressor/Deflate/Inflate.cs
	src/SharpCompress/Compressor/Deflate/ZlibBaseStream.cs
	src/SharpCompress/Compressor/LZMA/Bcj2DecoderStream.cs
	src/SharpCompress/Compressor/LZMA/LzmaStream.cs
	src/SharpCompress/Compressor/PPMd/H/FreqData.cs
	src/SharpCompress/Compressor/PPMd/H/PPMContext.cs
	src/SharpCompress/Compressor/PPMd/H/RarMemBlock.cs
	src/SharpCompress/Compressor/PPMd/H/RarNode.cs
	src/SharpCompress/Compressor/PPMd/H/State.cs
	src/SharpCompress/Compressor/PPMd/PpmdProperties.cs
	src/SharpCompress/Compressor/Rar/VM/RarVM.cs
	src/SharpCompress/EnumExtensions.cs
	src/SharpCompress/IO/MarkingBinaryReader.cs
	src/SharpCompress/Reader/ReaderFactory.cs
	src/SharpCompress/Utility.cs
	src/SharpCompress/Writer/IWriter.Extensions.cs
	src/SharpCompress/Writer/Zip/ZipCentralDirectoryEntry.cs
	src/SharpCompress/Writer/Zip/ZipWriter.cs
	test/SharpCompress.Test/Tar/TarArchiveTests.cs
	test/SharpCompress.Test/Tar/TarReaderTests.cs
2016-02-13 09:24:44 +00:00
Adam Hathcock
ee5e3fbc1d Update to 0.11.5 2016-02-13 09:17:29 +00:00
Adam Hathcock
d13b2ad073 Do less than equals 2016-02-13 09:16:43 +00:00
Adam Hathcock
e9a7efc371 Merge pull request #127 from eklann/bugfix-incomplete-rar
Fixed bug triggered by incomplete rar file.
2016-02-10 19:38:10 +00:00
Josef Eklann
9b8ddda191 Fixed bug triggered by incomplete rar file. 2016-02-10 14:56:05 +01:00
Adam Hathcock
1fc14e1075 Update to 0.11.4 2016-02-07 10:17:45 +00:00
Adam Hathcock
b3a5204e74 Bug fix for previous PR 2016-02-07 10:14:07 +00:00
Adam Hathcock
0fab1ff976 Merge pull request #125 from kenkendk/set_compression_info_on_stream
Fix setting compressioninfo on Zip streams
2016-02-04 10:00:20 +00:00
Kenneth Skovhede
a05b692fc3 More whitespace fix 2016-02-04 10:39:58 +01:00
Kenneth Skovhede
ed7f140364 Whitespace fix 2016-02-04 10:39:02 +01:00
Kenneth Skovhede
a4b594121e Added code to forward the compression info to the stream, such that it is possible to override the compression level and method on a per-stream basis. 2016-02-04 10:35:36 +01:00
Adam Hathcock
fe8da55c95 Merge pull request #122 from benshoof/net35_compat
Added support for .NET 3.5
2016-01-26 21:00:20 +00:00
benshoof
42c4eab4be Fix conflicts 2016-01-26 09:56:54 -09:00
Adam Hathcock
2e8844c896 This doesn't work :) 2016-01-26 11:57:40 +00:00
Adam Hathcock
aed7ff003d Merge pull request #121 from benshoof/rar_detection_fix
Fix false positives in Rar file detection
2016-01-26 11:54:07 +00:00
Adam Hathcock
681b28f654 Merge pull request #119 from benshoof/endian-neutral
SharpCompress now endian neutral
2016-01-26 11:51:30 +00:00
Adam Hathcock
0de64b1551 Use ExtractAllEntries if archives are SOLID types 2016-01-26 11:49:57 +00:00
Adam Hathcock
526df2404e Merge pull request #123 from kenkendk/remove_warnings
Fix various warnings
2016-01-26 09:21:03 +00:00
Kenneth Skovhede
f20274aac7 Removed debug comments 2016-01-26 10:18:23 +01:00
Kenneth Skovhede
08b899fdac Deleted unused variables 2016-01-26 10:18:13 +01:00
Kenneth Skovhede
7b91b6e7c8 Commented out various unused fields that cause warnings and makes the WarningsAsErrors directive abort the build 2016-01-26 09:48:00 +01:00
benshoof
1661b7ec36 Added support for .NET 3.5
SharpCompress can now be compiled for .NET 3.5 by defining symbol NET35
2016-01-25 09:02:19 -09:00
benshoof
e5ab9dc883 Fix false positives in Rar file detection 2016-01-25 08:03:40 -09:00
Adam Hathcock
25d22e33a7 Merge pull request #118 from kenkendk/master
Bugfix for inflate algorithm cutting of the end of certain blocks
2016-01-23 10:24:50 +00:00
benshoof
8ceac9000c SharpCompress now endian neutral
SharpCompress can now be used on machines with a big-endian architecture
such as PowerPC. All byte conversions now run through Mono's
DataConverter (or a portable version for builds that don't allow unsafe
code) instead of BitConverter, as BitConverter's behavior depends on the
host CPU.
2016-01-22 14:32:35 -09:00
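BitConverter assembles multi-byte values in host byte order, so the same bytes on disk decode differently on little- and big-endian machines. A minimal sketch of the endian-neutral idea (illustrative only, not the library's actual DataConverter code):

```csharp
static class LittleEndian
{
    // Compose the bytes explicitly so the result is the same on any host CPU,
    // unlike BitConverter, which follows the machine's native byte order.
    public static uint ReadUInt32(byte[] buffer, int offset)
    {
        return (uint)(buffer[offset]
                      | (buffer[offset + 1] << 8)
                      | (buffer[offset + 2] << 16)
                      | (buffer[offset + 3] << 24));
    }
}
```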
Kenneth Skovhede
ecceec8e1a Tabs -> spaces fix 2016-01-22 15:06:55 +01:00
Kenneth Skovhede
d5c88ebab3 Bugfix for inflate algorithm cutting off the end of certain blocks 2016-01-22 14:51:05 +01:00
Adam Hathcock
0a2adbc205 Merge pull request #115 from maxpiva/master
Adds Non-Linear "Solid Rar" Extraction to RarArchive.
2016-01-17 13:54:24 +00:00
mpiva
3be7f9da37 Adds Non-Linear "Solid Rar" Extraction to RarArchive. 2016-01-17 02:31:55 -03:00
Adam Hathcock
3f2ca67416 Forgot the file overload 2016-01-15 10:04:28 +00:00
Adam Hathcock
21087323af Make Tar last in detection as it contains other files 2016-01-09 13:29:29 +00:00
Adam Hathcock
505838a32a Fixes from last merge 2016-01-04 21:08:30 +00:00
Adam Hathcock
fd3f9eb382 Merge remote-tracking branch 'origin/master' into dnx
Conflicts:
	SharpCompress/SharpCompress.PortableTest.csproj
	SharpCompress/SharpCompress.Unsigned.csproj
	SharpCompress/SharpCompress.csproj
	src/SharpCompress/Archive/IArchiveEntry.Extensions.cs
	src/SharpCompress/Reader/IReader.Extensions.cs
2016-01-04 21:01:12 +00:00
Adam Hathcock
05f92018c3 Merge pull request #113 from zentron/master
Preserve File Timestamps with IReader
2016-01-04 12:04:05 +00:00
Rob
b8fc4a2415 Preserve File Timestamps with IReader 2016-01-04 16:26:43 +10:00
Adam Hathcock
ccd2fc6568 Merge branch 'master' into dnx
Conflicts:
	src/SharpCompress/Archive/Rar/RarArchive.cs
2016-01-03 11:19:04 +00:00
Adam Hathcock
a30872809d Merge pull request #99 from Rovak/rarreader-add-password
Add password when opening RarReader
2016-01-03 03:18:12 -08:00
Adam Hathcock
7abf2ed58b Update README 2016-01-03 11:15:36 +00:00
Adam Hathcock
4822f571c3 Add explicit target for NET 4.5 2016-01-02 15:04:04 +00:00
Adam Hathcock
b0fdac3e6f Have NO_CRYPTO and add Profile259 2016-01-02 15:02:45 +00:00
Adam Hathcock
df62c1d3b2 NO_FILE tag 2016-01-02 14:57:05 +00:00
Adam Hathcock
56912ade7a reorg test project.json 2016-01-01 12:37:37 +00:00
Adam Hathcock
43698b41a7 Need RijndaelEngine for some reason 2016-01-01 12:21:23 +00:00
Adam Hathcock
fd2beeab48 trying to fix RarRijndael 2016-01-01 11:47:38 +00:00
Adam Hathcock
5647a424e7 Fix test execution 2016-01-01 11:23:01 +00:00
Adam Hathcock
a0a418e90b Remove bad character 2016-01-01 11:16:42 +00:00
Adam Hathcock
75e09b24c0 Remove bouncy castle code 2016-01-01 11:06:14 +00:00
Adam Hathcock
0b06023b82 Moved tests and fixed up nuget metadata 2015-12-30 16:59:27 +00:00
Adam Hathcock
49707498a5 move files around 2015-12-30 11:19:42 +00:00
Adam Hathcock
a4c2d27985 tests run...need parallel none 2015-12-30 11:04:16 +00:00
Adam Hathcock
9fac34fb21 intermediate checkin of xunit conversion 2015-12-29 15:26:38 +00:00
Adam Hathcock
d4b22936af fix net 35 2015-12-29 14:06:35 +00:00
Adam Hathcock
f0d0143be0 Remove csproj and start converting tests 2015-12-29 13:52:55 +00:00
Adam Hathcock
7d1593e887 Support NET3.5+ and DNXCORE50 and DOTNET 5.1 + 2015-12-28 23:40:33 +00:00
Adam Hathcock
2588444948 Using DOTNET51 and DNXCORE50 targets 2015-12-28 23:33:18 +00:00
Adam Hathcock
69abb8446f Merge branch 'master' into dnx 2015-12-28 18:47:12 +00:00
Adam Hathcock
bec2662d23 Update version 2015-12-28 18:40:35 +00:00
Adam Hathcock
dd35052de9 Merge pull request #105 from benshoof/fix-tests-release-build
Fix Release build of Tests
2015-12-17 08:54:31 +00:00
Adam Hathcock
2a630e04b2 Merge pull request #107 from benshoof/fix-nonzip-perf-regression
Fixed serious performance regression (revert 0f12a073af)
2015-12-15 16:33:06 +00:00
benshoof
231b78e096 Revert 0f12a073af
Revert commit that caused all non-zip files to be read entirely upon
opening.
IsZipArchive() would read and process the entire file looking for a zip
header.
2015-12-15 07:28:50 -09:00
Adam Hathcock
ce6e1d26f4 Merge pull request #104 from benshoof/fix-vs2013-build
Fix VS2013 compiler warnings (errors)
2015-12-15 09:10:49 +00:00
benshoof
69a25cd142 Fix Release build of Tests
Fixes release builds of SharpCompress.Test and
SharpCompress.Test.Portable. The UNSIGNED symbol was missing from the
Release configurations of SharpCompress.Unsigned and
SharpCompress.PortableTest
2015-12-14 15:32:49 -09:00
benshoof
cc2ad7d8d5 Fix VS2013 compiler warnings (errors)
Fixes broken build in VS2013 introduced by
18bd810228. That commit attempted to fix a
compiler warning from VS2015, but this turns out to be a compiler bug:
https://github.com/dotnet/roslyn/issues/4027 . That commit added code
which VS2013 correctly treats as a compiler warning, breaking the VS2013
build.
I have reverted this unnecessary change to the deflate code, fixing the
VS2013 build, and disabled warning CS0675 on send_bits() which will
satisfy VS2015.
2015-12-14 15:24:33 -09:00
Adam Hathcock
1aa0498e5d update nuspec 2015-11-30 19:40:24 +00:00
Adam Hathcock
1ce5e15fd2 Minor cleanup 2015-11-30 19:40:16 +00:00
Adam Hathcock
b40131736a Merge pull request #103 from pappe82/android-compatibility
Zip entry header version for Deflate compression set to 20
2015-11-30 19:37:38 +00:00
pappe82
c2b15b9c09 Zip entry header version for Deflate compression set to 20
The Java runtime on Android cannot process file entries with the fixed
version 63 - it can only process entries up to version 20, which should
be fine if the entry was compressed using deflate.
2015-11-30 14:43:28 +01:00
Adam Hathcock
6fee1f6dc3 Merge branch 'master' into dnx 2015-11-28 15:35:09 +00:00
Adam Hathcock
27a4f78712 version 0.11.2 2015-11-20 18:59:43 +00:00
Adam Hathcock
2b5ee6e8cb resharper update 2015-11-20 18:59:29 +00:00
Adam Hathcock
cd8ea28576 last write time shouldn't equal...right? 2015-11-20 18:59:15 +00:00
Adam Hathcock
b2b6934499 Merge pull request #101 from twirpx/master
Non-compilable statements + zip archive handling
2015-11-19 16:46:26 +00:00
twirpx
0f12a073af Eliminated "throw - catch all" logic in ZipArchive 2015-11-19 21:08:55 +05:00
twirpx
18bd810228 Fixed non-compilable statements 2015-11-19 21:07:52 +05:00
twirpx
13bbb202c7 Changed MAX_ITERATIONS_FOR_DIRECTORY_HEADER to deal with archives that have larger comments 2015-11-19 21:06:56 +05:00
Roy van Kaathoven
6e0f4ecbc9 Add password when opening RarReader 2015-10-27 18:52:06 +01:00
Adam Hathcock
9a638e7aa5 Merge pull request #97 from Icenium/natanasova/add-explicit-compressioninfo
Add explicit compressioninfo when writing file to zip
2015-10-01 09:33:26 -07:00
Adam Hathcock
7a11dc4385 Merge pull request #96 from Icenium/natanasova/fix-extract-options-as-flag
Use enum as flag correctly
2015-10-01 09:30:46 -07:00
Nadya Atanasova
66816ce390 Add explicit compressioninfo when writing file to zip 2015-10-01 17:15:37 +03:00
Nadya Atanasova
5d8bd7b69b Use enum as flag correctly
Check PreserveFileTime when file times are initialized.
2015-10-01 17:02:58 +03:00
Adam Hathcock
0132c85ec7 Merge pull request #83 from haykpetros/issue_80
Added additional check to make sure that data is properly copied to a…
2015-08-30 09:46:38 +01:00
Adam Hathcock
e5d10e3dba Use only dotnet 2015-08-22 21:36:05 +01:00
Adam Hathcock
0ba87b6c62 Trying to get netcore45 in 2015-08-06 15:42:31 +01:00
Adam Hathcock
da47306f04 Use bouncy Sha256Digest 2015-08-06 14:51:36 +01:00
Adam Hathcock
1930126a59 Add Sha256Digest and Update other classes 2015-08-06 14:46:36 +01:00
Adam Hathcock
baf9f391f1 Setup bouncy castle crypto for dnx core 2015-08-06 14:46:07 +01:00
Adam Hathcock
979703dd1f ignore lock file 2015-08-06 14:25:54 +01:00
Adam Hathcock
ab9e8063dc remove lock file 2015-08-06 14:24:00 +01:00
Adam Hathcock
68c09d7221 Making a pass at DNX 2015-08-05 14:21:34 +01:00
haykpetros
9bf5df72a6 Added additional check to make sure that data is properly copied to the array regardless of the computer/CPU platform (little-endian or big-endian). In the case of a big-endian platform, the intermediate array will be reversed prior to copying to the destination array. 2015-08-05 05:27:30 -07:00
Adam Hathcock
91fc241358 Merge pull request #82 from haykpetros/issue_79
Issue 79
2015-08-04 15:05:37 +01:00
haykpetros
35a8b444b8 I feel there is no need to use the unsafe version, so the conditional compilation has been removed and only the safe version kept. 2015-08-04 03:47:09 -07:00
haykpetros
2e928e86fd Removed unused method. 2015-08-04 03:24:59 -07:00
Adam Hathcock
6648f33c4e Merge pull request #81 from haykpetros/issue_78
Move closing parentheses to where they should be, so build does not b…
2015-08-04 10:05:17 +01:00
haykpetros
2a70ec8100 Move closing parentheses to where they should be, so build does not break for non-DEBUG configurations. 2015-08-03 13:11:13 -07:00
Adam Hathcock
05e0d591a5 Merge pull request #72 from hodm/master
Extract Options And Total Sizes
2015-07-27 10:07:22 +01:00
Adam Hathcock
1d30a1b51d Update README 2015-07-27 10:03:39 +01:00
Adam Hathcock
315c138c05 Removing .NET 2.0 support and LinqBridge dies a fiery death 2015-07-27 09:48:36 +01:00
hodm
b0c514d87c Extract Options And Total Sizes
Fixed TotalSize for 7z.
Added TotalUncompressSize, tested for 7z; this enables showing progress for the entire archive.
Added 2 extract options: PreserveFileTime and PreserveAttributes.
Put all the log commands under the DEBUG condition.
2015-07-26 23:36:28 +03:00
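A hedged sketch of the archive-wide progress reporting this enables, using the member names from this changeset and the extraction API shown in the test code further down; exact signatures may differ between versions:

```csharp
using System;
using System.Linq;
using SharpCompress.Archive.SevenZip;
using SharpCompress.Common;

using (var archive = SevenZipArchive.Open("archive.7z"))
{
    // TotalUncompressSize is the property named in this changeset (assumed shape).
    long totalUncompressed = archive.TotalUncompressSize;
    long written = 0;

    foreach (var entry in archive.Entries.Where(x => !x.IsDirectory))
    {
        entry.WriteToDirectory("output",
            ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
        written += entry.Size;
        Console.WriteLine("Overall progress: " + (written * 100 / totalUncompressed) + "%");
    }
}
```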
Adam Hathcock
8e5cb77af2 Merge pull request #69 from pnewman8/master
Skip entry stream on dispose
2015-07-20 16:27:55 +01:00
Paul Newman
8faebc78d0 Cancel moved from EntryStream to Reader
Relates to previous commit. Following discussion with Adam, moved the Cancel() to the reader.

Example:

while (reader.MoveToNextEntry())
{
  using (var data = new StreamReader(reader.OpenEntryStream()))
  {
    try
    {
      DoSomething(data.ReadLine());
    }
    catch
    {
      reader.Cancel();
      throw;
    }
  }
}
2015-07-15 18:13:46 +01:00
Paul Newman
afff386622 Skip entry stream on dispose
Until now the caller had to completely consume each entry stream, or call SkipEntry(), before disposing the stream. If not, an exception was thrown: "EntryStream has not been fully consumed". This was hugely inconvenient; a user-thrown exception inside a "using (EntryStream)" block would be discarded.

The entry is now automatically skipped on dispose.

Added method EntryStream.Cancel(). Call this if the entry stream is unfinished and no further entries are required. It helps with efficiency, as it avoids reading data that is not needed.
2015-07-15 13:44:20 +01:00
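In practice this means a partially read entry no longer has to be drained by hand; a minimal sketch of the pattern it enables (reader and entry-stream names as in the example above):

```csharp
while (reader.MoveToNextEntry())
{
    using (var entryStream = reader.OpenEntryStream())
    {
        // Peek at the first bytes only; Dispose() now skips whatever is left
        // instead of throwing "EntryStream has not been fully consumed".
        var header = new byte[16];
        entryStream.Read(header, 0, header.Length);
    }
}
```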
Adam Hathcock
9eb43156e8 Update license URL 2015-06-12 12:06:21 +01:00
Adam Hathcock
5dd9994d34 Update for 0.11 2015-06-12 12:03:03 +01:00
Adam Hathcock
f18771904e Made unsigned csproj for testing. Sign the main DLL again. 2015-06-12 11:59:31 +01:00
Adam Hathcock
ff1cdbfff2 Fix portable tests 2015-06-12 11:53:37 +01:00
Adam Hathcock
332d71d40d UTF8 is the default encoding for all platforms 2015-06-12 11:37:42 +01:00
Adam Hathcock
d9c31dace8 Fixing lingering build issues 2015-06-12 11:31:04 +01:00
Adam Hathcock
33c83e3893 Merge pull request #63 from mrgleba/master
ZipWriter: use ArchiveEncoding.Default
2015-06-11 09:30:05 +01:00
mrgleba
c7fc5f8819 ZipWriter: use ArchiveEncoding.Default
SharpCompress is used internally in Mono to implement
System.IO.Compression, which allows the user to specify path encoding.
The change allows for specifying the encoding via
ArchiveEncoding.Default.
2015-06-11 08:36:33 +02:00
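A minimal sketch of what this enables, assuming the static ArchiveEncoding.Default property of this era (later versions moved the setting onto writer/reader options):

```csharp
using System.Text;
using SharpCompress.Common;

// Assumed API shape for this era: ZipWriter consults ArchiveEncoding.Default
// when encoding entry paths, so a legacy code page can be selected up front.
ArchiveEncoding.Default = Encoding.GetEncoding(866); // e.g. a DOS/OEM code page

// ... then create the ZipWriter (or use WriterFactory) as usual; entry names
// written from this point on use the encoding chosen above.
```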
Adam Hathcock
8df6243807 Merge pull request #59 from KOLANICH/ZipFileEntry-LoadExtra-ArrayOverflowFix
Zip file entry load extra array overflow fix
2015-05-22 08:52:04 +01:00
Adam Hathcock
8bf5c99386 Merge pull request #56 from KOLANICH/test-path-fix
Improved testability
2015-05-22 08:51:44 +01:00
KOLANICH
83eae05e0c Fixed overflow in Zip/Headers/ZipFileEntry.cs 2015-05-22 00:07:32 +03:00
KOLANICH
7c70a7aafd Improved testability
1) disabled requirement of signature
2) added runtime discovery of folder with archives
3) disabled signing of test assembly
2015-05-22 00:00:59 +03:00
Adam Hathcock
dae13c782f Merge pull request #55 from benshoof/rar_protect_header
Added read support for RARs with Protect Headers
2015-05-08 13:58:05 +01:00
benshoof
ae1e37cde6 Added read support for RARs with Protect Headers
Some RARs with recovery records contain Protect Headers, I've added
support for parsing them so that RARs containing them can be read,
instead of an invalid-header exception being thrown. Parsing logic taken
from unrar reference source.
2015-05-07 18:20:05 -08:00
Adam Hathcock
94f4d35663 Merge pull request #53 from benshoof/master
Fixed .NET2 Release build
2015-05-05 09:19:51 +01:00
benshoof
5144104fef Fixed .NET2 Release build
The .NET2 project only built in Debug configuration due to not including
the compilation symbol NET2 in the Release configuration.
2015-05-02 20:40:34 -08:00
Adam Hathcock
f87e6672f2 Merge pull request #51 from sander2/fix-tar.bz2-compression
Fixed bug where tar.bz2 compression did not work
2015-04-09 13:39:38 +01:00
Adam Hathcock
312b53398c Merge pull request #52 from sander2/tar-long-name-support
Tar long name support
2015-04-09 13:39:21 +01:00
Sander Bosma
26ddc09c6a add unit test for writing tar archives containing long filenames 2015-04-07 21:43:45 +02:00
Sander Bosma
3113500229 don't write trailing zero in WriteOctalBytes
this fixes a bug where the trailing zero of the CRC overwrote the entrytype
2015-04-07 21:43:45 +02:00
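The bug is easier to see with the ustar header layout in mind: the 8-byte checksum field is immediately followed by the one-byte entry type flag, so an octal writer that emits one byte too many clobbers it. An illustrative helper (not the library's actual WriteOctalBytes):

```csharp
using System;

static class TarOctal
{
    // Illustrative only: writes a value as ASCII octal into a fixed-width tar
    // header field (assuming the value fits) and keeps the NUL terminator inside
    // the field, so the next field (e.g. the entry type flag right after the
    // checksum) is never overwritten.
    public static void WriteOctalField(byte[] header, int offset, int fieldLength, long value)
    {
        string octal = Convert.ToString(value, 8).PadLeft(fieldLength - 1, '0');
        for (int i = 0; i < fieldLength - 1; i++)
        {
            header[offset + i] = (byte)octal[i];
        }
        header[offset + fieldLength - 1] = 0; // terminator stays within the field
    }
}
```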
Sander Bosma
ef72829f1c Fixed bug where tar.bz2 compression did not work 2015-04-06 17:09:55 +02:00
Sander Bosma
088644240a Tar: Support for writing long filenames (>100 chars) using longlink 2015-04-06 14:38:57 +02:00
Adam Hathcock
065ed29600 Merge pull request #47 from norvegec/master
fixed: .NET2 project is not compiling
2015-03-16 11:02:46 +00:00
Adam Hathcock
ee8c1f7904 Merge pull request #45 from catester/master
Made the assembly CLSCompliant.
2015-03-16 10:55:21 +00:00
catester
5d6a83578c Made the assembly CLSCompliant.
Added attribute [assembly: CLSCompliant(true)] and changed type of
public Crc properties from uint to long to satisfy CLS compliance.
2015-03-16 12:29:21 +02:00
Adam Hathcock
37d8d34601 Merge pull request #44 from catester/master
Added IWritableArchive interface
2015-03-16 09:43:28 +00:00
Norvegec
492f64053b fixed: .NET2 project is not compiling 2015-03-15 02:10:06 +03:00
catester
988fe5eac0 Replaced NotImplementedException for streams
This commit is more correct, it covers all stream types now.
2015-03-14 14:45:04 +02:00
catester
c48a47c9b2 Revert "Revert "Replaced NotImplementedException for streams""
This reverts commit abed9eb2c9.
2015-03-14 14:22:55 +02:00
catester
abed9eb2c9 Revert "Replaced NotImplementedException for streams"
This reverts commit 391663ac67.
2015-03-14 13:31:02 +02:00
catester
391663ac67 Replaced NotImplementedException for streams
Especially for streams, it is more appropriate to throw
NotSupportedException instead of NotImplementedException, since
consumers of streams usually expect NotSupportedException when handling errors.
There are other places that also use NotImplementedException, but I
didn't examine them for now. I only modified the stream classes in
SharpCompress.IO. For reference about this best practice, please see
these articles:
http://blogs.msdn.com/b/brada/archive/2004/07/29/201354.aspx

http://blogs.msdn.com/b/jaredpar/archive/2008/12/12/notimplementedexception-vs-notsupportedexception.aspx
2015-03-14 12:46:34 +02:00
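The distinction in practice, as a generic .NET sketch (not the library's code): capability flags report what the stream can do, and unsupported members throw NotSupportedException, which Stream consumers conventionally handle.

```csharp
using System;
using System.IO;

// A read-only wrapper stream: CanWrite reports false, and Write throws
// NotSupportedException rather than NotImplementedException.
class ReadOnlyStream : Stream
{
    private readonly Stream _inner;
    public ReadOnlyStream(Stream inner) { _inner = inner; }

    public override bool CanRead => _inner.CanRead;
    public override bool CanSeek => false;
    public override bool CanWrite => false;
    public override long Length => _inner.Length;
    public override long Position
    {
        get => _inner.Position;
        set => throw new NotSupportedException();
    }

    public override int Read(byte[] buffer, int offset, int count) => _inner.Read(buffer, offset, count);
    public override void Write(byte[] buffer, int offset, int count) => throw new NotSupportedException();
    public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
    public override void SetLength(long value) => throw new NotSupportedException();
    public override void Flush() { } // flushing a read-only stream is valid, so this is a no-op
}
```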
catester
a8c055b990 Added IWritableArchive interface
So that when working with ArchiveFactory, you can cast the opened
archive to IWritableArchive for accessing AddEntry, RemoveEntry and
SaveTo methods without having to know what type of archive is opened
underhood. Also moved extension methods from AbstractWritableArchive to
this new interface. Also added GZipArchive to ArchiveFactory.Create as
it is one of the 3 writable archive types (zip, tar, gzip), not sure why
it was omitted.
2015-03-13 22:38:33 +02:00
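A hedged sketch of the intended usage, with member names as described in this commit (exact overloads differ between versions):

```csharp
using SharpCompress.Archive;
using SharpCompress.Common;

// Open any archive through the factory, then cast to IWritableArchive to
// modify it without knowing the concrete archive type underneath.
using (var archive = ArchiveFactory.Open("existing.zip"))
{
    var writable = archive as IWritableArchive;
    if (writable != null)
    {
        writable.AddEntry("docs/readme.txt", "readme.txt"); // entry key, source file path
        writable.SaveTo("updated.zip", CompressionType.Deflate);
    }
}
```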
Adam Hathcock
7ee885e7d5 Merge pull request #41 from mattbab/patch-1
Fixed simple typo
2015-01-21 08:02:48 +00:00
M.Babcock
ed05bd721f Fixed simple typo 2015-01-20 20:24:22 -06:00
Adam Hathcock
e52c183f1a Merge pull request #40 from Strachu/master
Inconsistent time format
2015-01-14 14:25:57 +00:00
Adam Hathcock
ed6ad6ac6d Merge pull request #39 from hrasyid/master
Fix typo
2015-01-14 14:24:51 +00:00
Strachu
62f198b532 All archive types now consistently return times as local time. 2015-01-14 14:23:59 +01:00
Hamdanil Rasyid
9770cfec9b Fix typo 2015-01-11 17:06:02 -08:00
Adam Hathcock
e05f30308c Merge pull request #30 from larvata/fix-zip-ansi-filename
Fix ansi filename decoded as gibberish in zip file
2014-12-24 17:10:30 +00:00
Adam Hathcock
6958347849 Merge pull request #32 from larvata/fix-extractAllEntires
Add EntryExtractionEvent for stream reader
2014-12-24 17:10:08 +00:00
Adam Hathcock
8e6ced6138 Merge pull request #31 from larvata/implement-central-directory-header
implement info-zip unicode path extra field
2014-12-24 17:09:20 +00:00
larvata
0c36ff6082 Fix ansi filename decoded as gibberish in zip file 2014-12-24 13:58:29 +08:00
larvata
f78e839365 implement info-zip unicode path extra field
WinRAR won't set the 'general purpose bit flag' for unicode filename storage
but uses the 'extra field: Info-ZIP Unicode Path Extra Field' instead.
2014-12-24 13:51:31 +08:00
larvata
7e3f04e669 add entryExtractionEvent for stream 2014-12-22 16:34:47 +08:00
Adam Hathcock
2d237bfbca Merge pull request #29 from arition/password_fix
Fix error when password is not in English
2014-12-18 23:41:35 +00:00
Adam Hathcock
e6e88dbde0 Merge pull request #28 from Strachu/tar_fix
Fixed bugs related to handling of .tar archives with long names
2014-12-16 14:37:09 +00:00
arition
e558a78354 Fix error when password is not in English 2014-12-16 19:47:13 +08:00
Adam Hathcock
c2df06a746 Merge pull request #27 from Strachu/rar_reorder
Changed the order of detecting whether the archive is in .rar format.
2014-12-16 10:51:47 +00:00
Adam Hathcock
41da844250 Merge pull request #26 from Strachu/7z_mtime
Exposed modification time for .7z archives
2014-12-16 10:47:26 +00:00
Strachu
8fcb0cb7a2 Fixed bug causing entries of non-ustar archive being after one with very long name to be discarded 2014-12-16 11:11:02 +01:00
Strachu
2e533f9fb5 Fixed handling of ustar tar files with long names. 2014-12-15 21:13:57 +01:00
Strachu
ca2778b658 Changed the order of detecting whether the archive is in .rar format.
It's very slow with big archives and shouldn't be done when we have an archive in a format that can be detected quickly.
2014-12-15 20:57:54 +01:00
Strachu
3147ee0f14 Exposed modification time for .7z archives 2014-12-15 20:26:26 +01:00
Adam Hathcock
f72558de9e Merge pull request #23 from pivotal-cf-experimental/master
Adds MIT license.
2014-11-11 17:33:45 +00:00
mavenraven.org
6d69791db1 Adds MIT license. 2014-11-11 08:48:32 -08:00
Adam Hathcock
6e05a20136 Support file sizes larger than int.MaxValue 2014-10-24 09:39:59 +01:00
Adam Hathcock
d36ae445e2 Don't dispose! 2014-07-01 19:33:59 +01:00
Adam Hathcock
022f7ed26b Fix change from previous pull request 2014-04-30 11:27:49 +01:00
Adam Hathcock
d1a64021e1 Check for entry stream null. 2014-04-30 11:26:54 +01:00
Adam Hathcock
9225531f1e Merge pull request #15 from bastianeicher/master
.NET 2.0 version in NuGet package with limited dependency on LinqBridge
2014-04-24 10:16:00 +01:00
Bastian Eicher
650dc6f8bb .NET 2.0 version in NuGet package with limited dependency on LinqBridge 2014-04-16 17:07:41 +02:00
Adam Hathcock
49d6f1f633 Enumerate FILES not DIRECTORIES 2014-04-15 09:41:16 +01:00
Adam Hathcock
505f435f5d Renamed LinqBridge to .NET 2.0 target.
Cleaned up some cross-versioning code.
2014-04-15 09:36:05 +01:00
Adam Hathcock
d9d63fba96 Merge pull request #12 from bastianeicher/master
Added .NET 2.0 version (with LinqBridge)
2014-04-15 09:26:36 +01:00
Bastian Eicher
8c3d260d7c Added .NET 2.0 version (with LinqBridge) 2014-03-18 14:45:48 +01:00
Adam Hathcock
0fd00efada Fixed other projects 2014-02-01 10:36:22 +00:00
Adam Hathcock
71e86cd7e4 Added explicit checks for multi-volume encrypted archives to throw an error 2014-02-01 10:29:01 +00:00
Adam Hathcock
c64a96398d RarArchives now have password support 2014-02-01 09:01:15 +00:00
Adam Hathcock
efa9805fe4 Fixing Windows Store project to be Windows 8.1 with newest files 2014-02-01 07:57:50 +00:00
Adam Hathcock
2100d49cef Little more clean up 2013-12-23 12:29:41 +00:00
Adam Hathcock
66ffc82d41 Entries don't have FilePaths, they have keys 2013-12-23 12:20:06 +00:00
Adam Hathcock
e58ec599f0 Cleaning up some FileInfo non-usage and fixing tests. 2013-12-23 12:15:57 +00:00
Adam Hathcock
4eda2043df Moved ExtractTo to common logic 2013-12-23 11:42:08 +00:00
Adam Hathcock
afd65a7505 Removed unimplemented Close 2013-12-23 11:12:07 +00:00
Adam Hathcock
25148a9bf8 No expected exception 2013-12-22 09:37:35 +00:00
Adam Hathcock
97bc1865dc When using writable entries, reset stream on entry stream access. 2013-12-21 17:37:54 +00:00
Adam Hathcock
865afbfbf0 Throw exception when adding duplicate entry 2013-12-21 17:29:36 +00:00
Adam Hathcock
c59e6a8c99 AddEntry should return the added entry. 2013-12-21 17:14:41 +00:00
Adam Hathcock
be9111630e Fix for some type management problems 2013-12-21 09:34:40 +00:00
Adam Hathcock
d46de85ca2 Allow multiple saves of an archive. New entry streams must be seekable and resetable. 2013-12-20 15:50:29 +00:00
Adam Hathcock
c1562c5829 new entry streams must be readable and seekable 2013-12-20 15:33:20 +00:00
Adam Hathcock
f862cc6947 Remove redundant test from previous change 2013-12-20 15:32:57 +00:00
Adam Hathcock
cc3848aea5 Remove write check as I never actually write to any of the read streams 2013-12-20 15:00:17 +00:00
Adam Hathcock
46fc663e90 Fixed issue where adding a new entry then removing it wouldn't actually remove it. 2013-12-20 12:28:17 +00:00
Adam Hathcock
84ed6bc7f0 More Can* implemented 2013-12-19 11:05:39 +00:00
Adam Hathcock
fe5895d373 Implement Can* 2013-12-19 10:35:15 +00:00
Adam Hathcock
770e2d6e75 Resharper 8.1 2013-12-19 10:34:59 +00:00
Adam Hathcock
84704e5ce2 Release packaging for 0.10.3 2013-12-15 11:42:54 +00:00
Adam Hathcock
059fe1f545 Test for previous change 2013-12-15 11:16:59 +00:00
Adam Hathcock
fe8c6aec5f Ensure adding always disposes 2013-12-15 11:16:48 +00:00
Adam Hathcock
3ab38fbfc2 If the requested amount of bytes was not read, assume end of stream 2013-11-24 09:40:38 +00:00
Adam Hathcock
b4bfde77d2 Version 0.10.2 2013-11-23 13:08:54 +00:00
Adam Hathcock
c4b005b3d4 Strong name Windows Store assembly 2013-11-23 11:13:01 +00:00
Adam Hathcock
c9d1f7b528 Updating with VS 2013 2013-11-23 11:10:22 +00:00
Adam Hathcock
21aa57945d Fix for byte counting 2013-11-23 11:10:07 +00:00
Adam Hathcock
1d0c7b6445 Added work around for invalid extended time format
https://sharpcompress.codeplex.com/workitem/42
2013-10-19 13:28:33 +01:00
Adam Hathcock
f33d8f9a5e Fixed deletion test 2013-10-19 13:21:37 +01:00
Adam Hathcock
ee2d6216b7 Remove unused class 2013-08-14 16:36:09 +01:00
Adam Hathcock
a42414bdaa Version 0.10.1.3 - hopefully cleaned up building problem for different platforms 2013-08-14 16:26:41 +01:00
Adam Hathcock
3e201053c6 Version 0.10.1.2 2013-08-14 08:06:07 +01:00
Adam Hathcock
46c03ce027 Release for 0.10.1 as 0.10.1.1 for nuspec changes 2013-08-13 20:35:40 +01:00
Adam Hathcock
718dac1a31 Merge pull request #4 from sawilde/master
Ensure the assemblies have the same name in the nuget package #2
2013-08-13 12:28:37 -07:00
Shaun Wilde
137f2655a5 Merge branch 'master' of https://github.com/adamhathcock/sharpcompress
Conflicts:
	NuGet/sharpcompress.nuspec
2013-08-14 05:21:45 +10:00
Adam Hathcock
8109ae003d 0.10.1 packing 2013-08-11 09:51:05 +01:00
Adam Hathcock
8325b919ce Version 0.10.1 2013-08-11 09:47:00 +01:00
Adam Hathcock
61c97faf6c Remove needless sync stream. 2013-08-11 09:45:46 +01:00
Shaun Wilde
2dc297394f Ensure the assemblies have the same name in the nuget package #2 2013-08-08 19:40:11 +10:00
Adam Hathcock
7aa5d310f2 Packaging for 0.10 2013-07-27 12:43:02 +01:00
Adam Hathcock
e3fb32aa3c Marking 0.10 2013-07-27 12:36:40 +01:00
Adam Hathcock
65090b0fb0 Force tests to run single threaded 2013-07-27 12:20:32 +01:00
Adam Hathcock
3bbb407bdb Fixed line endings 2013-07-27 12:20:17 +01:00
Adam Hathcock
404fa8c62d AES zip decryption works 2013-07-27 12:06:54 +01:00
Adam Hathcock
9cf8c1a747 Moved classes into own files 2013-07-27 08:30:20 +01:00
Adam Hathcock
0937c63a9e Added extra encryption test. 2013-07-27 08:27:31 +01:00
Adam Hathcock
f367f489eb Adding bouncy castle crypto for portable (and metro) 2013-07-20 11:46:37 +01:00
Adam Hathcock
d78677186b Refactor crypto out of MarkingBinaryReader into RarCrypto specific class. Remove functionality from portable build to make it work. 2013-07-16 16:53:00 +01:00
Adam Hathcock
9447c1ed71 Add resharper shared config 2013-07-16 16:52:01 +01:00
Adam Hathcock
a16219763b Merge pull request #1 from hrasyid/master
Added support for decryption of RAR files
2013-07-16 07:27:31 -07:00
Hamdanil Rasyid
430263b672 More style adjustment 2013-07-16 22:19:41 +08:00
Hamdanil Rasyid
cc902bcb2d Style issue and access modifiers 2013-07-16 22:17:15 +08:00
Hamdanil Rasyid
a29f7d4500 Revert "Remove requirement for signature"
This reverts commit 1d10ea29da.
2013-07-16 22:00:13 +08:00
Hamdanil Rasyid
f9194b645d Merge branch 'master' of https://github.com/hrasyid/sharpcompress 2013-07-16 00:18:36 +08:00
Hamdanil Rasyid
c7f6b506b5 Use faster implementation of SHA1 2013-07-16 00:17:24 +08:00
Hamdanil Rasyid
1ab89ba59c Cleanup 2013-07-16 00:04:54 +08:00
Hamdanil Rasyid
3370fadbd1 Refactor all cryptography codes to RarRijndael 2013-07-15 23:43:26 +08:00
Hamdanil Rasyid
a73c831647 Refactor initialization codes into one class 2013-07-15 23:29:24 +08:00
hrasyid
203a7beef7 Merge pull request #1 from hrasyid/dev_rardecryption
Dev rardecryption
2013-07-14 09:56:39 -07:00
Hamdanil Rasyid
6c2b447530 Fix filename 2013-07-15 00:44:07 +08:00
Hamdanil Rasyid
1e7d6dc4f5 Test both Rar with header+ file encrypted and only file encrypted 2013-07-15 00:42:48 +08:00
Hamdanil Rasyid
37c4665630 Make password a parameter 2013-07-15 00:35:35 +08:00
Hamdanil Rasyid
d15c8785ac Use more meaningful error message 2013-07-14 23:22:08 +08:00
Hamdanil Rasyid
ab19dedf1f Fix bug when ignoring offset 2013-07-14 23:20:43 +08:00
Hamdanil Rasyid
34190f6576 Code tidiness 2013-07-14 19:09:33 +08:00
Hamdanil Rasyid
78cecda03a Update to reflect folder structure 2013-07-14 19:08:54 +08:00
Hamdanil Rasyid
13de04ddd2 Fix bug when no rijndael is used 2013-07-14 19:08:20 +08:00
Hamdanil Rasyid
e1511bf3ba Decrypt stream (in addition to header) 2013-07-14 19:04:22 +08:00
Hamdanil Rasyid
e78ead7bc3 Temporarily do not treat warning as error 2013-07-13 21:49:35 +08:00
Hamdanil Rasyid
bb2dcce57a Remove comments from reference code 2013-07-13 21:49:04 +08:00
Hamdanil Rasyid
c0b630c795 Decrypt rar - first attempt, test still fails 2013-07-13 20:12:35 +08:00
Hamdanil Rasyid
22d181e637 recognize encrypted flag in archive header 2013-07-13 10:45:25 +08:00
Hamdanil Rasyid
af8e4f9da8 Add test case for encrypted Rar 2013-07-12 23:26:30 +08:00
Hamdanil Rasyid
ad421936dc Modify test base to follow my folder structure 2013-07-12 23:23:56 +08:00
Hamdanil Rasyid
1d10ea29da Remove requirement for signature 2013-07-12 21:43:38 +08:00
Adam Hathcock
31f8c18bc5 Added codeplex discussion link 2013-05-28 19:40:13 +01:00
Adam Hathcock
692bebf658 Better message for supported types
Changed exception message in Reader/Archive Factory to list the
supported types.
2013-05-28 19:34:41 +01:00
Adam Hathcock
0ab103e1b1 Made streams Dispose resilient
Made streams resilient to dispose being called multiple times.  Removed
unused stream.
2013-05-28 19:29:54 +01:00
Adam Hathcock
483f2e564a Fixing ArchiveFactory.Open IsRarFile: https://sharpcompress.codeplex.com/workitem/35 2013-05-07 14:31:30 +01:00
Adam Hathcock
ea6661ee8e Changing some nuget metadata 2013-05-04 17:15:46 +01:00
Adam Hathcock
9ff2369744 Version 0.9 2013-05-04 17:05:09 +01:00
Adam Hathcock
dc192601f3 Removed comment 2013-04-28 13:09:34 +01:00
Adam Hathcock
455f93f294 Windows Store version compiles 2013-04-28 13:01:29 +01:00
Adam Hathcock
0a69c584b4 Removed Silverlight project and renamed WP7 project to Portable 2013-04-28 12:45:44 +01:00
Adam Hathcock
257e85d8bf Removed 3.5 project. Added WinRT project 2013-04-28 12:41:29 +01:00
766 changed files with 94864 additions and 46569 deletions

16
.circleci/config.yml Normal file

@@ -0,0 +1,16 @@
version: 2
jobs:
  build:
    docker:
      - image: microsoft/dotnet:2.2.104-sdk
    steps:
      - checkout
      - run:
          name: Install Cake
          command: |
            dotnet tool install -g Cake.Tool
            echo 'export PATH=$PATH:/root/.dotnet/tools' >> $BASH_ENV
            source $BASH_ENV
      - run:
          name: Build
          command: dotnet cake build.cake

5
.gitattributes vendored Normal file

@@ -0,0 +1,5 @@
# Set the default behavior, in case people don't have core.autocrlf set.
* text=auto
# need original files to be windows
*.txt text eol=crlf

27
.gitignore vendored

@@ -1,8 +1,19 @@
**/bin/*
**/obj/*
_ReSharper.SharpCompress/
bin/
*.suo
*.user
TestArchives/Scratch/
TestArchives/Scratch2/
**/bin/*
**/obj/*
_ReSharper.SharpCompress/
bin/
*.suo
*.user
TestArchives/Scratch/
TestArchives/Scratch2/
TestResults/
*.nupkg
packages/*/
project.lock.json
tests/TestArchives/Scratch
.vs
tools
.vscode
.idea/
.DS_Store

60
FORMATS.md Normal file

@@ -0,0 +1,60 @@
# Formats
## Accessing Archives
* Archive classes allow random access to a seekable stream.
* Reader classes allow forward-only reading on a stream.
* Writer classes allow forward-only Writing on a stream.
## Supported Format Table
| Archive Format | Compression Format(s) | Compress/Decompress | Archive API | Reader API | Writer API |
| ---------------------- | ------------------------------------------------- | ------------------- | --------------- | ---------- | ------------- |
| Rar | Rar | Decompress (1) | RarArchive | RarReader | N/A |
| Zip (2) | None, DEFLATE, Deflate64, BZip2, LZMA/LZMA2, PPMd | Both | ZipArchive | ZipReader | ZipWriter |
| Tar | None | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.GZip | DEFLATE | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.BZip2 | BZip2 | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.LZip | LZMA | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.XZ | LZMA2 | Decompress | TarArchive | TarReader | TarWriter (3) |
| GZip (single file) | DEFLATE | Both | GZipArchive | GZipReader | GZipWriter |
| 7Zip (4) | LZMA, LZMA2, BZip2, PPMd, BCJ, BCJ2, Deflate | Decompress | SevenZipArchive | N/A | N/A |
| LZip (single file) (5) | LZip (LZMA) | Both | LZipArchive | LZipReader | LZipWriter |
1. SOLID Rars are only supported in the RarReader API.
2. The Zip format supports PKWARE and WinZip AES encryption. However, encrypted LZMA is not supported. Zip64 reading/writing is supported, but only with seekable streams, as the Zip spec doesn't support Zip64 data in post-data descriptors. Deflate64 is only supported for reading.
3. The Tar format requires a file size in the header. If no size is specified to the TarWriter and the stream is not seekable, then an exception will be thrown.
4. The 7Zip format doesn't allow for reading as a forward-only stream so 7Zip is only supported through the Archive API
5. LZip has no support for extra data like the file name or timestamp. There is a default filename used when looking at the entry Key on the archive.
## Compression Streams
These are for those who want to compress/decompress raw bits directly. The single-file formats are represented here as well. However, BZip2, LZip and XZ have no metadata (GZip has a little), so using them without something like a Tar file makes little sense.
| Compressor | Compress/Decompress |
| --------------- | ------------------- |
| BZip2Stream | Both |
| GZipStream | Both |
| DeflateStream | Both |
| Deflate64Stream | Decompress |
| LZMAStream | Both |
| PPMdStream | Both |
| ADCStream | Decompress |
| LZipStream | Both |
| XZStream | Decompress |
## Archive Formats vs Compression
Sometimes the terminology gets mixed.
### Compression
DEFLATE and LZMA are pure compression algorithms.
### Formats
Formats like Zip, 7Zip and Rar are archive formats only. They use other compression methods (e.g. DEFLATE, LZMA) or proprietary ones (e.g. RAR).
### Overlap
GZip, BZip2 and LZip are single file archival formats. The overlap in the API happens because Tar uses the single file formats as "compression" methods and the API tries to hide this a bit.
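To make the Archive/Reader split above concrete, here is a short hedged sketch using the classes from the table (namespaces and option types vary across SharpCompress versions):

```csharp
using System.IO;
using System.Linq;
using SharpCompress.Archive;
using SharpCompress.Common;
using SharpCompress.Reader;

// Archive API: random access over a seekable stream or file.
using (var archive = ArchiveFactory.Open("archive.zip"))
{
    foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
    {
        entry.WriteToDirectory("extracted",
            ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
    }
}

// Reader API: forward-only, suitable for non-seekable streams (e.g. a download).
using (Stream stream = File.OpenRead("archive.tar.gz"))
using (var reader = ReaderFactory.Open(stream))
{
    while (reader.MoveToNextEntry())
    {
        if (!reader.Entry.IsDirectory)
        {
            reader.WriteEntryToDirectory("extracted",
                ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
        }
    }
}
```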

21
LICENSE.txt Normal file

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2014 Adam Hathcock
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View File

@@ -1,23 +0,0 @@
<?xml version="1.0"?>
<package xmlns="http://schemas.microsoft.com/packaging/2010/07/nuspec.xsd">
<metadata>
<version>0.8.2</version>
<authors>Adam Hathcock</authors>
<owners>Adam Hathcock</owners>
<licenseUrl>http://sharpcompress.codeplex.com/license</licenseUrl>
<projectUrl>http://sharpcompress.codeplex.com/</projectUrl>
<id>sharpcompress</id>
<title>SharpCompress - Pure C# Decompression/Compression</title>
<requireLicenseAcceptance>false</requireLicenseAcceptance>
<description>SharpCompress is a compression library for .NET/Mono/Silverlight/WP7 that can unrar, decompress 7zip, zip/unzip, tar/untar bzip2/unbzip2 and gzip/ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip is implemented.</description>
<releaseNotes />
<language>en-US</language>
<tags>rar unrar zip unzip bzip2 gzip tar 7zip .net40 .net35 sl4</tags>
</metadata>
<files>
<file src="..\bin\SharpCompress.3.5.dll" target="lib\net35\SharpCompress.3.5.dll" />
<file src="..\bin\SharpCompress.dll" target="lib\net40\SharpCompress.dll" />
<file src="..\bin\SharpCompress.WP7.dll" target="lib\sl3-wp\SharpCompress.WP7.dll" />
<file src="..\bin\SharpCompress.Silverlight.dll" target="lib\sl50\SharpCompress.Silverlight.dll" />
</files>
</package>

195
README.md

@@ -1,24 +1,197 @@
SharpCompress
=============
# SharpCompress
Github mirror of http://sharpcompress.codeplex.com
SharpCompress is a compression library in pure C# for .NET 3.5, 4.5 and .NET Standard 1.0/1.3 that can unrar, un7zip, unzip, untar, unbzip2 and ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip is implemented.
SharpCompress is a compression library for .NET/Mono/Silverlight/WP7 that can unrar, un7zip, unzip, untar unbzip2 and ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip is implemented.
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
AppVeyor Build -
[![Build status](https://ci.appveyor.com/api/projects/status/voxg971oemmvxh1e/branch/master?svg=true)](https://ci.appveyor.com/project/adamhathcock/sharpcompress/branch/master)
A Simple Request
Circle CI Build -
[![CircleCI](https://circleci.com/gh/adamhathcock/sharpcompress.svg?style=svg)](https://circleci.com/gh/adamhathcock/sharpcompress)
## Need Help?
Post Issues on Github!
Check the [Supported Formats](FORMATS.md) and [Basic Usage.](USAGE.md)
## Recommended Formats
In general, I recommend GZip (Deflate)/BZip2 (BZip)/LZip (LZMA), as the simplicity of the formats lends itself to better long-term archival as well as streamability. Tar is often used in conjunction with them for multiple files in a single archive (e.g. `.tar.gz`).
Zip is okay, but it's a very haphazard format and the variation in headers and implementations makes it hard to get correct. It uses Deflate by default but supports a lot of compression methods.
RAR is not recommended as it's a proprietary format and the compression is closed source. Use Tar/LZip for LZMA.
7Zip and XZ are both overly complicated. 7Zip does not support streamable formats. XZ has known holes explained here: http://www.nongnu.org/lzip/xz_inadequate.html. Use Tar/LZip for LZMA compression instead.
## A Simple Request
Hi everyone. I hope you're using SharpCompress and finding it useful. Please give me feedback on what you'd like to see changed especially as far as usability goes. New feature suggestions are always welcome as well. I would also like to know what projects SharpCompress is being used in. I like seeing how it is used to give me ideas for future versions. Thanks!
Want to contribute?
Please do not email me directly to ask for help. If you think there is a real issue, please report it here.
## Want to contribute?
I'm always looking for help or ideas. Please submit code or email with ideas. Unfortunately, just letting me know you'd like to help is not enough because I really have no overall plan of what needs to be done. I'll definitely accept code submissions and add you as a member of the project!
TODOs (always lots):
## TODOs (always lots)
* RAR 5 decryption support
* 7Zip writing
* RAR Decryption
* Zip64
* Zip64 (need writing support and extended reading)
* Multi-volume Zip support.
7Zip implementation based on: https://code.google.com/p/managed-lzma/
## Version Log
### Version 0.18
* [Now on Github releases](https://github.com/adamhathcock/sharpcompress/releases/tag/0.18)
### Version 0.17.1
* Fix - [Bug Fix for .NET Core on Windows](https://github.com/adamhathcock/sharpcompress/pull/257)
### Version 0.17.0
* New - Full LZip support! Can read and write LZip files and Tars inside LZip files. [Make LZip a first class citizen. #241](https://github.com/adamhathcock/sharpcompress/issues/241)
* New - XZ read support! Can read XZ files and Tars inside XZ files. [XZ in SharpCompress #91](https://github.com/adamhathcock/sharpcompress/issues/94)
* Fix - [Regression - zip file writing on seekable streams always assumed stream start was 0. Introduced with Zip64 writing.](https://github.com/adamhathcock/sharpcompress/issues/244)
* Fix - [Zip files with post-data descriptors can be properly skipped via decompression](https://github.com/adamhathcock/sharpcompress/issues/162)
### Version 0.16.2
* Fix [.NET 3.5 should support files and cryptography (was a regression from 0.16.0)](https://github.com/adamhathcock/sharpcompress/pull/251)
* Fix [Zip per entry compression customization wrote the wrong method into the zip archive](https://github.com/adamhathcock/sharpcompress/pull/249)
### Version 0.16.1
* Fix [Preserve compression method when getting a compressed stream](https://github.com/adamhathcock/sharpcompress/pull/235)
* Fix [RAR entry key normalization fix](https://github.com/adamhathcock/sharpcompress/issues/201)
### Version 0.16.0
* Breaking - [Progress Event Tracking rethink](https://github.com/adamhathcock/sharpcompress/pull/226)
* Update to VS2017 - [VS2017](https://github.com/adamhathcock/sharpcompress/pull/231) - Framework targets have been changed.
* New - [Add Zip64 writing](https://github.com/adamhathcock/sharpcompress/pull/211)
* [Fix invalid/mismatching Zip version flags.](https://github.com/adamhathcock/sharpcompress/issues/164) - This allows nuget/System.IO.Packaging to read zip files generated by SharpCompress
* [Fix 7Zip directory hiding](https://github.com/adamhathcock/sharpcompress/pull/215/files)
* [Verify RAR CRC headers](https://github.com/adamhathcock/sharpcompress/pull/220)
### Version 0.15.2
* [Fix invalid headers](https://github.com/adamhathcock/sharpcompress/pull/210) - fixes an issue creating large-ish zip archives that was introduced with zip64 reading.
### Version 0.15.1
* [Zip64 extending information and ZipReader](https://github.com/adamhathcock/sharpcompress/pull/206)
### Version 0.15.0
* [Add zip64 support for ZipArchive extraction](https://github.com/adamhathcock/sharpcompress/pull/205)
### Version 0.14.1
* [.NET Assemblies aren't strong named](https://github.com/adamhathcock/sharpcompress/issues/158)
* [Pkware encryption for Zip files didn't allow for multiple reads of an entry](https://github.com/adamhathcock/sharpcompress/issues/197)
* [GZip Entry couldn't be read multiple times](https://github.com/adamhathcock/sharpcompress/issues/198)
### Version 0.14.0
* [Support for LZip reading in for Tars](https://github.com/adamhathcock/sharpcompress/pull/191)
### Version 0.13.1
* [Fix null password on ReaderFactory. Fix null options on SevenZipArchive](https://github.com/adamhathcock/sharpcompress/pull/188)
* [Make PpmdProperties lazy to avoid unnecessary allocations.](https://github.com/adamhathcock/sharpcompress/pull/185)
### Version 0.13.0
* Breaking change: Big refactor of Options on API.
* 7Zip supports Deflate
### Version 0.12.4
* Forward only zip issue fix https://github.com/adamhathcock/sharpcompress/issues/160
* Try to fix frameworks again by copying targets from JSON.NET
### Version 0.12.3
* 7Zip fixes https://github.com/adamhathcock/sharpcompress/issues/73
* Maybe all profiles will work with project.json now
### Version 0.12.2
* Support Profile 259 again
### Version 0.12.1
* Support Silverlight 5
### Version 0.12.0
* .NET Core RTM!
* Bug fix for Tar long paths
### Version 0.11.6
* Bug fix for global header in Tar
* Writers now have a leaveOpen `bool` overload. They won't close streams unless requested to.
### Version 0.11.5
* Bug fix in Skip method
### Version 0.11.4
* SharpCompress is now endian neutral (matters for Mono platforms)
* Fix for Inflate (need to change implementation)
* Fixes for RAR detection
### Version 0.11.1
* Added Cancel on IReader
* Removed .NET 2.0 support and LinqBridge dependency
### Version 0.11
* Been over a year, contains mainly fixes from contributors!
* Possible breaking change: ArchiveEncoding is UTF8 by default now.
* TAR supports writing long names using longlink
* RAR Protect Header added
### Version 0.10.3
* Finally fixed Disposal issue when creating a new archive with the Archive API
### Version 0.10.2
* Fixed Rar Header reading for invalid extended time headers.
* Windows Store assembly is now strong named
* Known issues with Long Tar names being worked on
* Updated to VS2013
* Portable targets SL5 and Windows Phone 8 (up from SL4 and WP7)
### Version 0.10.1
* Fixed 7Zip extraction performance problem
### Version 0.10:
* Added support for RAR Decryption (thanks to https://github.com/hrasyid)
* Embedded some BouncyCastle crypto classes to allow RAR Decryption and Winzip AES Decryption in Portable and Windows Store DLLs
* Built in Release (I think)
XZ implementation based on: https://github.com/sambott/XZ.NET by @sambott
7Zip implementation based on: https://code.google.com/p/managed-lzma/
LICENSE
Copyright (c) 2000 - 2011 The Legion Of The Bouncy Castle (http://www.bouncycastle.org)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@@ -1,134 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using SharpCompress.Archive;
using SharpCompress.Common;
namespace SharpCompress.Test
{
public class ArchiveTests : TestBase
{
protected void ArchiveStreamReadExtractAll(string testArchive, CompressionType compression)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
ArchiveStreamReadExtractAll(testArchive.AsEnumerable(), compression);
}
protected void ArchiveStreamReadExtractAll(IEnumerable<string> testArchives, CompressionType compression)
{
foreach (var path in testArchives)
{
ResetScratch();
using (Stream stream = File.OpenRead(path))
using (var archive = ArchiveFactory.Open(stream))
{
Assert.IsTrue(archive.IsSolid);
using (var reader = archive.ExtractAllEntries())
{
ReaderTests.UseReader(this, reader, compression);
}
VerifyFiles();
if (archive.Entries.First().CompressionType == CompressionType.Rar)
{
return;
}
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
entry.WriteToDirectory(SCRATCH_FILES_PATH,
ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
}
}
VerifyFiles();
}
}
protected void ArchiveStreamRead(string testArchive)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
ArchiveStreamRead(testArchive.AsEnumerable());
}
protected void ArchiveStreamRead(params string[] testArchives)
{
ArchiveStreamRead(testArchives.Select(x => Path.Combine(TEST_ARCHIVES_PATH, x)));
}
protected void ArchiveStreamRead(IEnumerable<string> testArchives)
{
foreach (var path in testArchives)
{
ResetScratch();
using (Stream stream = File.OpenRead(path))
using (var archive = ArchiveFactory.Open(stream))
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
entry.WriteToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
}
}
VerifyFiles();
}
}
protected void ArchiveFileRead(string testArchive)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
ArchiveFileRead(testArchive.AsEnumerable());
}
protected void ArchiveFileRead(IEnumerable<string> testArchives)
{
foreach (var path in testArchives)
{
ResetScratch();
using (var archive = ArchiveFactory.Open(path))
{
archive.EntryExtractionBegin += archive_EntryExtractionBegin;
archive.FilePartExtractionBegin += archive_FilePartExtractionBegin;
archive.CompressedBytesRead += archive_CompressedBytesRead;
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
entry.WriteToDirectory(SCRATCH_FILES_PATH,
ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
}
}
VerifyFiles();
}
}
void archive_CompressedBytesRead(object sender, CompressedBytesReadEventArgs e)
{
Console.WriteLine("Read Compressed File Part Bytes: {0} Percentage: {1}%",
e.CurrentFilePartCompressedBytesRead, CreatePercentage(e.CurrentFilePartCompressedBytesRead, partTotal));
string percentage = entryTotal.HasValue ? CreatePercentage(e.CompressedBytesRead,
entryTotal.Value).ToString() : "Unknown";
Console.WriteLine("Read Compressed File Entry Bytes: {0} Percentage: {1}%",
e.CompressedBytesRead, percentage);
}
void archive_FilePartExtractionBegin(object sender, FilePartExtractionBeginEventArgs e)
{
this.partTotal = e.Size;
Console.WriteLine("Initializing File Part Extraction: " + e.Name);
}
void archive_EntryExtractionBegin(object sender, ArchiveExtractionEventArgs<IArchiveEntry> e)
{
this.entryTotal = e.Item.Size;
Console.WriteLine("Initializing File Entry Extraction: " + e.Item.FilePath);
}
private long? entryTotal;
private long partTotal;
private int CreatePercentage(long n, long d)
{
return (int)(((double)n / (double)d) * 100);
}
}
}

View File

@@ -1,61 +0,0 @@
using System;
using System.IO;
using System.Linq;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using SharpCompress.Archive;
using SharpCompress.Archive.GZip;
namespace SharpCompress.Test
{
[TestClass]
public class GZipArchiveTests : ArchiveTests
{
public GZipArchiveTests()
{
UseExtensionInsteadOfNameToVerify = true;
}
[TestMethod]
public void GZip_Archive_Generic()
{
ResetScratch();
using (Stream stream = File.Open(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"), FileMode.Open))
using (var archive = ArchiveFactory.Open(stream))
{
var entry = archive.Entries.First();
entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.FilePath));
}
CompareArchivesByPath(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar"),
Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar"));
}
[TestMethod]
public void GZip_Archive()
{
ResetScratch();
using (Stream stream = File.Open(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"), FileMode.Open))
using (var archive = GZipArchive.Open(stream))
{
var entry = archive.Entries.First();
entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.FilePath));
}
CompareArchivesByPath(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar"),
Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar"));
}
[TestMethod]
[ExpectedException(typeof(InvalidOperationException))]
public void GZip_Archive_NoAdd()
{
string jpg = Path.Combine(ORIGINAL_FILES_PATH, "jpg\\test.jpg");
ResetScratch();
using (Stream stream = File.Open(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"), FileMode.Open))
using (var archive = GZipArchive.Open(stream))
{
archive.AddEntry("jpg\\test.jpg", jpg);
archive.SaveTo(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar.gz"));
}
}
}
}

View File

@@ -1,36 +0,0 @@
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("SharpCompress.Test")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("Microsoft")]
[assembly: AssemblyProduct("SharpCompress.Test")]
[assembly: AssemblyCopyright("Copyright © Microsoft 2011")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("f01fddfb-445f-4548-9f69-88b69a8b71b0")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]

View File

@@ -1,63 +0,0 @@
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.269
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace SharpCompress.Test.Properties {
using System;
/// <summary>
/// A strongly-typed resource class, for looking up localized strings, etc.
/// </summary>
// This class was auto-generated by the StronglyTypedResourceBuilder
// class via a tool like ResGen or Visual Studio.
// To add or remove a member, edit your .ResX file then rerun ResGen
// with the /str option, or rebuild your VS project.
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
internal class Resources {
private static global::System.Resources.ResourceManager resourceMan;
private static global::System.Globalization.CultureInfo resourceCulture;
[global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
internal Resources() {
}
/// <summary>
/// Returns the cached ResourceManager instance used by this class.
/// </summary>
[global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
internal static global::System.Resources.ResourceManager ResourceManager {
get {
if (object.ReferenceEquals(resourceMan, null)) {
global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("SharpCompress.Test.Properties.Resources", typeof(Resources).Assembly);
resourceMan = temp;
}
return resourceMan;
}
}
/// <summary>
/// Overrides the current thread's CurrentUICulture property for all
/// resource lookups using this strongly typed resource class.
/// </summary>
[global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
internal static global::System.Globalization.CultureInfo Culture {
get {
return resourceCulture;
}
set {
resourceCulture = value;
}
}
}
}

View File

@@ -1,117 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<root>
<!--
Microsoft ResX Schema
Version 2.0
The primary goals of this format is to allow a simple XML format
that is mostly human readable. The generation and parsing of the
various data types are done through the TypeConverter classes
associated with the data types.
Example:
... ado.net/XML headers & schema ...
<resheader name="resmimetype">text/microsoft-resx</resheader>
<resheader name="version">2.0</resheader>
<resheader name="reader">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader>
<resheader name="writer">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader>
<data name="Name1"><value>this is my long string</value><comment>this is a comment</comment></data>
<data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data>
<data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64">
<value>[base64 mime encoded serialized .NET Framework object]</value>
</data>
<data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64">
<value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value>
<comment>This is a comment</comment>
</data>
There are any number of "resheader" rows that contain simple
name/value pairs.
Each data row contains a name, and value. The row also contains a
type or mimetype. Type corresponds to a .NET class that support
text/value conversion through the TypeConverter architecture.
Classes that don't support this are serialized and stored with the
mimetype set.
The mimetype is used for serialized objects, and tells the
ResXResourceReader how to depersist the object. This is currently not
extensible. For a given mimetype the value must be set accordingly:
Note - application/x-microsoft.net.object.binary.base64 is the format
that the ResXResourceWriter will generate, however the reader can
read any of the formats listed below.
mimetype: application/x-microsoft.net.object.binary.base64
value : The object must be serialized with
: System.Serialization.Formatters.Binary.BinaryFormatter
: and then encoded with base64 encoding.
mimetype: application/x-microsoft.net.object.soap.base64
value : The object must be serialized with
: System.Runtime.Serialization.Formatters.Soap.SoapFormatter
: and then encoded with base64 encoding.
mimetype: application/x-microsoft.net.object.bytearray.base64
value : The object must be serialized into a byte array
: using a System.ComponentModel.TypeConverter
: and then encoded with base64 encoding.
-->
<xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
<xsd:element name="root" msdata:IsDataSet="true">
<xsd:complexType>
<xsd:choice maxOccurs="unbounded">
<xsd:element name="metadata">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" />
<xsd:attribute name="type" type="xsd:string" />
<xsd:attribute name="mimetype" type="xsd:string" />
</xsd:complexType>
</xsd:element>
<xsd:element name="assembly">
<xsd:complexType>
<xsd:attribute name="alias" type="xsd:string" />
<xsd:attribute name="name" type="xsd:string" />
</xsd:complexType>
</xsd:element>
<xsd:element name="data">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
<xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" msdata:Ordinal="1" />
<xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
<xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
</xsd:complexType>
</xsd:element>
<xsd:element name="resheader">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" />
</xsd:complexType>
</xsd:element>
</xsd:choice>
</xsd:complexType>
</xsd:element>
</xsd:schema>
<resheader name="resmimetype">
<value>text/microsoft-resx</value>
</resheader>
<resheader name="version">
<value>2.0</value>
</resheader>
<resheader name="reader">
<value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<resheader name="writer">
<value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
</root>
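
The ResX header above explains the format's contract: plain strings sit in data/value elements, while serialized objects carry a mimetype that tells the reader how to deserialize them. As a minimal sketch (not part of this diff, and assuming the compiled resources are embedded in the executing assembly), such entries are typically consumed through System.Resources.ResourceManager, just as the generated Resources class earlier in this diff does; the key names below are taken from the example entries in the header comment.

// Minimal sketch: looking up .resx entries through ResourceManager.
// The base name matches the generated SharpCompress.Test resources class above;
// "Name1" and "Bitmap1" are the illustrative keys from the ResX header comment.
using System.Reflection;
using System.Resources;

static class ResxLookupSketch
{
    static void Run()
    {
        var manager = new ResourceManager("SharpCompress.Test.Properties.Resources",
                                          Assembly.GetExecutingAssembly());
        string text = manager.GetString("Name1");   // plain string entry
        object blob = manager.GetObject("Bitmap1"); // resolved via its mimetype/TypeConverter
    }
}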


@@ -1,26 +0,0 @@
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.269
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace SharpCompress.Test.Properties {
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "10.0.0.0")]
internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
public static Settings Default {
get {
return defaultInstance;
}
}
}
}


@@ -1,7 +0,0 @@
<?xml version='1.0' encoding='utf-8'?>
<SettingsFile xmlns="http://schemas.microsoft.com/VisualStudio/2004/01/settings" CurrentProfile="(Default)">
<Profiles>
<Profile Name="(Default)" />
</Profiles>
<Settings />
</SettingsFile>


@@ -1,170 +0,0 @@
using System.IO;
using System.Linq;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using SharpCompress.Archive;
using SharpCompress.Archive.Rar;
using SharpCompress.Common;
namespace SharpCompress.Test
{
[TestClass]
public class RarArchiveTests : ArchiveTests
{
[TestMethod]
public void Rar_None_ArchiveStreamRead()
{
ArchiveStreamRead("Rar.none.rar");
}
[TestMethod]
public void Rar_ArchiveStreamRead()
{
ArchiveStreamRead("Rar.rar");
}
[TestMethod]
public void Rar_Jpg_ArchiveStreamRead()
{
ResetScratch();
using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "RarJpeg.jpg")))
{
using (var archive = RarArchive.Open(stream, Options.LookForHeader))
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
entry.WriteToDirectory(SCRATCH_FILES_PATH,
ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
}
}
VerifyFiles();
}
}
[TestMethod]
public void Rar_IsSolidArchiveCheck()
{
ResetScratch();
using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Rar.rar")))
{
using (var archive = RarArchive.Open(stream))
{
Assert.IsFalse(archive.IsSolidArchive());
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
entry.WriteToDirectory(SCRATCH_FILES_PATH,
ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
}
}
}
VerifyFiles();
}
[TestMethod]
[ExpectedException(typeof(InvalidFormatException))]
public void Rar_Solid_ArchiveStreamRead()
{
ArchiveStreamRead("Rar.solid.rar");
}
[TestMethod]
public void Rar_Solid_StreamRead_Extract_All()
{
ArchiveStreamReadExtractAll("Rar.solid.rar", CompressionType.Rar);
}
[TestMethod]
public void Rar_Multi_ArchiveStreamRead()
{
var testArchives = new string[] { "Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar"};
ResetScratch();
using (var archive = RarArchive.Open(testArchives.Select(s => Path.Combine(TEST_ARCHIVES_PATH, s))
.Select(p => File.OpenRead(p))))
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
entry.WriteToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
}
}
VerifyFiles();
}
[TestMethod]
public void RarNoneArchiveFileRead()
{
ArchiveFileRead("Rar.none.rar");
}
[TestMethod]
public void Rar_ArchiveFileRead()
{
ArchiveFileRead("Rar.rar");
}
[TestMethod]
public void Rar_ArchiveFileRead_HasDirectories()
{
ResetScratch();
using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Rar.rar")))
{
using (var archive = RarArchive.Open(stream))
{
Assert.IsFalse(archive.IsSolidArchive());
Assert.IsTrue(archive.Entries.Any(entry => entry.IsDirectory));
}
}
}
[TestMethod]
public void Rar_Jpg_ArchiveFileRead()
{
ResetScratch();
using (var archive = RarArchive.Open(Path.Combine(TEST_ARCHIVES_PATH, "RarJpeg.jpg"), Options.LookForHeader))
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
entry.WriteToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
}
}
VerifyFiles();
}
[TestMethod]
[ExpectedException(typeof(InvalidFormatException))]
public void Rar_Solid_ArchiveFileRead()
{
ArchiveFileRead("Rar.solid.rar");
}
[TestMethod]
public void Rar_Multi_ArchiveFileRead()
{
ArchiveFileRead("Rar.multi.part01.rar");
}
[TestMethod]
public void Rar_IsFirstVolume_True()
{
using (var archive = RarArchive.Open(Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part01.rar")))
{
Assert.IsTrue(archive.IsMultipartVolume());
Assert.IsTrue(archive.IsFirstVolume());
}
}
[TestMethod]
public void Rar_IsFirstVolume_False()
{
using (var archive = RarArchive.Open(Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part03.rar")))
{
Assert.IsTrue(archive.IsMultipartVolume());
Assert.IsFalse(archive.IsFirstVolume());
}
}
}
}


@@ -1,191 +0,0 @@
using System.IO;
using System.Linq;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using SharpCompress.Common;
using SharpCompress.Reader;
using SharpCompress.Reader.Rar;
namespace SharpCompress.Test
{
[TestClass]
public class RarReaderTests : ReaderTests
{
[TestMethod]
public void Rar_Multi_Reader()
{
var testArchives = new string[] { "Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar"};
ResetScratch();
using (var reader = RarReader.Open(testArchives.Select(s => Path.Combine(TEST_ARCHIVES_PATH, s))
.Select(p => File.OpenRead(p))))
{
while (reader.MoveToNextEntry())
{
reader.WriteEntryToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
}
}
VerifyFiles();
}
[TestMethod]
public void Rar_Multi_Reader_Delete_Files()
{
var testArchives = new string[] { "Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar"};
ResetScratch();
foreach (var file in testArchives)
{
File.Copy(Path.Combine(TEST_ARCHIVES_PATH, file), Path.Combine(SCRATCH2_FILES_PATH, file));
}
using (var reader = RarReader.Open(testArchives.Select(s => Path.Combine(SCRATCH2_FILES_PATH, s))
.Select(p => File.OpenRead(p)), Options.None))
{
while (reader.MoveToNextEntry())
{
reader.WriteEntryToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
}
}
VerifyFiles();
foreach (var file in testArchives.Select(s => Path.Combine(SCRATCH2_FILES_PATH, s)))
{
File.Delete(file);
}
}
[TestMethod]
public void Rar_None_Reader()
{
Read("Rar.none.rar", CompressionType.Rar);
}
[TestMethod]
public void Rar_Reader()
{
Read("Rar.rar", CompressionType.Rar);
}
[TestMethod]
public void Rar_Entry_Stream()
{
ResetScratch();
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Rar.rar")))
using (var reader = RarReader.Open(stream))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.AreEqual(reader.Entry.CompressionType, CompressionType.Rar);
using (var entryStream = reader.OpenEntryStream())
{
string file = Path.GetFileName(reader.Entry.FilePath);
string folder = Path.GetDirectoryName(reader.Entry.FilePath);
string destdir = Path.Combine(SCRATCH_FILES_PATH, folder);
if (!Directory.Exists(destdir))
{
Directory.CreateDirectory(destdir);
}
string destinationFileName = Path.Combine(destdir, file);
using (FileStream fs = File.OpenWrite(destinationFileName))
{
entryStream.TransferTo(fs);
}
}
}
}
}
VerifyFiles();
}
[TestMethod]
public void Rar_Reader_Audio_program()
{
ResetScratch();
using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Audio_program.rar")))
using (var reader = RarReader.Open(stream, Options.LookForHeader))
{
while (reader.MoveToNextEntry())
{
Assert.AreEqual(reader.Entry.CompressionType, CompressionType.Rar);
reader.WriteEntryToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
}
}
CompareFilesByPath(Path.Combine(SCRATCH_FILES_PATH, "test.dat"),
Path.Combine(MISC_TEST_FILES_PATH, "test.dat"));
}
[TestMethod]
public void Rar_Jpg_Reader()
{
ResetScratch();
using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "RarJpeg.jpg")))
using (var reader = RarReader.Open(stream, Options.LookForHeader))
{
while (reader.MoveToNextEntry())
{
Assert.AreEqual(reader.Entry.CompressionType, CompressionType.Rar);
reader.WriteEntryToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
}
}
VerifyFiles();
}
[TestMethod]
public void Rar_Solid_Reader()
{
Read("Rar.solid.rar", CompressionType.Rar);
}
[TestMethod]
public void Rar_Solid_Skip_Reader()
{
ResetScratch();
using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Rar.solid.rar")))
using (var reader = RarReader.Open(stream, Options.LookForHeader))
{
while (reader.MoveToNextEntry())
{
if (reader.Entry.FilePath.Contains("jpg"))
{
Assert.AreEqual(reader.Entry.CompressionType, CompressionType.Rar);
reader.WriteEntryToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
}
}
}
}
[TestMethod]
public void Rar_Reader_Skip()
{
ResetScratch();
using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Rar.rar")))
using (var reader = RarReader.Open(stream, Options.LookForHeader))
{
while (reader.MoveToNextEntry())
{
if (reader.Entry.FilePath.Contains("jpg"))
{
Assert.AreEqual(reader.Entry.CompressionType, CompressionType.Rar);
reader.WriteEntryToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
}
}
}
}
}
}


@@ -1,43 +0,0 @@
using System.Collections.Generic;
using System.IO;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using SharpCompress.Common;
using SharpCompress.Reader;
namespace SharpCompress.Test
{
public class ReaderTests : TestBase
{
protected void Read(string testArchive, CompressionType expectedCompression)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
Read(testArchive.AsEnumerable(), expectedCompression);
}
protected void Read(IEnumerable<string> testArchives, CompressionType expectedCompression)
{
foreach (var path in testArchives)
{
using (Stream stream = File.OpenRead(path))
using (IReader reader = ReaderFactory.Open(stream))
{
UseReader(this, reader, expectedCompression);
}
}
}
public static void UseReader(TestBase test, IReader reader, CompressionType expectedCompression)
{
test.ResetScratch();
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.AreEqual(reader.Entry.CompressionType, expectedCompression);
reader.WriteEntryToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
}
}
test.VerifyFiles();
}
}
}


@@ -1,84 +0,0 @@
using System.IO;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using SharpCompress.IO;
namespace SharpCompress.Test
{
[TestClass]
public class RewindableStreamTest
{
[TestMethod]
public void TestRewind()
{
MemoryStream ms = new MemoryStream();
BinaryWriter bw = new BinaryWriter(ms);
bw.Write(1);
bw.Write(2);
bw.Write(3);
bw.Write(4);
bw.Write(5);
bw.Write(6);
bw.Write(7);
bw.Flush();
ms.Position = 0;
RewindableStream stream = new RewindableStream(ms);
stream.StartRecording();
BinaryReader br = new BinaryReader(stream);
Assert.AreEqual(br.ReadInt32(), 1);
Assert.AreEqual(br.ReadInt32(), 2);
Assert.AreEqual(br.ReadInt32(), 3);
Assert.AreEqual(br.ReadInt32(), 4);
stream.Rewind(true);
stream.StartRecording();
Assert.AreEqual(br.ReadInt32(), 1);
Assert.AreEqual(br.ReadInt32(), 2);
Assert.AreEqual(br.ReadInt32(), 3);
Assert.AreEqual(br.ReadInt32(), 4);
Assert.AreEqual(br.ReadInt32(), 5);
Assert.AreEqual(br.ReadInt32(), 6);
Assert.AreEqual(br.ReadInt32(), 7);
stream.Rewind(true);
stream.StartRecording();
Assert.AreEqual(br.ReadInt32(), 1);
Assert.AreEqual(br.ReadInt32(), 2);
Assert.AreEqual(br.ReadInt32(), 3);
Assert.AreEqual(br.ReadInt32(), 4);
}
[TestMethod]
public void TestIncompleteRewind()
{
MemoryStream ms = new MemoryStream();
BinaryWriter bw = new BinaryWriter(ms);
bw.Write(1);
bw.Write(2);
bw.Write(3);
bw.Write(4);
bw.Write(5);
bw.Write(6);
bw.Write(7);
bw.Flush();
ms.Position = 0;
RewindableStream stream = new RewindableStream(ms);
stream.StartRecording();
BinaryReader br = new BinaryReader(stream);
Assert.AreEqual(br.ReadInt32(), 1);
Assert.AreEqual(br.ReadInt32(), 2);
Assert.AreEqual(br.ReadInt32(), 3);
Assert.AreEqual(br.ReadInt32(), 4);
stream.Rewind(true);
Assert.AreEqual(br.ReadInt32(), 1);
Assert.AreEqual(br.ReadInt32(), 2);
stream.StartRecording();
Assert.AreEqual(br.ReadInt32(), 3);
Assert.AreEqual(br.ReadInt32(), 4);
Assert.AreEqual(br.ReadInt32(), 5);
stream.Rewind(true);
Assert.AreEqual(br.ReadInt32(), 3);
Assert.AreEqual(br.ReadInt32(), 4);
Assert.AreEqual(br.ReadInt32(), 5);
Assert.AreEqual(br.ReadInt32(), 6);
Assert.AreEqual(br.ReadInt32(), 7);
}
}
}


@@ -1,74 +0,0 @@
using System;
using System.IO;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using SharpCompress.Common;
namespace SharpCompress.Test
{
[TestClass]
public class SevenZipArchiveTests : ArchiveTests
{
[TestMethod]
public void SevenZipArchive_LZMA_StreamRead()
{
ArchiveStreamRead("7Zip.LZMA.7z");
}
[TestMethod]
public void SevenZipArchive_LZMA_PathRead()
{
ArchiveFileRead("7Zip.LZMA.7z");
}
[TestMethod]
public void SevenZipArchive_PPMd_StreamRead()
{
ArchiveStreamRead("7Zip.PPMd.7z");
}
[TestMethod]
public void SevenZipArchive_PPMd_StreamRead_Extract_All()
{
ArchiveStreamReadExtractAll("7Zip.PPMd.7z", CompressionType.PPMd);
}
[TestMethod]
public void SevenZipArchive_PPMd_PathRead()
{
ArchiveFileRead("7Zip.PPMd.7z");
}
[TestMethod]
public void SevenZipArchive_LZMA2_StreamRead()
{
ArchiveStreamRead("7Zip.LZMA2.7z");
}
[TestMethod]
public void SevenZipArchive_LZMA2_PathRead()
{
ArchiveFileRead("7Zip.LZMA2.7z");
}
[TestMethod]
public void SevenZipArchive_BZip2_StreamRead()
{
ArchiveStreamRead("7Zip.BZip2.7z");
}
[TestMethod]
public void SevenZipArchive_BZip2_PathRead()
{
ArchiveFileRead("7Zip.BZip2.7z");
}
[TestMethod]
[ExpectedException(typeof(IndexOutOfRangeException))]
public void SevenZipArchive_BZip2_Split()
{
ArchiveStreamRead("Original.7z.001", "Original.7z.002",
"Original.7z.003", "Original.7z.004", "Original.7z.005",
"Original.7z.006", "Original.7z.007");
}
}
}


@@ -1,112 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProductVersion>8.0.30703</ProductVersion>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>{15679D7A-F22C-4943-87FF-BF5C76C4A6FD}</ProjectGuid>
<OutputType>Library</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>SharpCompress.Test</RootNamespace>
<AssemblyName>SharpCompress.Test</AssemblyName>
<TargetFrameworkVersion>v4.0</TargetFrameworkVersion>
<TargetFrameworkProfile>
</TargetFrameworkProfile>
<FileAlignment>512</FileAlignment>
<SolutionDir Condition="$(SolutionDir) == '' Or $(SolutionDir) == '*Undefined*'">..\..\sharpcompress\</SolutionDir>
<RestorePackages>true</RestorePackages>
<ProjectTypeGuids>{3AC096D0-A1C2-E12C-1390-A8335801FDAB};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}</ProjectTypeGuids>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<PlatformTarget>AnyCPU</PlatformTarget>
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<PlatformTarget>AnyCPU</PlatformTarget>
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup>
<StartupObject>
</StartupObject>
</PropertyGroup>
<PropertyGroup>
<SignAssembly>true</SignAssembly>
</PropertyGroup>
<PropertyGroup>
<AssemblyOriginatorKeyFile>..\SharpCompress\SharpCompress.pfx</AssemblyOriginatorKeyFile>
</PropertyGroup>
<ItemGroup>
<Reference Include="Microsoft.VisualStudio.QualityTools.UnitTestFramework, Version=10.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL" />
<Reference Include="System" />
<Reference Include="System.Core" />
</ItemGroup>
<ItemGroup>
<Compile Include="ArchiveTests.cs" />
<Compile Include="GZip\GZipWriterTests.cs" />
<Compile Include="GZip\GZipArchiveTests.cs" />
<Compile Include="SevenZip\SevenZipArchiveTests.cs" />
<Compile Include="Streams\StreamTests.cs" />
<Compile Include="Tar\TarWriterTests.cs" />
<Compile Include="Tar\TarReaderTests.cs" />
<Compile Include="Zip\ZipWriterTests.cs" />
<Compile Include="WriterTests.cs" />
<Compile Include="Rar\RarReaderTests.cs" />
<Compile Include="ReaderTests.cs" />
<Compile Include="Tar\TarArchiveTests.cs" />
<Compile Include="Zip\ZipArchiveTests.cs" />
<Compile Include="Rar\RarArchiveTests.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<Compile Include="RewindableStreamTest.cs" />
<Compile Include="TestBase.cs" />
<Compile Include="TestStream.cs" />
<Compile Include="Zip\ZipReaderTests.cs" />
<EmbeddedResource Include="Properties\Resources.resx">
<Generator>ResXFileCodeGenerator</Generator>
<LastGenOutput>Resources.Designer.cs</LastGenOutput>
<SubType>Designer</SubType>
</EmbeddedResource>
<Compile Include="Properties\Resources.Designer.cs">
<AutoGen>True</AutoGen>
<DependentUpon>Resources.resx</DependentUpon>
<DesignTime>True</DesignTime>
</Compile>
<None Include="..\SharpCompress\SharpCompress.pfx">
<Link>SharpCompress.pfx</Link>
</None>
<None Include="Properties\Settings.settings">
<Generator>SettingsSingleFileGenerator</Generator>
<LastGenOutput>Settings.Designer.cs</LastGenOutput>
</None>
<Compile Include="Properties\Settings.Designer.cs">
<AutoGen>True</AutoGen>
<DependentUpon>Settings.settings</DependentUpon>
<DesignTimeSharedInput>True</DesignTimeSharedInput>
</Compile>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\SharpCompress\SharpCompress.csproj">
<Project>{10A689CF-76A2-4A4F-96E4-553C33398438}</Project>
<Name>SharpCompress</Name>
</ProjectReference>
</ItemGroup>
<Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project>


@@ -1,91 +0,0 @@
using System.IO;
using System.Linq;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using SharpCompress.Archive;
using SharpCompress.Archive.Tar;
using SharpCompress.Common;
namespace SharpCompress.Test
{
[TestClass]
public class TarArchiveTests : ArchiveTests
{
public TarArchiveTests()
{
UseExtensionInsteadOfNameToVerify = true;
}
[TestMethod]
public void TarArchiveStreamRead()
{
ArchiveStreamRead("Tar.tar");
}
[TestMethod]
public void TarArchivePathRead()
{
ArchiveFileRead("Tar.tar");
}
[TestMethod]
public void TarArchivePathReadLongName()
{
string unmodified = Path.Combine(TEST_ARCHIVES_PATH, "very long filename.tar");
using (var archive = TarArchive.Open(unmodified))
{
Assert.AreEqual(2, archive.Entries.Count);
Assert.AreEqual(archive.Entries.Last().FilePath, @"very long filename/very long filename very long filename very long filename very long filename very long filename very long filename very long filename very long filename very long filename very long filename.jpg");
}
}
[TestMethod]
public void Tar_Create_New()
{
string scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.tar");
string unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
base.ResetScratch();
using (var archive = TarArchive.Create())
{
archive.AddAllFromDirectory(ORIGINAL_FILES_PATH);
archive.SaveTo(scratchPath, CompressionType.None);
}
CompareArchivesByPath(unmodified, scratchPath);
}
[TestMethod]
public void Tar_Random_Write_Add()
{
string jpg = Path.Combine(ORIGINAL_FILES_PATH, "jpg\\test.jpg");
string scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.mod.tar");
string unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.mod.tar");
string modified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
base.ResetScratch();
using (var archive = TarArchive.Open(unmodified))
{
archive.AddEntry("jpg\\test.jpg", jpg);
archive.SaveTo(scratchPath, CompressionType.None);
}
CompareArchivesByPath(modified, scratchPath);
}
[TestMethod]
public void Tar_Random_Write_Remove()
{
string scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.mod.tar");
string modified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.mod.tar");
string unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
base.ResetScratch();
using (var archive = TarArchive.Open(unmodified))
{
var entry = archive.Entries.Where(x => x.FilePath.EndsWith("jpg")).Single();
archive.RemoveEntry(entry);
archive.SaveTo(scratchPath, CompressionType.None);
}
CompareArchivesByPath(modified, scratchPath);
}
}
}


@@ -1,92 +0,0 @@
using System.Collections.Generic;
using System.IO;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using SharpCompress.Common;
using SharpCompress.Reader.Tar;
namespace SharpCompress.Test
{
[TestClass]
public class TarReaderTests : ReaderTests
{
public TarReaderTests()
{
UseExtensionInsteadOfNameToVerify = true;
}
[TestMethod]
public void Tar_Reader()
{
Read("Tar.tar", CompressionType.None);
}
[TestMethod]
public void Tar_BZip2_Reader()
{
Read("Tar.tar.bz2", CompressionType.BZip2);
}
[TestMethod]
public void Tar_GZip_Reader()
{
Read("Tar.tar.gz", CompressionType.GZip);
}
[TestMethod]
public void Tar_BZip2_Entry_Stream()
{
ResetScratch();
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2")))
using (var reader = TarReader.Open(stream))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.AreEqual(reader.Entry.CompressionType, CompressionType.BZip2);
using (var entryStream = reader.OpenEntryStream())
{
string file = Path.GetFileName(reader.Entry.FilePath);
string folder = Path.GetDirectoryName(reader.Entry.FilePath);
string destdir = Path.Combine(SCRATCH_FILES_PATH, folder);
if (!Directory.Exists(destdir))
{
Directory.CreateDirectory(destdir);
}
string destinationFileName = Path.Combine(destdir, file);
using (FileStream fs = File.OpenWrite(destinationFileName))
{
entryStream.TransferTo(fs);
}
}
}
}
}
VerifyFiles();
}
[TestMethod]
public void Tar_BZip2_Skip_Entry_Stream()
{
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2")))
using (var reader = TarReader.Open(stream))
{
List<string> names = new List<string>();
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.AreEqual(reader.Entry.CompressionType, CompressionType.BZip2);
using (var entryStream = reader.OpenEntryStream())
{
entryStream.SkipEntry();
names.Add(reader.Entry.FilePath);
}
}
}
Assert.AreEqual(names.Count, 3);
}
}
}
}


@@ -1,34 +0,0 @@
using Microsoft.VisualStudio.TestTools.UnitTesting;
using SharpCompress.Common;
namespace SharpCompress.Test
{
[TestClass]
public class TarWriterTests : WriterTests
{
public TarWriterTests()
: base(ArchiveType.Tar)
{
UseExtensionInsteadOfNameToVerify = true;
}
[TestMethod]
public void Tar_Writer()
{
Write(CompressionType.None, "Tar.noEmptyDirs.tar", "Tar.noEmptyDirs.tar");
}
[TestMethod]
public void Tar_BZip2_Writer()
{
Write(CompressionType.BZip2, "Tar.noEmptyDirs.tar.bz2", "Tar.noEmptyDirs.tar.bz2");
}
[TestMethod]
[ExpectedException(typeof(InvalidFormatException))]
public void Tar_Rar_Write()
{
Write(CompressionType.Rar, "Zip.ppmd.noEmptyDirs.zip", "Zip.ppmd.noEmptyDirs.zip");
}
}
}


@@ -1,151 +0,0 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using SharpCompress.Common;
using SharpCompress.Reader;
namespace SharpCompress.Test
{
public class TestBase
{
protected const string TEST_BASE_PATH = @"C:\Git\sharpcompress";
protected static readonly string TEST_ARCHIVES_PATH = Path.Combine(TEST_BASE_PATH, "TestArchives", "Archives");
protected static readonly string ORIGINAL_FILES_PATH = Path.Combine(TEST_BASE_PATH, "TestArchives", "Original");
protected static readonly string MISC_TEST_FILES_PATH = Path.Combine(TEST_BASE_PATH, "TestArchives", "MiscTest");
protected static readonly string SCRATCH_FILES_PATH = Path.Combine(TEST_BASE_PATH, "TestArchives", "Scratch");
protected static readonly string SCRATCH2_FILES_PATH = Path.Combine(TEST_BASE_PATH, "TestArchives", "Scratch2");
protected static IEnumerable<string> GetRarArchives()
{
yield return Path.Combine(TEST_ARCHIVES_PATH, "Rar.none.rar");
yield return Path.Combine(TEST_ARCHIVES_PATH, "Rar.rar");
yield return Path.Combine(TEST_ARCHIVES_PATH, "Rar.solid.rar");
yield return Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part01.rar");
}
protected static IEnumerable<string> GetZipArchives()
{
yield return Path.Combine(TEST_ARCHIVES_PATH, "Zip.bzip2.dd.zip");
yield return Path.Combine(TEST_ARCHIVES_PATH, "Zip.bzip2.zip");
yield return Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd-.zip");
yield return Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip");
yield return Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.zip");
yield return Path.Combine(TEST_ARCHIVES_PATH, "Zip.lzma.dd.zip");
yield return Path.Combine(TEST_ARCHIVES_PATH, "Zip.lzma.zip");
yield return Path.Combine(TEST_ARCHIVES_PATH, "Zip.none.zip");
yield return Path.Combine(TEST_ARCHIVES_PATH, "Zip.ppmd.dd.zip");
yield return Path.Combine(TEST_ARCHIVES_PATH, "Zip.ppmd.zip");
}
protected static IEnumerable<string> GetTarArchives()
{
yield return Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar");
}
protected static IEnumerable<string> GetTarBz2Archives()
{
yield return Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2");
}
protected static IEnumerable<string> GetTarGzArchives()
{
yield return Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz");
}
public void ResetScratch()
{
if (Directory.Exists(SCRATCH_FILES_PATH))
{
Directory.Delete(SCRATCH_FILES_PATH, true);
}
Directory.CreateDirectory(SCRATCH_FILES_PATH);
if (Directory.Exists(SCRATCH2_FILES_PATH))
{
Directory.Delete(SCRATCH2_FILES_PATH, true);
}
Directory.CreateDirectory(SCRATCH2_FILES_PATH);
}
public void VerifyFiles()
{
if (UseExtensionInsteadOfNameToVerify)
{
VerifyFilesByExtension();
}
else
{
VerifyFilesByName();
}
}
protected void VerifyFilesByName()
{
var extracted =
Directory.EnumerateFiles(SCRATCH_FILES_PATH, "*.*", SearchOption.AllDirectories)
.ToLookup(path => path.Substring(SCRATCH_FILES_PATH.Length));
var original =
Directory.EnumerateFiles(ORIGINAL_FILES_PATH, "*.*", SearchOption.AllDirectories)
.ToLookup(path => path.Substring(ORIGINAL_FILES_PATH.Length));
Assert.AreEqual(extracted.Count, original.Count);
foreach (var orig in original)
{
Assert.IsTrue(extracted.Contains(orig.Key));
CompareFilesByPath(orig.Single(), extracted[orig.Key].Single());
}
}
protected bool UseExtensionInsteadOfNameToVerify { get; set; }
protected void VerifyFilesByExtension()
{
var extracted =
Directory.EnumerateFiles(SCRATCH_FILES_PATH, "*.*", SearchOption.AllDirectories)
.ToLookup(path => Path.GetExtension(path));
var original =
Directory.EnumerateFiles(ORIGINAL_FILES_PATH, "*.*", SearchOption.AllDirectories)
.ToLookup(path => Path.GetExtension(path));
Assert.AreEqual(extracted.Count, original.Count);
foreach (var orig in original)
{
Assert.IsTrue(extracted.Contains(orig.Key));
CompareFilesByPath(orig.Single(), extracted[orig.Key].Single());
}
}
protected void CompareFilesByPath(string file1, string file2)
{
using (var file1Stream = File.OpenRead(file1))
using (var file2Stream = File.OpenRead(file2))
{
Assert.AreEqual(file1Stream.Length, file2Stream.Length);
int byte1 = 0;
int byte2 = 0;
while (byte1 != -1)
{
byte1 = file1Stream.ReadByte();
byte2 = file2Stream.ReadByte();
Assert.AreEqual(byte1, byte2);
}
}
}
protected void CompareArchivesByPath(string file1, string file2)
{
using (var archive1 = ReaderFactory.Open(File.OpenRead(file1), Options.None))
using (var archive2 = ReaderFactory.Open(File.OpenRead(file2), Options.None))
{
while (archive1.MoveToNextEntry())
{
Assert.IsTrue(archive2.MoveToNextEntry());
Assert.AreEqual(archive1.Entry.FilePath, archive2.Entry.FilePath);
}
Assert.IsFalse(archive2.MoveToNextEntry());
}
}
}
}


@@ -1,85 +0,0 @@
using System.IO;
namespace SharpCompress.Test
{
public class TestStream : Stream
{
private Stream stream;
private bool read;
private bool write;
private bool seek;
public TestStream(Stream stream)
: this(stream, true, true, true)
{
}
public bool IsDisposed { get; private set; }
public TestStream(Stream stream, bool read, bool write, bool seek)
{
this.stream = stream;
this.read = read;
this.write = write;
this.seek = seek;
}
protected override void Dispose(bool disposing)
{
base.Dispose(disposing);
stream.Dispose();
IsDisposed = true;
}
public override bool CanRead
{
get { return read; }
}
public override bool CanSeek
{
get { return seek; }
}
public override bool CanWrite
{
get { return write; }
}
public override void Flush()
{
stream.Flush();
}
public override long Length
{
get { return stream.Length; }
}
public override long Position
{
get { return stream.Position; }
set { stream.Position = value; }
}
public override int Read(byte[] buffer, int offset, int count)
{
return stream.Read(buffer, offset, count);
}
public override long Seek(long offset, SeekOrigin origin)
{
return stream.Seek(offset, origin);
}
public override void SetLength(long value)
{
stream.SetLength(value);
}
public override void Write(byte[] buffer, int offset, int count)
{
stream.Write(buffer, offset, count);
}
}
}


@@ -1,36 +0,0 @@
using System.IO;
using SharpCompress.Common;
using SharpCompress.Reader;
using SharpCompress.Writer;
namespace SharpCompress.Test
{
public class WriterTests : TestBase
{
private ArchiveType type;
protected WriterTests(ArchiveType type)
{
this.type = type;
}
protected void Write(CompressionType compressionType, string archive, string archiveToVerifyAgainst)
{
ResetScratch();
using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH2_FILES_PATH, archive)))
using (var writer = WriterFactory.Open(stream, type, compressionType))
{
writer.WriteAll(ORIGINAL_FILES_PATH, "*", SearchOption.AllDirectories);
}
CompareArchivesByPath(Path.Combine(SCRATCH2_FILES_PATH, archive),
Path.Combine(TEST_ARCHIVES_PATH, archiveToVerifyAgainst));
using (Stream stream = File.OpenRead(Path.Combine(SCRATCH2_FILES_PATH, archive)))
using (var reader = ReaderFactory.Open(stream))
{
reader.WriteAllToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath);
}
VerifyFiles();
}
}
}


@@ -1,310 +0,0 @@
using System.IO;
using System.Linq;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using SharpCompress.Archive;
using SharpCompress.Archive.Zip;
using SharpCompress.Common;
using SharpCompress.Writer;
namespace SharpCompress.Test
{
[TestClass]
public class ZipArchiveTests : ArchiveTests
{
public ZipArchiveTests()
{
UseExtensionInsteadOfNameToVerify = true;
}
[TestMethod]
public void Zip_ZipX_ArchiveStreamRead()
{
ArchiveStreamRead("Zip.zipx");
}
[TestMethod]
public void Zip_BZip2_Streamed_ArchiveStreamRead()
{
ArchiveStreamRead("Zip.bzip2.dd.zip");
}
[TestMethod]
public void Zip_BZip2_ArchiveStreamRead()
{
ArchiveStreamRead("Zip.bzip2.zip");
}
[TestMethod]
public void Zip_Deflate_Streamed2_ArchiveStreamRead()
{
ArchiveStreamRead("Zip.deflate.dd-.zip");
}
[TestMethod]
public void Zip_Deflate_Streamed_ArchiveStreamRead()
{
ArchiveStreamRead("Zip.deflate.dd.zip");
}
[TestMethod]
public void Zip_Deflate_ArchiveStreamRead()
{
ArchiveStreamRead("Zip.deflate.zip");
}
[TestMethod]
public void Zip_LZMA_Streamed_ArchiveStreamRead()
{
ArchiveStreamRead("Zip.lzma.dd.zip");
}
[TestMethod]
public void Zip_LZMA_ArchiveStreamRead()
{
ArchiveStreamRead("Zip.lzma.zip");
}
[TestMethod]
public void Zip_PPMd_Streamed_ArchiveStreamRead()
{
ArchiveStreamRead("Zip.ppmd.dd.zip");
}
[TestMethod]
public void Zip_PPMd_ArchiveStreamRead()
{
ArchiveStreamRead("Zip.ppmd.zip");
}
[TestMethod]
public void Zip_None_ArchiveStreamRead()
{
ArchiveStreamRead("Zip.none.zip");
}
[TestMethod]
public void Zip_BZip2_Streamed_ArchiveFileRead()
{
ArchiveFileRead("Zip.bzip2.dd.zip");
}
[TestMethod]
public void Zip_BZip2_ArchiveFileRead()
{
ArchiveFileRead("Zip.bzip2.zip");
}
[TestMethod]
public void Zip_Deflate_Streamed2_ArchiveFileRead()
{
ArchiveFileRead("Zip.deflate.dd-.zip");
}
[TestMethod]
public void Zip_Deflate_Streamed_ArchiveFileRead()
{
ArchiveFileRead("Zip.deflate.dd.zip");
}
[TestMethod]
public void Zip_Deflate_ArchiveFileRead()
{
ArchiveFileRead("Zip.deflate.zip");
}
[TestMethod]
public void Zip_LZMA_Streamed_ArchiveFileRead()
{
ArchiveFileRead("Zip.lzma.dd.zip");
}
[TestMethod]
public void Zip_LZMA_ArchiveFileRead()
{
ArchiveFileRead("Zip.lzma.zip");
}
[TestMethod]
public void Zip_PPMd_Streamed_ArchiveFileRead()
{
ArchiveFileRead("Zip.ppmd.dd.zip");
}
[TestMethod]
public void Zip_PPMd_ArchiveFileRead()
{
ArchiveFileRead("Zip.ppmd.zip");
}
[TestMethod]
public void Zip_None_ArchiveFileRead()
{
ArchiveFileRead("Zip.none.zip");
}
[TestMethod]
public void Zip_Random_Write_Remove()
{
string scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Zip.deflate.mod.zip");
string unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.noEmptyDirs.zip");
string modified = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.mod.zip");
base.ResetScratch();
using (var archive = ZipArchive.Open(unmodified))
{
var entry = archive.Entries.Where(x => x.FilePath.EndsWith("jpg")).Single();
archive.RemoveEntry(entry);
archive.SaveTo(scratchPath, CompressionType.Deflate);
}
CompareArchivesByPath(modified, scratchPath);
}
[TestMethod]
[ExpectedException(typeof(ArchiveException))]
public void Zip_Random_Write_Remove_Fail()
{
string scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Zip.deflate.mod.zip");
string unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.noEmptyDirs.zip");
string modified = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.mod.zip");
base.ResetScratch();
using (var stream = File.OpenRead(unmodified))
using (var archive = ZipArchive.Open(stream))
{
var entry = archive.Entries.Where(x => x.FilePath.EndsWith("jpg")).Single();
archive.RemoveEntry(entry);
archive.SaveTo(scratchPath, CompressionType.Deflate);
}
CompareArchivesByPath(modified, scratchPath);
}
[TestMethod]
public void Zip_Random_Write_Add()
{
string jpg = Path.Combine(ORIGINAL_FILES_PATH, "jpg\\test.jpg");
string scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Zip.deflate.mod.zip");
string unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.mod.zip");
string modified = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.mod2.zip");
base.ResetScratch();
using (var archive = ZipArchive.Open(unmodified))
{
archive.AddEntry("jpg\\test.jpg", jpg);
archive.SaveTo(scratchPath, CompressionType.Deflate);
}
CompareArchivesByPath(modified, scratchPath);
}
[TestMethod]
public void Zip_Create_New()
{
string scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Zip.deflate.noEmptyDirs.zip");
string unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.noEmptyDirs.zip");
base.ResetScratch();
using (var archive = ZipArchive.Create())
{
archive.AddAllFromDirectory(ORIGINAL_FILES_PATH);
archive.SaveTo(scratchPath, CompressionType.Deflate);
}
CompareArchivesByPath(unmodified, scratchPath);
}
[TestMethod]
public void Zip_Deflate_WinzipAES_Read()
{
ResetScratch();
using (var reader = ZipArchive.Open(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.WinzipAES.zip"), "test"))
{
foreach (var entry in reader.Entries.Where(x => !x.IsDirectory))
{
entry.WriteToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
}
}
VerifyFiles();
}
[TestMethod]
public void Zip_BZip2_Pkware_Read()
{
ResetScratch();
using (var reader = ZipArchive.Open(Path.Combine(TEST_ARCHIVES_PATH, "Zip.bzip2.pkware.zip"), "test"))
{
foreach (var entry in reader.Entries.Where(x => !x.IsDirectory))
{
entry.WriteToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
}
}
VerifyFiles();
}
[TestMethod]
public void Zip_Random_Entry_Access()
{
string unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.noEmptyDirs.zip");
base.ResetScratch();
ZipArchive a = ZipArchive.Open(unmodified);
int count = 0;
foreach (var e in a.Entries)
count++;
//Prints 3
Assert.AreEqual(count, 3);
a.Dispose();
a = ZipArchive.Open(unmodified);
int count2 = 0;
foreach (var e in a.Entries)
{
count2++;
//Stop at last file
if (count2 == count)
{
var s = e.OpenEntryStream();
s.ReadByte(); //Actually access stream
s.Dispose();
break;
}
}
int count3 = 0;
foreach (var e in a.Entries)
count3++;
Assert.AreEqual(count3, 3);
}
class NonSeekableMemoryStream : MemoryStream
{
public override bool CanSeek
{
get
{
return false;
}
}
}
[TestMethod]
public void TestSharpCompressWithEmptyStream()
{
MemoryStream stream = new NonSeekableMemoryStream();
using (IWriter zipWriter = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
{
zipWriter.Write("foo.txt", new MemoryStream(new byte[0]));
zipWriter.Write("foo2.txt", new MemoryStream(new byte[10]));
}
stream = new MemoryStream(stream.ToArray());
File.WriteAllBytes("foo.zip", stream.ToArray());
using (var zipArchive = ZipArchive.Open(stream))
{
foreach(var entry in zipArchive.Entries)
{
using (var entryStream = entry.OpenEntryStream())
{
MemoryStream tempStream = new MemoryStream();
const int bufSize = 0x1000;
byte[] buf = new byte[bufSize];
int bytesRead = 0;
while ((bytesRead = entryStream.Read(buf, 0, bufSize)) > 0)
tempStream.Write(buf, 0, bytesRead);
}
}
}
}
}
}


@@ -1,224 +0,0 @@
using System;
using System.IO;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using SharpCompress.Common;
using SharpCompress.Reader;
using SharpCompress.Reader.Zip;
using SharpCompress.Writer;
namespace SharpCompress.Test
{
[TestClass]
public class ZipReaderTests : ReaderTests
{
public ZipReaderTests()
{
UseExtensionInsteadOfNameToVerify = true;
}
[TestMethod]
public void Zip_ZipX_Streamed_Read()
{
Read("Zip.Zipx", CompressionType.LZMA);
}
[TestMethod]
public void Zip_BZip2_Streamed_Read()
{
Read("Zip.bzip2.dd.zip", CompressionType.BZip2);
}
[TestMethod]
public void Zip_BZip2_Read()
{
Read("Zip.bzip2.zip", CompressionType.BZip2);
}
[TestMethod]
public void Zip_Deflate_Streamed2_Read()
{
Read("Zip.deflate.dd-.zip", CompressionType.Deflate);
}
[TestMethod]
public void Zip_Deflate_Streamed_Read()
{
Read("Zip.deflate.dd.zip", CompressionType.Deflate);
}
[TestMethod]
public void Zip_Deflate_Read()
{
Read("Zip.deflate.zip", CompressionType.Deflate);
}
[TestMethod]
public void Zip_LZMA_Streamed_Read()
{
Read("Zip.lzma.dd.zip", CompressionType.LZMA);
}
[TestMethod]
public void Zip_LZMA_Read()
{
Read("Zip.lzma.zip", CompressionType.LZMA);
}
[TestMethod]
public void Zip_PPMd_Streamed_Read()
{
Read("Zip.ppmd.dd.zip", CompressionType.PPMd);
}
[TestMethod]
public void Zip_PPMd_Read()
{
Read("Zip.ppmd.zip", CompressionType.PPMd);
}
[TestMethod]
public void Zip_None_Read()
{
Read("Zip.none.zip", CompressionType.None);
}
[TestMethod]
public void Zip_Deflate_NoEmptyDirs_Read()
{
Read("Zip.deflate.noEmptyDirs.zip", CompressionType.Deflate);
}
[TestMethod]
public void Zip_BZip2_PkwareEncryption_Read()
{
ResetScratch();
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.bzip2.pkware.zip")))
using (var reader = ZipReader.Open(stream, "test"))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.AreEqual(reader.Entry.CompressionType, CompressionType.BZip2);
reader.WriteEntryToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
}
}
}
VerifyFiles();
}
[TestMethod]
public void Zip_Reader_Disposal_Test()
{
ResetScratch();
using (TestStream stream = new TestStream(File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip"))))
{
using (var reader = ReaderFactory.Open(stream, Options.None))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
reader.WriteEntryToDirectory(SCRATCH_FILES_PATH,
ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
}
}
}
Assert.IsTrue(stream.IsDisposed);
}
}
[TestMethod]
public void Zip_Reader_Disposal_Test2()
{
ResetScratch();
using (TestStream stream = new TestStream(File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip"))))
{
var reader = ReaderFactory.Open(stream, Options.None);
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
reader.WriteEntryToDirectory(SCRATCH_FILES_PATH,
ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
}
}
Assert.IsFalse(stream.IsDisposed);
}
}
[TestMethod]
[ExpectedException(typeof(NotSupportedException))]
public void Zip_LZMA_WinzipAES_Read()
{
ResetScratch();
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.lzma.winzipaes.zip")))
using (var reader = ZipReader.Open(stream, "test"))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.AreEqual(reader.Entry.CompressionType, CompressionType.Unknown);
reader.WriteEntryToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
}
}
}
VerifyFiles();
}
[TestMethod]
public void Zip_Deflate_WinzipAES_Read()
{
ResetScratch();
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.WinzipAES.zip")))
using (var reader = ZipReader.Open(stream, "test"))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.AreEqual(reader.Entry.CompressionType, CompressionType.Unknown);
reader.WriteEntryToDirectory(SCRATCH_FILES_PATH,
ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
}
}
}
VerifyFiles();
}
class NonSeekableMemoryStream : MemoryStream
{
public override bool CanSeek
{
get
{
return false;
}
}
}
[TestMethod]
public void TestSharpCompressWithEmptyStream()
{
MemoryStream stream = new NonSeekableMemoryStream();
using (IWriter zipWriter = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
{
zipWriter.Write("foo.txt", new MemoryStream(new byte[0]));
zipWriter.Write("foo2.txt", new MemoryStream(new byte[10]));
}
stream = new MemoryStream(stream.ToArray());
File.WriteAllBytes("foo.zip", stream.ToArray());
using (IReader zipReader = ZipReader.Open(stream))
{
while (zipReader.MoveToNextEntry())
{
using (EntryStream entry = zipReader.OpenEntryStream())
{
MemoryStream tempStream = new MemoryStream();
const int bufSize = 0x1000;
byte[] buf = new byte[bufSize];
int bytesRead = 0;
while ((bytesRead = entry.Read(buf, 0, bufSize)) > 0)
tempStream.Write(buf, 0, bytesRead);
}
}
}
}
}
}


@@ -1,53 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="4.0">
<PropertyGroup>
<!-- The configuration and platform will be used to determine which
assemblies to include from solution and project documentation
sources -->
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>{a226cac9-f2c4-4cc2-94f9-163b3e221817}</ProjectGuid>
<SHFBSchemaVersion>1.9.3.0</SHFBSchemaVersion>
<!-- AssemblyName, Name, and RootNamespace are not used by SHFB but Visual
Studio adds them anyway -->
<AssemblyName>Documentation</AssemblyName>
<RootNamespace>Documentation</RootNamespace>
<Name>Documentation</Name>
<!-- SHFB properties -->
<OutputPath>.\Help\</OutputPath>
<HtmlHelpName>sharpcompress</HtmlHelpName>
<Language>en-US</Language>
<RootNamespaceTitle>SharpCompress</RootNamespaceTitle>
<SandcastlePath>..\..\Program Files (x86)\Sandcastle\</SandcastlePath>
<DocumentationSources>
<DocumentationSource sourceFile="bin\SharpCompress.dll" />
<DocumentationSource sourceFile="bin\SharpCompress.xml" />
</DocumentationSources>
<HelpTitle>SharpCompress</HelpTitle>
<PresentationStyle>Prototype</PresentationStyle>
<HelpFileFormat>HtmlHelp1, Website</HelpFileFormat>
<MissingTags>AutoDocumentCtors, AutoDocumentDispose</MissingTags>
</PropertyGroup>
<!-- There are no properties for these groups. AnyCPU needs to appear in
order for Visual Studio to perform the build. The others are optional
common platform types that may appear. -->
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|x86' ">
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|x86' ">
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|x64' ">
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|x64' ">
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|Win32' ">
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|Win32' ">
</PropertyGroup>
<!-- Import the SHFB build targets -->
<Import Project="$(SHFBROOT)\SandcastleHelpFileBuilder.targets" />
</Project>

Binary file not shown.


@@ -1,78 +1,38 @@
Microsoft Visual Studio Solution File, Format Version 11.00
# Visual Studio 2010
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SharpCompress", "SharpCompress\SharpCompress.csproj", "{10A689CF-76A2-4A4F-96E4-553C33398438}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SharpCompress.Test", "SharpCompress.Test\SharpCompress.Test.csproj", "{15679D7A-F22C-4943-87FF-BF5C76C4A6FD}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SharpCompress.3.5", "SharpCompress\SharpCompress.3.5.csproj", "{43F219EC-21D3-4EA8-8379-151AF9580CB8}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SharpCompress.Silverlight", "SharpCompress\SharpCompress.Silverlight.csproj", "{185ED218-A74F-4F57-85A5-30647495E615}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SharpCompress.WP7", "SharpCompress\SharpCompress.WP7.csproj", "{7FA7D133-1417-4F85-9998-4C618AC8FEDA}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Debug|Mixed Platforms = Debug|Mixed Platforms
Debug|x86 = Debug|x86
Release|Any CPU = Release|Any CPU
Release|Mixed Platforms = Release|Mixed Platforms
Release|x86 = Release|x86
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{10A689CF-76A2-4A4F-96E4-553C33398438}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{10A689CF-76A2-4A4F-96E4-553C33398438}.Debug|Any CPU.Build.0 = Debug|Any CPU
{10A689CF-76A2-4A4F-96E4-553C33398438}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU
{10A689CF-76A2-4A4F-96E4-553C33398438}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU
{10A689CF-76A2-4A4F-96E4-553C33398438}.Debug|x86.ActiveCfg = Debug|Any CPU
{10A689CF-76A2-4A4F-96E4-553C33398438}.Release|Any CPU.ActiveCfg = Release|Any CPU
{10A689CF-76A2-4A4F-96E4-553C33398438}.Release|Any CPU.Build.0 = Release|Any CPU
{10A689CF-76A2-4A4F-96E4-553C33398438}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU
{10A689CF-76A2-4A4F-96E4-553C33398438}.Release|Mixed Platforms.Build.0 = Release|Any CPU
{10A689CF-76A2-4A4F-96E4-553C33398438}.Release|x86.ActiveCfg = Release|Any CPU
{15679D7A-F22C-4943-87FF-BF5C76C4A6FD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{15679D7A-F22C-4943-87FF-BF5C76C4A6FD}.Debug|Any CPU.Build.0 = Debug|Any CPU
{15679D7A-F22C-4943-87FF-BF5C76C4A6FD}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU
{15679D7A-F22C-4943-87FF-BF5C76C4A6FD}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU
{15679D7A-F22C-4943-87FF-BF5C76C4A6FD}.Debug|x86.ActiveCfg = Debug|Any CPU
{15679D7A-F22C-4943-87FF-BF5C76C4A6FD}.Release|Any CPU.ActiveCfg = Release|Any CPU
{15679D7A-F22C-4943-87FF-BF5C76C4A6FD}.Release|Any CPU.Build.0 = Release|Any CPU
{15679D7A-F22C-4943-87FF-BF5C76C4A6FD}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU
{15679D7A-F22C-4943-87FF-BF5C76C4A6FD}.Release|Mixed Platforms.Build.0 = Release|Any CPU
{15679D7A-F22C-4943-87FF-BF5C76C4A6FD}.Release|x86.ActiveCfg = Release|Any CPU
{43F219EC-21D3-4EA8-8379-151AF9580CB8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{43F219EC-21D3-4EA8-8379-151AF9580CB8}.Debug|Any CPU.Build.0 = Debug|Any CPU
{43F219EC-21D3-4EA8-8379-151AF9580CB8}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU
{43F219EC-21D3-4EA8-8379-151AF9580CB8}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU
{43F219EC-21D3-4EA8-8379-151AF9580CB8}.Debug|x86.ActiveCfg = Debug|Any CPU
{43F219EC-21D3-4EA8-8379-151AF9580CB8}.Release|Any CPU.ActiveCfg = Release|Any CPU
{43F219EC-21D3-4EA8-8379-151AF9580CB8}.Release|Any CPU.Build.0 = Release|Any CPU
{43F219EC-21D3-4EA8-8379-151AF9580CB8}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU
{43F219EC-21D3-4EA8-8379-151AF9580CB8}.Release|Mixed Platforms.Build.0 = Release|Any CPU
{43F219EC-21D3-4EA8-8379-151AF9580CB8}.Release|x86.ActiveCfg = Release|Any CPU
{185ED218-A74F-4F57-85A5-30647495E615}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{185ED218-A74F-4F57-85A5-30647495E615}.Debug|Any CPU.Build.0 = Debug|Any CPU
{185ED218-A74F-4F57-85A5-30647495E615}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU
{185ED218-A74F-4F57-85A5-30647495E615}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU
{185ED218-A74F-4F57-85A5-30647495E615}.Debug|x86.ActiveCfg = Debug|Any CPU
{185ED218-A74F-4F57-85A5-30647495E615}.Release|Any CPU.ActiveCfg = Release|Any CPU
{185ED218-A74F-4F57-85A5-30647495E615}.Release|Any CPU.Build.0 = Release|Any CPU
{185ED218-A74F-4F57-85A5-30647495E615}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU
{185ED218-A74F-4F57-85A5-30647495E615}.Release|Mixed Platforms.Build.0 = Release|Any CPU
{185ED218-A74F-4F57-85A5-30647495E615}.Release|x86.ActiveCfg = Release|Any CPU
{7FA7D133-1417-4F85-9998-4C618AC8FEDA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{7FA7D133-1417-4F85-9998-4C618AC8FEDA}.Debug|Any CPU.Build.0 = Debug|Any CPU
{7FA7D133-1417-4F85-9998-4C618AC8FEDA}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU
{7FA7D133-1417-4F85-9998-4C618AC8FEDA}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU
{7FA7D133-1417-4F85-9998-4C618AC8FEDA}.Debug|x86.ActiveCfg = Debug|Any CPU
{7FA7D133-1417-4F85-9998-4C618AC8FEDA}.Release|Any CPU.ActiveCfg = Release|Any CPU
{7FA7D133-1417-4F85-9998-4C618AC8FEDA}.Release|Any CPU.Build.0 = Release|Any CPU
{7FA7D133-1417-4F85-9998-4C618AC8FEDA}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU
{7FA7D133-1417-4F85-9998-4C618AC8FEDA}.Release|Mixed Platforms.Build.0 = Release|Any CPU
{7FA7D133-1417-4F85-9998-4C618AC8FEDA}.Release|x86.ActiveCfg = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
EndGlobal
Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 15
VisualStudioVersion = 15.0.26430.6
MinimumVisualStudioVersion = 10.0.40219.1
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{F18F1765-4A02-42FD-9BEF-F0E2FCBD9D17}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{3C5BE746-03E5-4895-9988-0B57F162F86C}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "tests", "tests", "{0F0901FF-E8D9-426A-B5A2-17C7F47C1529}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SharpCompress", "src\SharpCompress\SharpCompress.csproj", "{FD19DDD8-72B2-4024-8665-0D1F7A2AA998}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SharpCompress.Test", "tests\SharpCompress.Test\SharpCompress.Test.csproj", "{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998}.Debug|Any CPU.Build.0 = Debug|Any CPU
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998}.Release|Any CPU.ActiveCfg = Release|Any CPU
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998}.Release|Any CPU.Build.0 = Release|Any CPU
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Debug|Any CPU.Build.0 = Debug|Any CPU
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Release|Any CPU.ActiveCfg = Release|Any CPU
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(NestedProjects) = preSolution
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998} = {3C5BE746-03E5-4895-9988-0B57F162F86C}
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F} = {0F0901FF-E8D9-426A-B5A2-17C7F47C1529}
EndGlobalSection
EndGlobal


@@ -0,0 +1,129 @@
<wpf:ResourceDictionary xml:space="preserve" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml" xmlns:s="clr-namespace:System;assembly=mscorlib" xmlns:ss="urn:shemas-jetbrains-com:settings-storage-xaml" xmlns:wpf="http://schemas.microsoft.com/winfx/2006/xaml/presentation">
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArgumentsStyleNamedExpression/@EntryIndexedValue">DO_NOT_SHOW</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArrangeBraces_005Fdowhile/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArrangeBraces_005Ffixed/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArrangeBraces_005Ffor/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArrangeBraces_005Fforeach/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArrangeBraces_005Fifelse/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArrangeBraces_005Flock/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArrangeBraces_005Fusing/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArrangeBraces_005Fwhile/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=MethodSupportsCancellation/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=RedundantExplicitParamsArrayCreation/@EntryIndexedValue">DO_NOT_SHOW</s:String>
<s:String x:Key="/Default/CodeStyle/CodeCleanup/Profiles/=Basic_0020Clean/@EntryIndexedValue">&lt;?xml version="1.0" encoding="utf-16"?&gt;&lt;Profile name="Basic Clean"&gt;&lt;CSOptimizeUsings&gt;&lt;OptimizeUsings&gt;True&lt;/OptimizeUsings&gt;&lt;EmbraceInRegion&gt;False&lt;/EmbraceInRegion&gt;&lt;RegionName&gt;&lt;/RegionName&gt;&lt;/CSOptimizeUsings&gt;&lt;CSShortenReferences&gt;True&lt;/CSShortenReferences&gt;&lt;CSRemoveCodeRedundancies&gt;True&lt;/CSRemoveCodeRedundancies&gt;&lt;CSMakeFieldReadonly&gt;True&lt;/CSMakeFieldReadonly&gt;&lt;CSCodeStyleAttributes ArrangeTypeAccessModifier="False" ArrangeTypeMemberAccessModifier="False" SortModifiers="False" RemoveRedundantParentheses="False" AddMissingParentheses="False" ArrangeBraces="True" ArrangeAttributes="False" ArrangeArgumentsStyle="False" /&gt;&lt;RemoveCodeRedundancies&gt;True&lt;/RemoveCodeRedundancies&gt;&lt;CSUseAutoProperty&gt;True&lt;/CSUseAutoProperty&gt;&lt;CSMakeAutoPropertyGetOnly&gt;True&lt;/CSMakeAutoPropertyGetOnly&gt;&lt;CSReformatCode&gt;True&lt;/CSReformatCode&gt;&lt;/Profile&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/CodeCleanup/SilentCleanupProfile/@EntryValue">Basic Clean</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/APPLY_ON_COMPLETION/@EntryValue">True</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/ARGUMENTS_NAMED/@EntryValue">Named</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_FOR/@EntryValue">Required</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_FOREACH/@EntryValue">Required</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_IFELSE/@EntryValue">Required</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_WHILE/@EntryValue">Required</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_FIRST_ARG_BY_PAREN/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_LINQ_QUERY/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_ARGUMENT/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_ARRAY_AND_OBJECT_INITIALIZER/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_CALLS_CHAIN/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_EXPRESSION/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_EXTENDS_LIST/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_FOR_STMT/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_PARAMETER/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTIPLE_DECLARATION/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTLINE_TYPE_PARAMETER_CONSTRAINS/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTLINE_TYPE_PARAMETER_LIST/@EntryValue">True</s:Boolean>
<s:Int64 x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/BLANK_LINES_AFTER_START_COMMENT/@EntryValue">0</s:Int64>
<s:Int64 x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/BLANK_LINES_BEFORE_SINGLE_LINE_COMMENT/@EntryValue">1</s:Int64>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_ATTRIBUTE_STYLE/@EntryValue">SEPARATE</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_FIXED_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_FOR_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_FOREACH_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_IFELSE_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_USING_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_WHILE_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/INDENT_ANONYMOUS_METHOD_BLOCK/@EntryValue">True</s:Boolean>
<s:Int64 x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/KEEP_BLANK_LINES_IN_CODE/@EntryValue">1</s:Int64>
<s:Int64 x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/KEEP_BLANK_LINES_IN_DECLARATIONS/@EntryValue">1</s:Int64>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_ACCESSOR_ATTRIBUTE_ON_SAME_LINE_EX/@EntryValue">NEVER</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_ACCESSORHOLDER_ATTRIBUTE_ON_SAME_LINE_EX/@EntryValue">NEVER</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_CONSTRUCTOR_INITIALIZER_ON_SAME_LINE/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_FIELD_ATTRIBUTE_ON_SAME_LINE/@EntryValue">False</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_FIELD_ATTRIBUTE_ON_SAME_LINE_EX/@EntryValue">NEVER</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_ACCESSORHOLDER_ON_SINGLE_LINE/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_ACCESSOR_ATTRIBUTE_ON_SAME_LINE/@EntryValue">False</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_EMBEDDED_STATEMENT_ON_SAME_LINE/@EntryValue">NEVER</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_INITIALIZER_ON_SINGLE_LINE/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_WHILE_ON_NEW_LINE/@EntryValue">True</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SIMPLE_EMBEDDED_STATEMENT_STYLE/@EntryValue">LINE_BREAK</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SPACE_AFTER_TYPECAST_PARENTHESES/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SPACE_AROUND_ARROW_OP/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SPACE_AROUND_MULTIPLICATIVE_OP/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SPACE_BEFORE_SIZEOF_PARENTHESES/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SPACE_BEFORE_TYPEOF_PARENTHESES/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/STICK_COMMENT/@EntryValue">False</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_ARGUMENTS_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_ARRAY_INITIALIZER_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_EXTENDS_LIST_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_LINES/@EntryValue">False</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_PARAMETERS_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForBuiltInTypes/@EntryValue">UseVarWhenEvident</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForOtherTypes/@EntryValue">UseVarWhenEvident</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForSimpleTypes/@EntryValue">UseVarWhenEvident</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateInstanceFields/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="_" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateStaticFields/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateStaticReadonly/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=StaticReadonly/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FCONSTANT/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FFUNCTION/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FVARIABLE/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FCLASS/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FCONSTRUCTOR/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FFUNCTION/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FGLOBAL_005FVARIABLE/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FLABEL/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FLOCAL_005FCONSTRUCTOR/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FLOCAL_005FVARIABLE/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FOBJECT_005FPROPERTY_005FOF_005FFUNCTION/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FPARAMETER/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FCLASS/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FENUM/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FENUM_005FMEMBER/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FINTERFACE/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="I" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FMODULE/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FMODULE_005FEXPORTED/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FMODULE_005FLOCAL/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPRIVATE_005FMEMBER_005FACCESSOR/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPRIVATE_005FSTATIC_005FTYPE_005FFIELD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPRIVATE_005FTYPE_005FFIELD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPRIVATE_005FTYPE_005FMETHOD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPROTECTED_005FMEMBER_005FACCESSOR/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPROTECTED_005FSTATIC_005FTYPE_005FFIELD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPROTECTED_005FTYPE_005FFIELD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPROTECTED_005FTYPE_005FMETHOD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPUBLIC_005FMEMBER_005FACCESSOR/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPUBLIC_005FSTATIC_005FTYPE_005FFIELD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPUBLIC_005FTYPE_005FFIELD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPUBLIC_005FTYPE_005FMETHOD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FTYPE_005FPARAMETER/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="T" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/WebNaming/UserRules/=ASP_005FFIELD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/WebNaming/UserRules/=ASP_005FHTML_005FCONTROL/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/WebNaming/UserRules/=ASP_005FTAG_005FNAME/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/WebNaming/UserRules/=ASP_005FTAG_005FPREFIX/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/XamlNaming/UserRules/=NAMESPACE_005FALIAS/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/XamlNaming/UserRules/=XAML_005FFIELD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/XamlNaming/UserRules/=XAML_005FRESOURCE/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpAttributeForSingleLineMethodUpgrade/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpKeepExistingMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpPlaceEmbeddedOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpRenamePlacementToArrangementMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpUseContinuousIndentInsideBracesMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EAddAccessorOwnerDeclarationBracesMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002ECSharpPlaceAttributeOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateBlankLinesAroundFieldToBlankLinesAroundProperty/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateThisQualifierSettings/@EntryIndexedValue">True</s:Boolean></wpf:ResourceDictionary>

BIN
SharpCompress.snk Normal file

Binary file not shown.

View File

@@ -1,90 +0,0 @@
using System.IO;
using SharpCompress.Common;
namespace SharpCompress.Archive
{
public static class AbstractWritableArchiveExtensions
{
public static void SaveTo<TEntry, TVolume>(this AbstractWritableArchive<TEntry, TVolume> writableArchive,
Stream stream, CompressionType compressionType)
where TEntry : IArchiveEntry
where TVolume : IVolume
{
writableArchive.SaveTo(stream, new CompressionInfo {Type = compressionType});
}
#if !PORTABLE
public static void AddEntry<TEntry, TVolume>(this AbstractWritableArchive<TEntry, TVolume> writableArchive,
string entryPath, string filePath)
where TEntry : IArchiveEntry
where TVolume : IVolume
{
var fileInfo = new FileInfo(filePath);
if (!fileInfo.Exists)
{
throw new FileNotFoundException("Could not AddEntry: " + filePath);
}
writableArchive.AddEntry(entryPath, new FileInfo(filePath).OpenRead(), true, fileInfo.Length,
fileInfo.LastWriteTime);
}
public static void SaveTo<TEntry, TVolume>(this AbstractWritableArchive<TEntry, TVolume> writableArchive,
string filePath, CompressionType compressionType)
where TEntry : IArchiveEntry
where TVolume : IVolume
{
writableArchive.SaveTo(new FileInfo(filePath), new CompressionInfo {Type = compressionType});
}
public static void SaveTo<TEntry, TVolume>(this AbstractWritableArchive<TEntry, TVolume> writableArchive,
FileInfo fileInfo, CompressionType compressionType)
where TEntry : IArchiveEntry
where TVolume : IVolume
{
using (var stream = fileInfo.Open(FileMode.Create, FileAccess.Write))
{
writableArchive.SaveTo(stream, new CompressionInfo {Type = compressionType});
}
}
public static void SaveTo<TEntry, TVolume>(this AbstractWritableArchive<TEntry, TVolume> writableArchive,
string filePath, CompressionInfo compressionInfo)
where TEntry : IArchiveEntry
where TVolume : IVolume
{
writableArchive.SaveTo(new FileInfo(filePath), compressionInfo);
}
public static void SaveTo<TEntry, TVolume>(this AbstractWritableArchive<TEntry, TVolume> writableArchive,
FileInfo fileInfo, CompressionInfo compressionInfo)
where TEntry : IArchiveEntry
where TVolume : IVolume
{
using (var stream = fileInfo.Open(FileMode.Create, FileAccess.Write))
{
writableArchive.SaveTo(stream, compressionInfo);
}
}
public static void AddAllFromDirectory<TEntry, TVolume>(
this AbstractWritableArchive<TEntry, TVolume> writableArchive,
string filePath, string searchPattern = "*.*", SearchOption searchOption = SearchOption.AllDirectories)
where TEntry : IArchiveEntry
where TVolume : IVolume
{
#if THREEFIVE
foreach (var path in Directory.GetFiles(filePath, searchPattern, searchOption))
#else
foreach (var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption))
#endif
{
var fileInfo = new FileInfo(path);
writableArchive.AddEntry(path.Substring(filePath.Length), fileInfo.OpenRead(), true, fileInfo.Length,
fileInfo.LastWriteTime);
}
}
#endif
}
}
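
The extension methods above cover the file-system conveniences: AddEntry from a path, AddAllFromDirectory, and SaveTo a file. A minimal usage sketch follows, assuming ZipArchive.Create() exists as one of the writable AbstractWritableArchive implementations (it is not part of this diff):

using SharpCompress.Archive;
using SharpCompress.Archive.Zip;
using SharpCompress.Common;

static class PackFolderSketch
{
    static void PackFolder(string sourceFolder, string zipPath)
    {
        // Hypothetical: add every file under sourceFolder, then save with Deflate compression.
        using (var archive = ZipArchive.Create())
        {
            archive.AddAllFromDirectory(sourceFolder);
            archive.SaveTo(zipPath, CompressionType.Deflate);
        }
    }
}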

View File

@@ -1,121 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
namespace SharpCompress.Archive
{
public abstract class AbstractWritableArchive<TEntry, TVolume> : AbstractArchive<TEntry, TVolume>
where TEntry : IArchiveEntry
where TVolume : IVolume
{
private readonly List<TEntry> newEntries = new List<TEntry>();
private readonly List<TEntry> removedEntries = new List<TEntry>();
private readonly List<TEntry> modifiedEntries = new List<TEntry>();
private bool hasModifications;
private readonly bool anyNotWritable;
internal AbstractWritableArchive(ArchiveType type)
: base(type)
{
}
internal AbstractWritableArchive(ArchiveType type, IEnumerable<Stream> streams, Options options)
: base(type, streams, options)
{
if (streams.Any(x => !x.CanWrite))
{
anyNotWritable = true;
}
}
#if !PORTABLE
internal AbstractWritableArchive(ArchiveType type, FileInfo fileInfo, Options options)
: base(type, fileInfo, options)
{
}
#endif
private void CheckWritable()
{
if (anyNotWritable)
{
throw new ArchiveException("All Archive streams must be Writable to use Archive writing functionality.");
}
}
public override ICollection<TEntry> Entries
{
get
{
if (hasModifications)
{
return modifiedEntries;
}
return base.Entries;
}
}
private void RebuildModifiedCollection()
{
hasModifications = true;
modifiedEntries.Clear();
modifiedEntries.AddRange(OldEntries.Concat(newEntries));
}
private IEnumerable<TEntry> OldEntries
{
get { return base.Entries.Where(x => !removedEntries.Contains(x)); }
}
public void RemoveEntry(TEntry entry)
{
CheckWritable();
if (!removedEntries.Contains(entry))
{
removedEntries.Add(entry);
RebuildModifiedCollection();
}
}
public void AddEntry(string filePath, Stream source,
long size = 0, DateTime? modified = null)
{
CheckWritable();
newEntries.Add(CreateEntry(filePath, source, size, modified, false));
RebuildModifiedCollection();
}
public void AddEntry(string filePath, Stream source, bool closeStream,
long size = 0, DateTime? modified = null)
{
CheckWritable();
newEntries.Add(CreateEntry(filePath, source, size, modified, closeStream));
RebuildModifiedCollection();
}
#if !PORTABLE
public void AddEntry(string filePath, FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
throw new ArgumentException("FileInfo does not exist.");
}
AddEntry(filePath, fileInfo.OpenRead(), true, fileInfo.Length, fileInfo.LastWriteTime);
}
#endif
public void SaveTo(Stream stream, CompressionInfo compressionType)
{
SaveTo(stream, compressionType, OldEntries, newEntries);
}
protected abstract TEntry CreateEntry(string filePath, Stream source, long size, DateTime? modified,
bool closeStream);
protected abstract void SaveTo(Stream stream, CompressionInfo compressionType,
IEnumerable<TEntry> oldEntries, IEnumerable<TEntry> newEntries);
}
}

View File

@@ -1,41 +0,0 @@
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
namespace SharpCompress.Archive.GZip
{
public class GZipArchiveEntry : GZipEntry, IArchiveEntry
{
private GZipArchive archive;
internal GZipArchiveEntry(GZipArchive archive, GZipFilePart part)
: base(part)
{
this.archive = archive;
}
public virtual Stream OpenEntryStream()
{
return Parts.Single().GetCompressedStream();
}
#region IArchiveEntry Members
public void WriteTo(Stream streamToWriteTo)
{
if (IsEncrypted)
{
throw new PasswordProtectedException("Entry is password protected and cannot be extracted.");
}
this.Extract(archive, streamToWriteTo);
}
public bool IsComplete
{
get { return true; }
}
#endregion
}
}

View File

@@ -1,102 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Archive.GZip
{
internal class GZipWritableArchiveEntry : GZipArchiveEntry
{
private string path;
private long size;
private DateTime? lastModified;
private bool closeStream;
internal GZipWritableArchiveEntry(GZipArchive archive, Stream stream,
string path, long size, DateTime? lastModified, bool closeStream)
: base(archive, null)
{
this.Stream = stream;
this.path = path;
this.size = size;
this.lastModified = lastModified;
this.closeStream = closeStream;
}
public override uint Crc
{
get { return 0; }
}
public override string FilePath
{
get { return path; }
}
public override long CompressedSize
{
get { return 0; }
}
public override long Size
{
get { return size; }
}
public override DateTime? LastModifiedTime
{
get { return lastModified; }
}
public override DateTime? CreatedTime
{
get { return null; }
}
public override DateTime? LastAccessedTime
{
get { return null; }
}
public override DateTime? ArchivedTime
{
get { return null; }
}
public override bool IsEncrypted
{
get { return false; }
}
public override bool IsDirectory
{
get { return false; }
}
public override bool IsSplit
{
get { return false; }
}
internal override IEnumerable<FilePart> Parts
{
get { throw new NotImplementedException(); }
}
internal Stream Stream { get; private set; }
public override Stream OpenEntryStream()
{
return new NonDisposingStream(Stream);
}
internal override void Close()
{
if (closeStream)
{
Stream.Dispose();
}
}
}
}

View File

@@ -1,65 +0,0 @@
using System.IO;
using SharpCompress.Common;
#if THREEFIVE
using SharpCompress.Common.Rar.Headers;
#endif
namespace SharpCompress.Archive
{
public static class IArchiveEntryExtensions
{
#if !PORTABLE
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(this IArchiveEntry entry, string destinationDirectory,
ExtractOptions options = ExtractOptions.Overwrite)
{
string destinationFileName = string.Empty;
string file = Path.GetFileName(entry.FilePath);
if (options.HasFlag(ExtractOptions.ExtractFullPath))
{
string folder = Path.GetDirectoryName(entry.FilePath);
string destdir = Path.Combine(destinationDirectory, folder);
if (!Directory.Exists(destdir))
{
Directory.CreateDirectory(destdir);
}
destinationFileName = Path.Combine(destdir, file);
}
else
{
destinationFileName = Path.Combine(destinationDirectory, file);
}
entry.WriteToFile(destinationFileName, options);
}
/// <summary>
/// Extract to specific file
/// </summary>
public static void WriteToFile(this IArchiveEntry entry, string destinationFileName,
ExtractOptions options = ExtractOptions.Overwrite)
{
if (entry.IsDirectory)
{
return;
}
FileMode fm = FileMode.Create;
if (!options.HasFlag(ExtractOptions.Overwrite))
{
fm = FileMode.CreateNew;
}
using (FileStream fs = File.Open(destinationFileName, fm))
// using (Stream entryStream = entry.OpenEntryStream())
{
//entryStream.TransferTo(fs);
entry.WriteTo(fs);
}
}
#endif
}
}
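
WriteToDirectory above derives the destination path from the entry's FilePath and the ExtractOptions flags, creating intermediate directories when ExtractFullPath is set. A hedged sketch of extracting a whole archive with it (ArchiveFactory.Open is assumed to be available elsewhere in the library; it is not shown in this diff):

using SharpCompress.Archive;
using SharpCompress.Common;

static class ExtractAllSketch
{
    static void ExtractAll(string archivePath, string destination)
    {
        using (var archive = ArchiveFactory.Open(archivePath))
        {
            foreach (var entry in archive.Entries)
            {
                // WriteToFile skips directories anyway, but skipping here avoids extra work.
                if (!entry.IsDirectory)
                {
                    entry.WriteToDirectory(destination,
                        ExtractOptions.Overwrite | ExtractOptions.ExtractFullPath);
                }
            }
        }
    }
}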

View File

@@ -1,51 +0,0 @@
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
namespace SharpCompress.Archive.Rar
{
/// <summary>
/// A rar part based on a FileInfo object
/// </summary>
internal class FileInfoRarArchiveVolume : RarVolume
{
internal FileInfoRarArchiveVolume(FileInfo fileInfo, Options options)
: base(StreamingMode.Seekable, fileInfo.OpenRead(), FixOptions(options))
{
FileInfo = fileInfo;
FileParts = base.GetVolumeFileParts().ToReadOnly();
}
private static Options FixOptions(Options options)
{
//make sure we're closing streams with fileinfo
if (options.HasFlag(Options.KeepStreamsOpen))
{
options = (Options) FlagUtility.SetFlag(options, Options.KeepStreamsOpen, false);
}
return options;
}
internal ReadOnlyCollection<RarFilePart> FileParts { get; private set; }
internal FileInfo FileInfo { get; private set; }
internal override RarFilePart CreateFilePart(FileHeader fileHeader, MarkHeader markHeader)
{
return new FileInfoRarFilePart(this, markHeader, fileHeader, FileInfo);
}
internal override IEnumerable<RarFilePart> ReadFileParts()
{
return FileParts;
}
public override FileInfo VolumeFile
{
get { return FileInfo; }
}
}
}

View File

@@ -1,36 +0,0 @@
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Archive.Rar
{
internal class FileInfoRarFilePart : RarFilePart
{
private FileInfoRarArchiveVolume volume;
internal FileInfoRarFilePart(FileInfoRarArchiveVolume volume, MarkHeader mh, FileHeader fh, FileInfo fi)
: base(mh, fh)
{
this.volume = volume;
FileInfo = fi;
}
internal FileInfo FileInfo { get; private set; }
internal override Stream GetCompressedStream()
{
var stream = volume.Stream;
stream.Position = FileHeader.DataStartPosition;
return stream;
}
internal override string FilePartName
{
get
{
return "Rar File: " + FileInfo.FullName
+ " File Entry: " + FileHeader.FileName;
}
}
}
}

View File

@@ -1,203 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressor.Rar;
using SharpCompress.IO;
using SharpCompress.Reader;
using SharpCompress.Reader.Rar;
namespace SharpCompress.Archive.Rar
{
public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
{
private readonly Unpack unpack = new Unpack();
internal Unpack Unpack
{
get { return unpack; }
}
#if !PORTABLE
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
internal RarArchive(FileInfo fileInfo, Options options)
: base(ArchiveType.Rar, fileInfo, options)
{
}
protected override IEnumerable<RarVolume> LoadVolumes(FileInfo file, Options options)
{
return RarArchiveVolumeFactory.GetParts(file, options);
}
#endif
/// <summary>
/// Takes multiple seekable Streams for a multi-part archive
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
internal RarArchive(IEnumerable<Stream> streams, Options options)
: base(ArchiveType.Rar, streams, options)
{
}
protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes)
{
return RarArchiveEntryFactory.GetEntries(this, volumes);
}
protected override IEnumerable<RarVolume> LoadVolumes(IEnumerable<Stream> streams, Options options)
{
return RarArchiveVolumeFactory.GetParts(streams, options);
}
protected override IReader CreateReaderForSolidExtraction()
{
var stream = Volumes.First().Stream;
stream.Position = 0;
return RarReader.Open(stream);
}
public override bool IsSolid
{
get { return Volumes.First().IsSolidArchive; }
}
#region Creation
#if !PORTABLE
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
public static RarArchive Open(string filePath)
{
return Open(filePath, Options.None);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
public static RarArchive Open(FileInfo fileInfo)
{
return Open(fileInfo, Options.None);
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
public static RarArchive Open(string filePath, Options options)
{
filePath.CheckNotNullOrEmpty("filePath");
return Open(new FileInfo(filePath), options);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
public static RarArchive Open(FileInfo fileInfo, Options options)
{
fileInfo.CheckNotNull("fileInfo");
return new RarArchive(fileInfo, options);
}
#endif
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
public static RarArchive Open(Stream stream)
{
stream.CheckNotNull("stream");
return Open(stream.AsEnumerable());
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
public static RarArchive Open(Stream stream, Options options)
{
stream.CheckNotNull("stream");
return Open(stream.AsEnumerable(), options);
}
/// <summary>
/// Takes multiple seekable Streams for a multi-part archive
/// </summary>
/// <param name="streams"></param>
public static RarArchive Open(IEnumerable<Stream> streams)
{
streams.CheckNotNull("streams");
return new RarArchive(streams, Options.KeepStreamsOpen);
}
/// <summary>
/// Takes multiple seekable Streams for a multi-part archive
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
public static RarArchive Open(IEnumerable<Stream> streams, Options options)
{
streams.CheckNotNull("streams");
return new RarArchive(streams, options);
}
#if !PORTABLE
public static bool IsRarFile(string filePath)
{
return IsRarFile(new FileInfo(filePath));
}
public static bool IsRarFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
return false;
}
using (Stream stream = fileInfo.OpenRead())
{
return IsRarFile(stream);
}
}
#endif
public static bool IsRarFile(Stream stream)
{
return IsRarFile(stream, Options.None);
}
public static bool IsRarFile(Stream stream, Options options)
{
try
{
var headerFactory = new RarHeaderFactory(StreamingMode.Seekable, options);
RarHeader header = headerFactory.ReadHeaders(stream).FirstOrDefault();
if (header == null)
{
return false;
}
return Enum.IsDefined(typeof (HeaderType), header.HeaderType);
}
catch
{
return false;
}
}
#endregion
}
}
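
IsRarFile probes the first header without throwing, and the Open overloads accept a path, a FileInfo, a single seekable stream, or multiple streams for multi-part archives. A minimal sketch of listing the entries of a rar file on disk:

using System;
using SharpCompress.Archive.Rar;

static class ListRarSketch
{
    static void ListEntries(string rarPath)
    {
        if (!RarArchive.IsRarFile(rarPath))
        {
            return;
        }
        using (var archive = RarArchive.Open(rarPath))
        {
            foreach (var entry in archive.Entries)
            {
                Console.WriteLine("{0} ({1} bytes)", entry.FilePath, entry.Size);
            }
        }
    }
}

Solid archives are the exception: RarArchiveEntry.WriteTo (shown in the next file) throws for them, so extraction has to go through the sequential reader instead.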

View File

@@ -1,105 +0,0 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressor.Rar;
namespace SharpCompress.Archive.Rar
{
public class RarArchiveEntry : RarEntry, IArchiveEntry
{
private readonly ICollection<RarFilePart> parts;
internal RarArchiveEntry(RarArchive archive, IEnumerable<RarFilePart> parts)
{
this.parts = parts.ToList();
Archive = archive;
}
public override CompressionType CompressionType
{
get { return CompressionType.Rar; }
}
private RarArchive Archive { get; set; }
internal override IEnumerable<FilePart> Parts
{
get { return parts.Cast<FilePart>(); }
}
internal override FileHeader FileHeader
{
get { return parts.First().FileHeader; }
}
public override uint Crc
{
get
{
CheckIncomplete();
return parts.Select(fp => fp.FileHeader)
.Where(fh => !fh.FileFlags.HasFlag(FileFlags.SPLIT_AFTER))
.Single().FileCRC;
}
}
public override long Size
{
get
{
CheckIncomplete();
return parts.First().FileHeader.UncompressedSize;
}
}
public override long CompressedSize
{
get
{
CheckIncomplete();
return parts.Aggregate(0L, (total, fp) => { return total + fp.FileHeader.CompressedSize; });
}
}
public Stream OpenEntryStream()
{
return new RarStream(Archive.Unpack, FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), Archive));
}
public void WriteTo(Stream streamToWriteTo)
{
CheckIncomplete();
if (Archive.IsSolidArchive())
{
throw new InvalidFormatException("Cannot use Archive random access on SOLID Rar files.");
}
if (IsEncrypted)
{
throw new PasswordProtectedException("Entry is password protected and cannot be extracted.");
}
this.Extract(Archive, streamToWriteTo);
}
public bool IsComplete
{
get { return parts.Select(fp => fp.FileHeader).Any(fh => !fh.FileFlags.HasFlag(FileFlags.SPLIT_AFTER)); }
}
private void CheckIncomplete()
{
if (!IsComplete)
{
throw new IncompleteArchiveException("ArchiveEntry is incomplete and cannot perform this operation.");
}
}
internal override void Close()
{
}
}
}

View File

@@ -1,28 +0,0 @@
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Archive.Rar
{
internal class SeekableStreamFilePart : RarFilePart
{
internal SeekableStreamFilePart(MarkHeader mh, FileHeader fh, Stream stream)
: base(mh, fh)
{
Stream = stream;
}
internal Stream Stream { get; private set; }
internal override Stream GetCompressedStream()
{
Stream.Position = FileHeader.DataStartPosition;
return Stream;
}
internal override string FilePartName
{
get { return "Unknown Stream - File Entry: " + base.FileHeader.FileName; }
}
}
}

View File

@@ -1,34 +0,0 @@
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
namespace SharpCompress.Archive.Rar
{
internal class StreamRarArchiveVolume : RarVolume
{
internal StreamRarArchiveVolume(Stream stream, Options options)
: base(StreamingMode.Seekable, stream, options)
{
}
#if !PORTABLE
public override FileInfo VolumeFile
{
get { return null; }
}
#endif
internal override IEnumerable<RarFilePart> ReadFileParts()
{
return GetVolumeFileParts();
}
internal override RarFilePart CreateFilePart(FileHeader fileHeader, MarkHeader markHeader)
{
return new SeekableStreamFilePart(markHeader, fileHeader, Stream);
}
}
}

View File

@@ -1,44 +0,0 @@
using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.SevenZip;
namespace SharpCompress.Archive.SevenZip
{
public class SevenZipArchiveEntry : SevenZipEntry, IArchiveEntry
{
private SevenZipArchive archive;
internal SevenZipArchiveEntry(SevenZipArchive archive, SevenZipFilePart part)
: base(part)
{
this.archive = archive;
}
public Stream OpenEntryStream()
{
return FilePart.GetCompressedStream();
}
public void WriteTo(Stream stream)
{
if (IsEncrypted)
{
throw new PasswordProtectedException("Entry is password protected and cannot be extracted.");
}
this.Extract(archive, stream);
}
public bool IsComplete
{
get { return true; }
}
/// <summary>
/// This is a 7Zip Anti item
/// </summary>
public bool IsAnti
{
get { return FilePart.Header.IsAnti; }
}
}
}

View File

@@ -1,102 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Archive.Tar
{
internal class TarWritableArchiveEntry : TarArchiveEntry
{
private string path;
private long size;
private DateTime? lastModified;
private bool closeStream;
internal TarWritableArchiveEntry(TarArchive archive, Stream stream, CompressionType compressionType,
string path, long size, DateTime? lastModified, bool closeStream)
: base(archive, null, compressionType)
{
this.Stream = stream;
this.path = path;
this.size = size;
this.lastModified = lastModified;
this.closeStream = closeStream;
}
public override uint Crc
{
get { return 0; }
}
public override string FilePath
{
get { return path; }
}
public override long CompressedSize
{
get { return 0; }
}
public override long Size
{
get { return size; }
}
public override DateTime? LastModifiedTime
{
get { return lastModified; }
}
public override DateTime? CreatedTime
{
get { return null; }
}
public override DateTime? LastAccessedTime
{
get { return null; }
}
public override DateTime? ArchivedTime
{
get { return null; }
}
public override bool IsEncrypted
{
get { return false; }
}
public override bool IsDirectory
{
get { return false; }
}
public override bool IsSplit
{
get { return false; }
}
internal override IEnumerable<FilePart> Parts
{
get { throw new NotImplementedException(); }
}
internal Stream Stream { get; private set; }
public override Stream OpenEntryStream()
{
return new NonDisposingStream(Stream);
}
internal override void Close()
{
if (closeStream)
{
Stream.Dispose();
}
}
}
}

View File

@@ -1,41 +0,0 @@
using System.IO;
using System.Linq;
using SharpCompress.Common.Zip;
namespace SharpCompress.Archive.Zip
{
public class ZipArchiveEntry : ZipEntry, IArchiveEntry
{
private ZipArchive archive;
internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart part)
: base(part)
{
this.archive = archive;
}
public virtual Stream OpenEntryStream()
{
return Parts.Single().GetCompressedStream();
}
#region IArchiveEntry Members
public void WriteTo(Stream streamToWriteTo)
{
this.Extract(archive, streamToWriteTo);
}
public bool IsComplete
{
get { return true; }
}
#endregion
public string Comment
{
get { return (Parts.Single() as SeekableZipFilePart).Comment; }
}
}
}
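
The Comment property above reads the per-entry comment straight out of the seekable zip file part. A hedged sketch of printing it for each entry (ZipArchive.Open(string) is assumed, mirroring the other archive types; it is not shown in this diff):

using System;
using SharpCompress.Archive.Zip;

static class ZipCommentSketch
{
    static void PrintComments(string zipPath)
    {
        using (var archive = ZipArchive.Open(zipPath))
        {
            foreach (var entry in archive.Entries)
            {
                Console.WriteLine("{0}: {1}", entry.FilePath, entry.Comment);
            }
        }
    }
}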

View File

@@ -1,102 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Archive.Zip
{
internal class ZipWritableArchiveEntry : ZipArchiveEntry
{
private string path;
private long size;
private DateTime? lastModified;
private bool closeStream;
internal ZipWritableArchiveEntry(ZipArchive archive, Stream stream, string path, long size,
DateTime? lastModified, bool closeStream)
: base(archive, null)
{
this.Stream = stream;
this.path = path;
this.size = size;
this.lastModified = lastModified;
this.closeStream = closeStream;
}
public override uint Crc
{
get { return 0; }
}
public override string FilePath
{
get { return path; }
}
public override long CompressedSize
{
get { return 0; }
}
public override long Size
{
get { return size; }
}
public override DateTime? LastModifiedTime
{
get { return lastModified; }
}
public override DateTime? CreatedTime
{
get { return null; }
}
public override DateTime? LastAccessedTime
{
get { return null; }
}
public override DateTime? ArchivedTime
{
get { return null; }
}
public override bool IsEncrypted
{
get { return false; }
}
public override bool IsDirectory
{
get { return false; }
}
public override bool IsSplit
{
get { return false; }
}
internal override IEnumerable<FilePart> Parts
{
get { throw new NotImplementedException(); }
}
internal Stream Stream { get; private set; }
public override Stream OpenEntryStream()
{
return new NonDisposingStream(Stream);
}
internal override void Close()
{
if (closeStream)
{
Stream.Dispose();
}
}
}
}

View File

@@ -1,25 +0,0 @@
using System.Reflection;
using System.Runtime.CompilerServices;
#if SILVERLIGHT
[assembly: AssemblyTitle("SharpCompress.Silverlight")]
[assembly: AssemblyProduct("SharpCompress.Silverlight")]
#else
#if PORTABLE
[assembly: AssemblyTitle("SharpCompress.Silverlight")]
[assembly: AssemblyProduct("SharpCompress.Silverlight")]
#else
#if THREEFIVE
[assembly: AssemblyTitle("SharpCompress.3.5")]
[assembly: AssemblyProduct("SharpCompress.3.5")]
#else
[assembly: AssemblyTitle("SharpCompress")]
[assembly: AssemblyProduct("SharpCompress")]
[assembly:
InternalsVisibleTo(
"SharpCompress.Test, PublicKey=002400000480000094000000060200000024000052534131000400000100010005d6ae1b0f6875393da83c920a5b9408f5191aaf4e8b3c2c476ad2a11f5041ecae84ce9298bc4c203637e2fd3a80ad5378a9fa8da1363e98cea45c73969198a4b64510927c910001491cebbadf597b22448ad103b0a4007e339faf8fe8665dcdb70d65b27ac05b1977c0655fad06b372b820ecbdccf10a0f214fee0986dfeded"
)]
#endif
#endif
#endif

View File

@@ -1,29 +0,0 @@
using System.Globalization;
using System.Text;
namespace SharpCompress.Common
{
public class ArchiveEncoding
{
/// <summary>
/// Default encoding to use when archive format doesn't specify one.
/// </summary>
public static Encoding Default;
/// <summary>
/// Encoding used by encryption schemes which don't comply with RFC 2898.
/// </summary>
public static Encoding Password;
static ArchiveEncoding()
{
#if SILVERLIGHT || PORTABLE
Default = Encoding.UTF8;
Password = Encoding.UTF8;
#else
Default = Encoding.GetEncoding(CultureInfo.CurrentCulture.TextInfo.OEMCodePage);
Password = Encoding.Default;
#endif
}
}
}
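
Because Default and Password are public static fields, the encodings can be overridden once per process before any archive is opened, for example to read entry names written with a specific DOS code page. A minimal sketch (code page 866 is an illustrative choice and requires a runtime that ships that code page):

using System.Text;
using SharpCompress.Common;

// Set once, before opening archives; these statics are consulted whenever a format
// does not declare its own encoding.
ArchiveEncoding.Default = Encoding.GetEncoding(866);   // e.g. Cyrillic DOS-era entry names
ArchiveEncoding.Password = Encoding.UTF8;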

View File

@@ -1,30 +0,0 @@
using SharpCompress.Compressor.Deflate;
namespace SharpCompress.Common
{
/// <summary>
/// Detailed compression properties when saving.
/// </summary>
public class CompressionInfo
{
public CompressionInfo()
{
DeflateCompressionLevel = CompressionLevel.Default;
}
/// <summary>
/// The algorithm to use. Must be valid for the format type.
/// The algorithm to use. Must be valid for the format type.
/// </summary>
public CompressionType Type { get; set; }
/// <summary>
/// When CompressionType.Deflate is used, this property is referenced. Defaults to CompressionLevel.Default.
/// </summary>
public CompressionLevel DeflateCompressionLevel { get; set; }
public static implicit operator CompressionInfo(CompressionType compressionType)
{
return new CompressionInfo() {Type = compressionType};
}
}
}
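
The implicit operator lets a bare CompressionType be passed wherever a CompressionInfo is expected, falling back to CompressionLevel.Default for deflate. A short sketch of both forms (the BestCompression member is assumed to exist on the deflate CompressionLevel enum):

using SharpCompress.Common;
using SharpCompress.Compressor.Deflate;

// Implicit form: the CompressionType is wrapped in a CompressionInfo with default settings.
CompressionInfo quick = CompressionType.Deflate;

// Explicit form, used when a non-default deflate level is wanted.
CompressionInfo tuned = new CompressionInfo
{
    Type = CompressionType.Deflate,
    DeflateCompressionLevel = CompressionLevel.BestCompression
};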

View File

@@ -1,95 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Common
{
public class EntryStream : Stream
{
private Stream stream;
private bool completed;
internal EntryStream(Stream stream)
{
this.stream = stream;
}
/// <summary>
/// When reading a stream from OpenEntryStream, the stream must be fully consumed; use this to finish reading the entire entry.
/// </summary>
public void SkipEntry()
{
var buffer = new byte[4096];
while (Read(buffer, 0, buffer.Length) > 0)
{
}
completed = true;
}
protected override void Dispose(bool disposing)
{
if (!completed)
{
throw new InvalidOperationException(
"EntryStream has not been fully consumed. Read the entire stream or use SkipEntry.");
}
base.Dispose(disposing);
stream.Dispose();
}
public override bool CanRead
{
get { return true; }
}
public override bool CanSeek
{
get { return false; }
}
public override bool CanWrite
{
get { return false; }
}
public override void Flush()
{
throw new System.NotImplementedException();
}
public override long Length
{
get { throw new System.NotImplementedException(); }
}
public override long Position
{
get { throw new System.NotImplementedException(); }
set { throw new System.NotImplementedException(); }
}
public override int Read(byte[] buffer, int offset, int count)
{
int read = stream.Read(buffer, offset, count);
if (read <= 0)
{
completed = true;
}
return read;
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new System.NotImplementedException();
}
public override void SetLength(long value)
{
throw new System.NotImplementedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
throw new System.NotImplementedException();
}
}
}
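
Dispose above throws unless the entry has been read to the end, so callers either drain the stream themselves or call SkipEntry before leaving the using block. A minimal sketch of both paths:

using System.IO;
using SharpCompress.Common;

static class EntryStreamSketch
{
    // entryStream is the EntryStream handed back by an OpenEntryStream call.
    static void CopyOrSkip(EntryStream entryStream, Stream destination, bool wanted)
    {
        using (entryStream)
        {
            if (wanted)
            {
                // Reading until Read returns 0 marks the stream as completed, so Dispose will not throw.
                var buffer = new byte[4096];
                int read;
                while ((read = entryStream.Read(buffer, 0, buffer.Length)) > 0)
                {
                    destination.Write(buffer, 0, read);
                }
            }
            else
            {
                // SkipEntry drains the remaining bytes and marks the stream as completed.
                entryStream.SkipEntry();
            }
        }
    }
}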

View File

@@ -1,20 +0,0 @@
using System;
namespace SharpCompress.Common
{
[Flags]
public enum ExtractOptions
{
None,
/// <summary>
/// overwrite target if it exists
/// </summary>
Overwrite,
/// <summary>
/// extract with internal directory structure
/// </summary>
ExtractFullPath,
}
}
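
ExtractOptions is a [Flags] enum, so the values are combined with bitwise OR; None keeps existing files and drops the archive's directory structure. For example:

using SharpCompress.Common;

// Overwrite anything already on disk and recreate the archive's folder layout.
var options = ExtractOptions.Overwrite | ExtractOptions.ExtractFullPath;
bool overwrite = options.HasFlag(ExtractOptions.Overwrite);   // true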

View File

@@ -1,90 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Common.GZip
{
public class GZipEntry : Entry
{
private readonly GZipFilePart filePart;
internal GZipEntry(GZipFilePart filePart)
{
this.filePart = filePart;
}
public override CompressionType CompressionType
{
get { return CompressionType.GZip; }
}
public override uint Crc
{
get { return 0; }
}
public override string FilePath
{
get { return filePart.FilePartName; }
}
public override long CompressedSize
{
get { return 0; }
}
public override long Size
{
get { return 0; }
}
public override DateTime? LastModifiedTime
{
get { return filePart.DateModified; }
}
public override DateTime? CreatedTime
{
get { return null; }
}
public override DateTime? LastAccessedTime
{
get { return null; }
}
public override DateTime? ArchivedTime
{
get { return null; }
}
public override bool IsEncrypted
{
get { return false; }
}
public override bool IsDirectory
{
get { return false; }
}
public override bool IsSplit
{
get { return false; }
}
internal override IEnumerable<FilePart> Parts
{
get { return filePart.AsEnumerable<FilePart>(); }
}
internal static IEnumerable<GZipEntry> GetEntries(Stream stream)
{
yield return new GZipEntry(new GZipFilePart(stream));
}
internal override void Close()
{
}
}
}

View File

@@ -1,44 +0,0 @@
using System.IO;
namespace SharpCompress.Common.GZip
{
public class GZipVolume : Volume
{
#if !PORTABLE
private readonly FileInfo fileInfo;
#endif
public GZipVolume(Stream stream, Options options)
: base(stream, options)
{
}
#if !PORTABLE
public GZipVolume(FileInfo fileInfo, Options options)
: base(fileInfo.OpenRead(), options)
{
this.fileInfo = fileInfo;
}
#endif
#if !PORTABLE
/// <summary>
/// File that backs this volume, if it is not stream based
/// </summary>
public override FileInfo VolumeFile
{
get { return fileInfo; }
}
#endif
public override bool IsFirstVolume
{
get { return true; }
}
public override bool IsMultiVolume
{
get { return true; }
}
}
}

View File

@@ -1,44 +0,0 @@
using System.IO;
namespace SharpCompress.Common
{
public class GenericVolume : Volume
{
#if !PORTABLE
private FileInfo fileInfo;
#endif
public GenericVolume(Stream stream, Options options)
: base(stream, options)
{
}
#if !PORTABLE
public GenericVolume(FileInfo fileInfo, Options options)
: base(fileInfo.OpenRead(), options)
{
this.fileInfo = fileInfo;
}
#endif
#if !PORTABLE
/// <summary>
/// File that backs this volume, if it is not stream based
/// </summary>
public override FileInfo VolumeFile
{
get { return fileInfo; }
}
#endif
public override bool IsFirstVolume
{
get { return true; }
}
public override bool IsMultiVolume
{
get { return true; }
}
}
}

View File

@@ -1,18 +0,0 @@
using System;
#if !PORTABLE
using System.IO;
#endif
namespace SharpCompress.Common
{
public interface IVolume : IDisposable
{
#if !PORTABLE
/// <summary>
/// File that backs this volume, if it is not stream based
/// </summary>
FileInfo VolumeFile { get; }
#endif
}
}

View File

@@ -1,23 +0,0 @@
using System;
namespace SharpCompress.Common
{
[Flags]
public enum Options
{
/// <summary>
/// No options specified
/// </summary>
None = 0,
/// <summary>
/// SharpCompress will keep the supplied streams open
/// </summary>
KeepStreamsOpen = 1,
/// <summary>
/// Look for RarArchive (Check for self-extracting archives or cases where RarArchive isn't at the start of the file)
/// </summary>
LookForHeader = 2,
}
}
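
Options is likewise a [Flags] enum, so KeepStreamsOpen and LookForHeader can be combined, for instance when reading a self-extracting rar from a stream the caller still needs afterwards. A hedged sketch using the RarArchive.Open(Stream, Options) overload shown earlier:

using System;
using System.IO;
using SharpCompress.Archive.Rar;
using SharpCompress.Common;

static class SfxSketch
{
    static void ListSelfExtractor(string exePath)
    {
        using (FileStream stream = File.OpenRead(exePath))
        using (var archive = RarArchive.Open(stream, Options.KeepStreamsOpen | Options.LookForHeader))
        {
            // LookForHeader scans past the self-extractor stub; KeepStreamsOpen leaves the
            // FileStream open, so the outer using block stays responsible for it.
            foreach (var entry in archive.Entries)
            {
                Console.WriteLine(entry.FilePath);
            }
        }
    }
}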

View File

@@ -1,31 +0,0 @@
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
internal class ArchiveHeader : RarHeader
{
internal const short mainHeaderSizeWithEnc = 7;
internal const short mainHeaderSize = 6;
protected override void ReadFromReader(MarkingBinaryReader reader)
{
HighPosAv = reader.ReadInt16();
PosAv = reader.ReadInt32();
if (ArchiveHeaderFlags.HasFlag(ArchiveFlags.ENCRYPTVER))
{
EncryptionVersion = reader.ReadByte();
}
}
internal ArchiveFlags ArchiveHeaderFlags
{
get { return (ArchiveFlags) base.Flags; }
}
internal short HighPosAv { get; private set; }
internal int PosAv { get; private set; }
internal byte EncryptionVersion { get; private set; }
}
}

View File

@@ -1,28 +0,0 @@
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
internal class EndArchiveHeader : RarHeader
{
protected override void ReadFromReader(MarkingBinaryReader reader)
{
if (EndArchiveFlags.HasFlag(EndArchiveFlags.EARC_DATACRC))
{
ArchiveCRC = reader.ReadInt32();
}
if (EndArchiveFlags.HasFlag(EndArchiveFlags.EARC_VOLNUMBER))
{
VolumeNumber = reader.ReadInt16();
}
}
internal EndArchiveFlags EndArchiveFlags
{
get { return (EndArchiveFlags) base.Flags; }
}
internal int? ArchiveCRC { get; private set; }
internal short? VolumeNumber { get; private set; }
}
}

View File

@@ -1,229 +0,0 @@
using System;
using System.IO;
using System.Text;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
internal class FileHeader : RarHeader
{
private const byte SALT_SIZE = 8;
private const byte NEWLHD_SIZE = 32;
protected override void ReadFromReader(MarkingBinaryReader reader)
{
uint lowUncompressedSize = reader.ReadUInt32();
HostOS = (HostOS) (int) reader.ReadByte();
FileCRC = reader.ReadUInt32();
FileLastModifiedTime = Utility.DosDateToDateTime(reader.ReadInt32());
RarVersion = reader.ReadByte();
PackingMethod = reader.ReadByte();
short nameSize = reader.ReadInt16();
FileAttributes = reader.ReadInt32();
uint highCompressedSize = 0;
uint highUncompressedkSize = 0;
if (FileFlags.HasFlag(FileFlags.LARGE))
{
highCompressedSize = reader.ReadUInt32();
highUncompressedkSize = reader.ReadUInt32();
}
else
{
if (lowUncompressedSize == 0xffffffff)
{
lowUncompressedSize = 0xffffffff;
highUncompressedkSize = int.MaxValue;
}
}
CompressedSize = UInt32To64(highCompressedSize, AdditionalSize);
UncompressedSize = UInt32To64(highUncompressedkSize, lowUncompressedSize);
nameSize = nameSize > 4*1024 ? (short) (4*1024) : nameSize;
byte[] fileNameBytes = reader.ReadBytes(nameSize);
switch (HeaderType)
{
case HeaderType.FileHeader:
{
if (FileFlags.HasFlag(FileFlags.UNICODE))
{
int length = 0;
while (length < fileNameBytes.Length
&& fileNameBytes[length] != 0)
{
length++;
}
if (length != nameSize)
{
length++;
FileName = FileNameDecoder.Decode(fileNameBytes, length);
}
else
{
FileName = DecodeDefault(fileNameBytes);
}
}
else
{
FileName = DecodeDefault(fileNameBytes);
}
FileName = ConvertPath(FileName, HostOS);
}
break;
case HeaderType.NewSubHeader:
{
int datasize = HeaderSize - NEWLHD_SIZE - nameSize;
if (FileFlags.HasFlag(FileFlags.SALT))
{
datasize -= SALT_SIZE;
}
if (datasize > 0)
{
SubData = reader.ReadBytes(datasize);
}
if (NewSubHeaderType.SUBHEAD_TYPE_RR.Equals(fileNameBytes))
{
RecoverySectors = SubData[8] + (SubData[9] << 8)
+ (SubData[10] << 16) + (SubData[11] << 24);
}
}
break;
}
if (FileFlags.HasFlag(FileFlags.SALT))
{
Salt = reader.ReadBytes(SALT_SIZE);
}
if (FileFlags.HasFlag(FileFlags.EXTTIME))
{
ushort extendedFlags = reader.ReadUInt16();
FileLastModifiedTime = ProcessExtendedTime(extendedFlags, FileLastModifiedTime, reader, 0);
FileCreatedTime = ProcessExtendedTime(extendedFlags, null, reader, 1);
FileLastAccessedTime = ProcessExtendedTime(extendedFlags, null, reader, 2);
FileArchivedTime = ProcessExtendedTime(extendedFlags, null, reader, 3);
}
}
//only the full .NET Framework supports code pages other than Unicode/UTF-8
private string DecodeDefault(byte[] bytes)
{
return ArchiveEncoding.Default.GetString(bytes, 0, bytes.Length);
}
private long UInt32To64(uint x, uint y)
{
long l = x;
l <<= 32;
return l + y;
}
private static DateTime? ProcessExtendedTime(ushort extendedFlags, DateTime? time, MarkingBinaryReader reader,
int i)
{
uint rmode = (uint) extendedFlags >> (3 - i)*4;
if ((rmode & 8) == 0)
{
return null;
}
if (i != 0)
{
uint DosTime = reader.ReadUInt32();
time = Utility.DosDateToDateTime(DosTime);
}
if ((rmode & 4) == 0)
{
time = time.Value.AddSeconds(1);
}
uint nanosecondHundreds = 0;
int count = (int) rmode & 3;
for (int j = 0; j < count; j++)
{
byte b = reader.ReadByte();
nanosecondHundreds |= (((uint) b) << ((j + 3 - count)*8));
}
//10^-7 to 10^-3
return time.Value.AddMilliseconds(nanosecondHundreds*Math.Pow(10, -4));
}
private static string ConvertPath(string path, HostOS os)
{
#if PORTABLE
return path.Replace('\\', '/');
#else
switch (os)
{
case HostOS.MacOS:
case HostOS.Unix:
{
if (Path.DirectorySeparatorChar == '\\')
{
return path.Replace('/', '\\');
}
}
break;
default:
{
if (Path.DirectorySeparatorChar == '/')
{
return path.Replace('\\', '/');
}
}
break;
}
return path;
#endif
}
internal long DataStartPosition { get; set; }
internal HostOS HostOS { get; private set; }
internal uint FileCRC { get; private set; }
internal DateTime? FileLastModifiedTime { get; private set; }
internal DateTime? FileCreatedTime { get; private set; }
internal DateTime? FileLastAccessedTime { get; private set; }
internal DateTime? FileArchivedTime { get; private set; }
internal byte RarVersion { get; private set; }
internal byte PackingMethod { get; private set; }
internal int FileAttributes { get; private set; }
internal FileFlags FileFlags
{
get { return (FileFlags) base.Flags; }
}
internal long CompressedSize { get; private set; }
internal long UncompressedSize { get; private set; }
internal string FileName { get; private set; }
internal byte[] SubData { get; private set; }
internal int RecoverySectors { get; private set; }
internal byte[] Salt { get; private set; }
public override string ToString()
{
return FileName;
}
public Stream PackedStream { get; set; }
}
}
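
The 64-bit sizes are rebuilt from two 32-bit halves: the high words come from the optional LARGE fields and the low words from the base header, combined by UInt32To64. As a worked example, high = 1 and low = 0x200 give (1 << 32) + 512 = 4294967808 bytes. A standalone sketch of the same arithmetic:

static class SizeMath
{
    // Mirrors FileHeader.UInt32To64: shift the high 32 bits up and add the low 32 bits.
    internal static long UInt32To64(uint high, uint low)
    {
        long result = high;
        result <<= 32;
        return result + low;
    }
}

// SizeMath.UInt32To64(1, 0x200) == 4294967808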

View File

@@ -1,85 +0,0 @@
using System;
namespace SharpCompress.Common.Rar.Headers
{
internal enum HeaderType
{
MarkHeader = 0x72,
ArchiveHeader = 0x73,
FileHeader = 0x74,
CommHeader = 0x75,
AvHeader = 0x76,
SubHeader = 0x77,
ProtectHeader = 0x78,
SignHeader = 0x79,
NewSubHeader = 0x7a,
EndArchiveHeader = 0x7b,
}
internal enum HeaderFlags : short
{
LONG_BLOCK = -0x8000,
}
[Flags]
internal enum ArchiveFlags
{
VOLUME = 0x0001,
COMMENT = 0x0002,
LOCK = 0x0004,
SOLID = 0x0008,
NEWNUMBERING = 0x0010,
AV = 0x0020,
PROTECT = 0x0040,
PASSWORD = 0x0080,
FIRSTVOLUME = 0x0100,
ENCRYPTVER = 0x0200,
}
internal enum HostOS
{
MSDOS = 0,
OS2 = 1,
Win32 = 2,
Unix = 3,
MacOS = 4,
BeOS = 5
}
[Flags]
internal enum FileFlags : ushort
{
SPLIT_BEFORE = 0x0001,
SPLIT_AFTER = 0x0002,
PASSWORD = 0x0004,
COMMENT = 0x0008,
SOLID = 0x0010,
WINDOWMASK = 0x00e0,
WINDOW64 = 0x0000,
WINDOW128 = 0x0020,
WINDOW256 = 0x0040,
WINDOW512 = 0x0060,
WINDOW1024 = 0x0080,
WINDOW2048 = 0x00a0,
WINDOW4096 = 0x00c0,
DIRECTORY = 0x00e0,
LARGE = 0x0100,
UNICODE = 0x0200,
SALT = 0x0400,
VERSION = 0x0800,
EXTTIME = 0x1000,
EXTFLAGS = 0x2000,
}
[Flags]
internal enum EndArchiveFlags
{
EARC_NEXT_VOLUME = 0x0001,
EARC_DATACRC = 0x0002,
EARC_REVSPACE = 0x0004,
EARC_VOLNUMBER = 0x0008,
}
}

View File

@@ -1,59 +0,0 @@
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
internal class MarkHeader : RarHeader
{
protected override void ReadFromReader(MarkingBinaryReader reader)
{
}
internal bool IsValid()
{
if (!(HeadCRC == 0x6152))
{
return false;
}
if (!(HeaderType == HeaderType.MarkHeader))
{
return false;
}
if (!(Flags == 0x1a21))
{
return false;
}
if (!(HeaderSize == BaseBlockSize))
{
return false;
}
return true;
}
internal bool IsSignature()
{
bool valid = false;
/*byte[] d = new byte[BaseBlock.BaseBlockSize];
BinaryWriter writer = new BinaryWriter();
writer.Write(HeadCRC);
writer.Write((byte)HeaderType);
writer.Write(flags);
writer.Write(HeaderSize);
writer.Flush
if (d[0] == 0x52) {
if (d[1]==0x45 && d[2]==0x7e && d[3]==0x5e) {
oldFormat=true;
valid=true;
}
else if (d[1]==0x61 && d[2]==0x72 && d[3]==0x21 && d[4]==0x1a &&
d[5]==0x07 && d[6]==0x00) {
oldFormat=false;
valid=true;
}
}*/
return valid;
}
internal bool OldFormat { get; private set; }
}
}


@@ -1,96 +0,0 @@
using System.IO;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
internal class RarHeader
{
internal const short BaseBlockSize = 7;
internal const short LONG_BLOCK = -0x8000;
private void FillBase(RarHeader baseHeader)
{
HeadCRC = baseHeader.HeadCRC;
HeaderType = baseHeader.HeaderType;
Flags = baseHeader.Flags;
HeaderSize = baseHeader.HeaderSize;
AdditionalSize = baseHeader.AdditionalSize;
ReadBytes = baseHeader.ReadBytes;
}
internal static RarHeader Create(MarkingBinaryReader reader)
{
try
{
RarHeader header = new RarHeader();
reader.Mark();
header.ReadFromReader(reader);
header.ReadBytes += reader.CurrentReadByteCount;
return header;
}
catch (EndOfStreamException)
{
return null;
}
}
protected virtual void ReadFromReader(MarkingBinaryReader reader)
{
HeadCRC = reader.ReadInt16();
HeaderType = (HeaderType) (int) (reader.ReadByte() & 0xff);
Flags = reader.ReadInt16();
HeaderSize = reader.ReadInt16();
if (FlagUtility.HasFlag(Flags, LONG_BLOCK))
{
AdditionalSize = reader.ReadUInt32();
}
}
internal T PromoteHeader<T>(MarkingBinaryReader reader)
where T : RarHeader, new()
{
T header = new T();
header.FillBase(this);
reader.Mark();
header.ReadFromReader(reader);
header.ReadBytes += reader.CurrentReadByteCount;
int headerSizeDiff = header.HeaderSize - (int) header.ReadBytes;
if (headerSizeDiff > 0)
{
reader.ReadBytes(headerSizeDiff);
}
return header;
}
protected virtual void PostReadingBytes(MarkingBinaryReader reader)
{
}
/// <summary>
/// This is the number of bytes read when reading the header
/// </summary>
protected long ReadBytes { get; private set; }
protected short HeadCRC { get; private set; }
internal HeaderType HeaderType { get; private set; }
/// <summary>
/// Untyped flags. These should be typed when Promoting to another header
/// </summary>
protected short Flags { get; private set; }
protected short HeaderSize { get; private set; }
/// <summary>
/// The additional size beyond the base header; this may be file data
/// </summary>
protected uint AdditionalSize { get; private set; }
}
}


@@ -1,192 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
internal class RarHeaderFactory
{
private int MAX_SFX_SIZE = 0x80000 - 16; //archive.cpp line 136
internal RarHeaderFactory(StreamingMode mode, Options options)
{
StreamingMode = mode;
Options = options;
}
private Options Options { get; set; }
internal StreamingMode StreamingMode { get; private set; }
internal IEnumerable<RarHeader> ReadHeaders(Stream stream)
{
if (Options.HasFlag(Options.LookForHeader))
{
stream = CheckSFX(stream);
}
RarHeader header;
while ((header = ReadNextHeader(stream)) != null)
{
yield return header;
if (header.HeaderType == HeaderType.EndArchiveHeader)
{
yield break; // nothing follows the end-of-archive header
}
}
}
private Stream CheckSFX(Stream stream)
{
RewindableStream rewindableStream = GetRewindableStream(stream);
stream = rewindableStream;
BinaryReader reader = new BinaryReader(rewindableStream);
try
{
int count = 0;
while (true)
{
byte firstByte = reader.ReadByte();
if (firstByte == 0x52)
{
MemoryStream buffer = new MemoryStream();
byte[] nextThreeBytes = reader.ReadBytes(3);
if ((nextThreeBytes[0] == 0x45)
&& (nextThreeBytes[1] == 0x7E)
&& (nextThreeBytes[2] == 0x5E))
{
//old format and is valid
buffer.WriteByte(0x52);
buffer.Write(nextThreeBytes, 0, 3);
rewindableStream.Rewind(buffer);
break;
}
byte[] secondThreeBytes = reader.ReadBytes(3);
if ((nextThreeBytes[0] == 0x61)
&& (nextThreeBytes[1] == 0x72)
&& (nextThreeBytes[2] == 0x21)
&& (secondThreeBytes[0] == 0x1A)
&& (secondThreeBytes[1] == 0x07)
&& (secondThreeBytes[2] == 0x00))
{
//new format and is valid
buffer.WriteByte(0x52);
buffer.Write(nextThreeBytes, 0, 3);
buffer.Write(secondThreeBytes, 0, 3);
rewindableStream.Rewind(buffer);
break;
}
buffer.Write(nextThreeBytes, 0, 3);
buffer.Write(secondThreeBytes, 0, 3);
rewindableStream.Rewind(buffer);
}
if (count > MAX_SFX_SIZE)
{
break;
}
}
}
catch (Exception e)
{
if (!Options.HasFlag(Options.KeepStreamsOpen))
{
#if THREEFIVE
reader.Close();
#else
reader.Dispose();
#endif
}
throw new InvalidFormatException("Error trying to read rar signature.", e);
}
return stream;
}
private RewindableStream GetRewindableStream(Stream stream)
{
RewindableStream rewindableStream = stream as RewindableStream;
if (rewindableStream == null)
{
rewindableStream = new RewindableStream(stream);
}
return rewindableStream;
}
private RarHeader ReadNextHeader(Stream stream)
{
MarkingBinaryReader reader = new MarkingBinaryReader(stream);
RarHeader header = RarHeader.Create(reader);
if (header == null)
{
return null;
}
switch (header.HeaderType)
{
case HeaderType.ArchiveHeader:
{
return header.PromoteHeader<ArchiveHeader>(reader);
}
case HeaderType.MarkHeader:
{
return header.PromoteHeader<MarkHeader>(reader);
}
case HeaderType.NewSubHeader:
{
FileHeader fh = header.PromoteHeader<FileHeader>(reader);
switch (StreamingMode)
{
case StreamingMode.Seekable:
{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:
{
//skip the sub-header data; it is not exposed as an entry when streaming
reader.BaseStream.Skip(fh.CompressedSize);
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
return fh;
}
case HeaderType.FileHeader:
{
FileHeader fh = header.PromoteHeader<FileHeader>(reader);
switch (StreamingMode)
{
case StreamingMode.Seekable:
{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:
{
ReadOnlySubStream ms
= new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
fh.PackedStream = ms;
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
return fh;
}
case HeaderType.EndArchiveHeader:
{
return header.PromoteHeader<EndArchiveHeader>(reader);
}
default:
{
throw new InvalidFormatException("Invalid Rar Header: " + header.HeaderType.ToString());
}
}
}
}
}


@@ -1,23 +0,0 @@
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
internal class AVHeader : RarHeader
{
protected override void ReadFromReader(MarkingBinaryReader reader)
{
UnpackVersion = reader.ReadByte();
Method = reader.ReadByte();
AVVersion = reader.ReadByte();
AVInfoCRC = reader.ReadInt32();
}
internal int AVInfoCRC { get; private set; }
internal byte UnpackVersion { get; private set; }
internal byte Method { get; private set; }
internal byte AVVersion { get; private set; }
}
}


@@ -1,85 +0,0 @@
using System;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Common.Rar
{
public abstract class RarEntry : Entry
{
internal abstract FileHeader FileHeader { get; }
/// <summary>
/// The File's 32 bit CRC Hash
/// </summary>
public override uint Crc
{
get { return FileHeader.FileCRC; }
}
/// <summary>
/// The path of the file internal to the Rar Archive.
/// </summary>
public override string FilePath
{
get { return FileHeader.FileName; }
}
/// <summary>
/// The entry last modified time in the archive, if recorded
/// </summary>
public override DateTime? LastModifiedTime
{
get { return FileHeader.FileLastModifiedTime; }
}
/// <summary>
/// The entry create time in the archive, if recorded
/// </summary>
public override DateTime? CreatedTime
{
get { return FileHeader.FileCreatedTime; }
}
/// <summary>
/// The entry last accessed time in the archive, if recorded
/// </summary>
public override DateTime? LastAccessedTime
{
get { return FileHeader.FileLastAccessedTime; }
}
/// <summary>
/// The entry time when archived, if recorded
/// </summary>
public override DateTime? ArchivedTime
{
get { return FileHeader.FileArchivedTime; }
}
/// <summary>
/// Entry is password protected and encrypted and cannot be extracted.
/// </summary>
public override bool IsEncrypted
{
get { return FileHeader.FileFlags.HasFlag(FileFlags.PASSWORD); }
}
/// <summary>
/// Entry is a directory.
/// </summary>
public override bool IsDirectory
{
get { return FileHeader.FileFlags.HasFlag(FileFlags.DIRECTORY); }
}
public override bool IsSplit
{
get { return FileHeader.FileFlags.HasFlag(FileFlags.SPLIT_AFTER); }
}
public override string ToString()
{
return string.Format("Entry Path: {0} Compressed Size: {1} Uncompressed Size: {2} CRC: {3}",
FilePath, CompressedSize, Size, Crc);
}
}
}


@@ -1,161 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Compressor.LZMA;
using SharpCompress.Compressor.LZMA.Utilites;
namespace SharpCompress.Common.SevenZip
{
internal class ArchiveDatabase
{
internal byte MajorVersion;
internal byte MinorVersion;
internal long StartPositionAfterHeader;
internal long DataStartPosition;
internal List<long> PackSizes = new List<long>();
internal List<uint?> PackCRCs = new List<uint?>();
internal List<CFolder> Folders = new List<CFolder>();
internal List<int> NumUnpackStreamsVector;
internal List<CFileItem> Files = new List<CFileItem>();
internal List<long> PackStreamStartPositions = new List<long>();
internal List<int> FolderStartFileIndex = new List<int>();
internal List<int> FileIndexToFolderIndexMap = new List<int>();
internal void Clear()
{
PackSizes.Clear();
PackCRCs.Clear();
Folders.Clear();
NumUnpackStreamsVector = null;
Files.Clear();
PackStreamStartPositions.Clear();
FolderStartFileIndex.Clear();
FileIndexToFolderIndexMap.Clear();
}
internal bool IsEmpty()
{
return PackSizes.Count == 0
&& PackCRCs.Count == 0
&& Folders.Count == 0
&& NumUnpackStreamsVector.Count == 0
&& Files.Count == 0;
}
private void FillStartPos()
{
PackStreamStartPositions.Clear();
long startPos = 0;
for (int i = 0; i < PackSizes.Count; i++)
{
PackStreamStartPositions.Add(startPos);
startPos += PackSizes[i];
}
}
private void FillFolderStartFileIndex()
{
FolderStartFileIndex.Clear();
FileIndexToFolderIndexMap.Clear();
int folderIndex = 0;
int indexInFolder = 0;
for (int i = 0; i < Files.Count; i++)
{
CFileItem file = Files[i];
bool emptyStream = !file.HasStream;
if (emptyStream && indexInFolder == 0)
{
FileIndexToFolderIndexMap.Add(-1);
continue;
}
if (indexInFolder == 0)
{
// v3.13 incorrectly worked with empty folders
// v4.07: Loop for skipping empty folders
for (; ; )
{
if (folderIndex >= Folders.Count)
throw new InvalidOperationException();
FolderStartFileIndex.Add(i); // check it
if (NumUnpackStreamsVector[folderIndex] != 0)
break;
folderIndex++;
}
}
FileIndexToFolderIndexMap.Add(folderIndex);
if (emptyStream)
continue;
indexInFolder++;
if (indexInFolder >= NumUnpackStreamsVector[folderIndex])
{
folderIndex++;
indexInFolder = 0;
}
}
}
public void Fill()
{
FillStartPos();
FillFolderStartFileIndex();
}
internal long GetFolderStreamPos(CFolder folder, int indexInFolder)
{
int index = folder.FirstPackStreamId + indexInFolder;
return DataStartPosition + PackStreamStartPositions[index];
}
internal long GetFolderFullPackSize(int folderIndex)
{
int packStreamIndex = Folders[folderIndex].FirstPackStreamId;
CFolder folder = Folders[folderIndex];
long size = 0;
for (int i = 0; i < folder.PackStreams.Count; i++)
size += PackSizes[packStreamIndex + i];
return size;
}
internal Stream GetFolderStream(Stream stream, CFolder folder, IPasswordProvider pw)
{
int packStreamIndex = folder.FirstPackStreamId;
long folderStartPackPos = GetFolderStreamPos(folder, 0);
List<long> packSizes = new List<long>();
for (int j = 0; j < folder.PackStreams.Count; j++)
packSizes.Add(PackSizes[packStreamIndex + j]);
return DecoderStreamHelper.CreateDecoderStream(stream, folderStartPackPos, packSizes.ToArray(), folder, pw);
}
private long GetFolderPackStreamSize(int folderIndex, int streamIndex)
{
return PackSizes[Folders[folderIndex].FirstPackStreamId + streamIndex];
}
private long GetFilePackSize(int fileIndex)
{
int folderIndex = FileIndexToFolderIndexMap[fileIndex];
if (folderIndex != -1)
if (FolderStartFileIndex[folderIndex] == fileIndex)
return GetFolderFullPackSize(folderIndex);
return 0;
}
}
}
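FillStartPos above is a running sum over PackSizes, and GetFolderStreamPos then adds DataStartPosition to the start position of the selected packed stream. A standalone sketch of that arithmetic with invented numbers:

using System;
using System.Collections.Generic;

static class PackOffsetDemo
{
    static void Main()
    {
        long dataStart = 32;                              // position just after the 7z headers (made up)
        var packSizes = new List<long> { 100, 250, 40 };  // sizes of three packed streams (made up)
        var startPositions = new List<long>();
        long pos = 0;
        foreach (long size in packSizes)
        {
            startPositions.Add(pos);                      // 0, 100, 350
            pos += size;
        }
        int firstPackStreamId = 1;                        // folder begins at the second packed stream
        Console.WriteLine(dataStart + startPositions[firstPackStreamId]); // 132
    }
}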


@@ -1,10 +0,0 @@
namespace SharpCompress.Common.SevenZip
{
internal class CCoderInfo
{
internal CMethodId MethodId;
internal byte[] Props;
internal int NumInStreams;
internal int NumOutStreams;
}
}


@@ -1,139 +0,0 @@
using System;
using System.Collections.Generic;
using SharpCompress.Compressor.LZMA;
namespace SharpCompress.Common.SevenZip
{
internal class CFolder
{
internal List<CCoderInfo> Coders = new List<CCoderInfo>();
internal List<CBindPair> BindPairs = new List<CBindPair>();
internal List<int> PackStreams = new List<int>();
internal int FirstPackStreamId;
internal List<long> UnpackSizes = new List<long>();
internal uint? UnpackCRC;
internal bool UnpackCRCDefined
{
get { return UnpackCRC != null; }
}
public long GetUnpackSize()
{
if (UnpackSizes.Count == 0)
return 0;
for (int i = UnpackSizes.Count - 1; i >= 0; i--)
if (FindBindPairForOutStream(i) < 0)
return UnpackSizes[i];
throw new Exception();
}
public int GetNumOutStreams()
{
int count = 0;
for (int i = 0; i < Coders.Count; i++)
count += Coders[i].NumOutStreams;
return count;
}
public int FindBindPairForInStream(int inStreamIndex)
{
for (int i = 0; i < BindPairs.Count; i++)
if (BindPairs[i].InIndex == inStreamIndex)
return i;
return -1;
}
public int FindBindPairForOutStream(int outStreamIndex)
{
for (int i = 0; i < BindPairs.Count; i++)
if (BindPairs[i].OutIndex == outStreamIndex)
return i;
return -1;
}
public int FindPackStreamArrayIndex(int inStreamIndex)
{
for (int i = 0; i < PackStreams.Count; i++)
if (PackStreams[i] == inStreamIndex)
return i;
return -1;
}
public bool IsEncrypted()
{
for (int i = Coders.Count - 1; i >= 0; i--)
if (Coders[i].MethodId == CMethodId.kAES)
return true;
return false;
}
public bool CheckStructure()
{
const int kNumCodersMax = 32; // don't change it
const int kMaskSize = 32; // it must be >= kNumCodersMax
const int kNumBindsMax = 32;
if (Coders.Count > kNumCodersMax || BindPairs.Count > kNumBindsMax)
return false;
{
var v = new BitVector(BindPairs.Count + PackStreams.Count);
for (int i = 0; i < BindPairs.Count; i++)
if (v.GetAndSet(BindPairs[i].InIndex))
return false;
for (int i = 0; i < PackStreams.Count; i++)
if (v.GetAndSet(PackStreams[i]))
return false;
}
{
var v = new BitVector(UnpackSizes.Count);
for (int i = 0; i < BindPairs.Count; i++)
if (v.GetAndSet(BindPairs[i].OutIndex))
return false;
}
uint[] mask = new uint[kMaskSize];
{
List<int> inStreamToCoder = new List<int>();
List<int> outStreamToCoder = new List<int>();
for (int i = 0; i < Coders.Count; i++)
{
CCoderInfo coder = Coders[i];
for (int j = 0; j < coder.NumInStreams; j++)
inStreamToCoder.Add(i);
for (int j = 0; j < coder.NumOutStreams; j++)
outStreamToCoder.Add(i);
}
for (int i = 0; i < BindPairs.Count; i++)
{
CBindPair bp = BindPairs[i];
mask[inStreamToCoder[bp.InIndex]] |= (1u << outStreamToCoder[bp.OutIndex]);
}
}
for (int i = 0; i < kMaskSize; i++)
for (int j = 0; j < kMaskSize; j++)
if (((1u << j) & mask[i]) != 0)
mask[i] |= mask[j];
for (int i = 0; i < kMaskSize; i++)
if (((1u << i) & mask[i]) != 0)
return false;
return true;
}
}
}


@@ -1,55 +0,0 @@
namespace SharpCompress.Common.SevenZip
{
internal struct CMethodId
{
public const ulong kCopyId = 0;
public const ulong kLzmaId = 0x030101;
public const ulong kLzma2Id = 0x21;
public const ulong kAESId = 0x06F10701;
public static readonly CMethodId kCopy = new CMethodId(kCopyId);
public static readonly CMethodId kLzma = new CMethodId(kLzmaId);
public static readonly CMethodId kLzma2 = new CMethodId(kLzma2Id);
public static readonly CMethodId kAES = new CMethodId(kAESId);
public readonly ulong Id;
public CMethodId(ulong id)
{
this.Id = id;
}
public override int GetHashCode()
{
return Id.GetHashCode();
}
public override bool Equals(object obj)
{
return obj is CMethodId && (CMethodId) obj == this;
}
public bool Equals(CMethodId other)
{
return Id == other.Id;
}
public static bool operator ==(CMethodId left, CMethodId right)
{
return left.Id == right.Id;
}
public static bool operator !=(CMethodId left, CMethodId right)
{
return left.Id != right.Id;
}
public int GetLength()
{
int bytes = 0;
for (ulong value = Id; value != 0; value >>= 8)
bytes++;
return bytes;
}
}
}
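CMethodId.GetLength above counts how many bytes are needed to hold the method id (the 7z format stores method ids as variable-length byte strings). A standalone check of the same loop, purely illustrative and not reusing the struct:

static class MethodIdLengthDemo
{
    static void Main()
    {
        ulong id = 0x030101;                     // kLzmaId
        int bytes = 0;
        for (ulong value = id; value != 0; value >>= 8)
        {
            bytes++;
        }
        System.Console.WriteLine(bytes);         // 3: the LZMA method id occupies three bytes
    }
}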


@@ -1,84 +0,0 @@
using System;
using System.Collections.Generic;
namespace SharpCompress.Common.SevenZip
{
public class SevenZipEntry : Entry
{
internal SevenZipEntry(SevenZipFilePart filePart)
{
this.FilePart = filePart;
}
internal SevenZipFilePart FilePart { get; private set; }
public override CompressionType CompressionType
{
get { return FilePart.CompressionType; }
}
public override uint Crc
{
get { return FilePart.Header.Crc ?? 0; }
}
public override string FilePath
{
get { return FilePart.Header.Name; }
}
public override long CompressedSize
{
get { return 0; }
}
public override long Size
{
get { return (long) FilePart.Header.Size; }
}
public override DateTime? LastModifiedTime
{
get { throw new NotImplementedException(); }
}
public override DateTime? CreatedTime
{
get { throw new NotImplementedException(); }
}
public override DateTime? LastAccessedTime
{
get { throw new NotImplementedException(); }
}
public override DateTime? ArchivedTime
{
get { throw new NotImplementedException(); }
}
public override bool IsEncrypted
{
get { return false; }
}
public override bool IsDirectory
{
get { return FilePart.Header.IsDir; }
}
public override bool IsSplit
{
get { return false; }
}
internal override IEnumerable<FilePart> Parts
{
get { return FilePart.AsEnumerable<FilePart>(); }
}
internal override void Close()
{
}
}
}


@@ -1,109 +0,0 @@
using System;
using System.IO;
using System.Linq;
using SharpCompress.IO;
namespace SharpCompress.Common.SevenZip
{
internal class SevenZipFilePart : FilePart
{
private CompressionType? type;
private Stream stream;
private ArchiveDatabase database;
internal SevenZipFilePart(Stream stream, ArchiveDatabase database, int index, CFileItem fileEntry)
{
this.stream = stream;
this.database = database;
Index = index;
Header = fileEntry;
if (Header.HasStream)
{
Folder = database.Folders[database.FileIndexToFolderIndexMap[index]];
}
}
internal Stream BaseStream { get; private set; }
internal CFileItem Header { get; private set; }
internal CFolder Folder { get; private set; }
internal int Index { get; private set; }
internal override string FilePartName
{
get { return Header.Name; }
}
internal override Stream GetRawStream()
{
return null;
}
internal override Stream GetCompressedStream()
{
if (!Header.HasStream)
{
return null;
}
var folderStream = database.GetFolderStream(stream, Folder, null);
int firstFileIndex = database.FolderStartFileIndex[database.Folders.IndexOf(Folder)];
int skipCount = Index - firstFileIndex;
long skipSize = 0;
for (int i = 0; i < skipCount; i++)
{
skipSize += database.Files[firstFileIndex + i].Size;
}
if (skipSize > 0)
{
folderStream.Skip(skipSize);
}
return new ReadOnlySubStream(folderStream, Header.Size);
}
public CompressionType CompressionType
{
get
{
if (type == null)
{
type = GetCompression();
}
return type.Value;
}
}
//copied from DecoderRegistry
private const uint k_Copy = 0x0;
private const uint k_Delta = 3;
private const uint k_LZMA2 = 0x21;
private const uint k_LZMA = 0x030101;
private const uint k_PPMD = 0x030401;
private const uint k_BCJ = 0x03030103;
private const uint k_BCJ2 = 0x0303011B;
private const uint k_Deflate = 0x040108;
private const uint k_BZip2 = 0x040202;
internal CompressionType GetCompression()
{
var coder = Folder.Coders.First();
switch (coder.MethodId.Id)
{
case k_LZMA:
case k_LZMA2:
{
return CompressionType.LZMA;
}
case k_PPMD:
{
return CompressionType.PPMd;
}
case k_BZip2:
{
return CompressionType.BZip2;
}
default:
throw new NotImplementedException();
}
}
}
}


@@ -1,19 +0,0 @@
using System.IO;
namespace SharpCompress.Common.SevenZip
{
public class SevenZipVolume : GenericVolume
{
public SevenZipVolume(Stream stream, Options options)
: base(stream, options)
{
}
#if !PORTABLE
public SevenZipVolume(FileInfo fileInfo, Options options)
: base(fileInfo, options)
{
}
#endif
}
}


@@ -1,235 +0,0 @@
using System;
using System.IO;
#if !PORTABLE
using System.Net;
#endif
using System.Text;
namespace SharpCompress.Common.Tar.Headers
{
internal enum EntryType : byte
{
File = 0,
OldFile = (byte) '0',
HardLink = (byte) '1',
SymLink = (byte) '2',
CharDevice = (byte) '3',
BlockDevice = (byte) '4',
Directory = (byte) '5',
Fifo = (byte) '6',
LongLink = (byte) 'K',
LongName = (byte) 'L',
SparseFile = (byte) 'S',
VolumeHeader = (byte) 'V',
}
internal class TarHeader
{
internal static readonly DateTime Epoch = new DateTime(1970, 1, 1, 0, 0, 0);
internal string Name { get; set; }
//internal int Mode { get; set; }
//internal int UserId { get; set; }
//internal string UserName { get; set; }
//internal int GroupId { get; set; }
//internal string GroupName { get; set; }
internal long Size { get; set; }
internal DateTime LastModifiedTime { get; set; }
internal EntryType EntryType { get; set; }
internal Stream PackedStream { get; set; }
internal void Write(Stream output)
{
if (Name.Length > 255)
{
throw new InvalidFormatException("UsTar fileName can not be longer thatn 255 chars");
}
byte[] buffer = new byte[512];
string name = Name;
if (name.Length > 100)
{
name = Name.Substring(0, 100);
}
WriteStringBytes(name, buffer, 0, 100);
WriteOctalBytes(511, buffer, 100, 8);
WriteOctalBytes(0, buffer, 108, 8);
WriteOctalBytes(0, buffer, 116, 8);
WriteOctalBytes(Size, buffer, 124, 12);
var time = (long) (LastModifiedTime - Epoch).TotalSeconds;
WriteOctalBytes(time, buffer, 136, 12);
buffer[156] = (byte) EntryType;
//Encoding.UTF8.GetBytes("magic").CopyTo(buffer, 257);
if (Name.Length > 100)
{
name = Name.Substring(100);
ArchiveEncoding.Default.GetBytes(name).CopyTo(buffer, 345);
}
if (Size >= 0x1FFFFFFFF)
{
#if PORTABLE
byte[] bytes = BitConverter.GetBytes(Utility.HostToNetworkOrder(Size));
#else
byte[] bytes = BitConverter.GetBytes(IPAddress.HostToNetworkOrder(Size));
#endif
var bytes12 = new byte[12];
bytes.CopyTo(bytes12, 12 - bytes.Length);
bytes12[0] |= 0x80;
bytes12.CopyTo(buffer, 124);
}
int crc = RecalculateChecksum(buffer);
WriteOctalBytes(crc, buffer, 148, 8);
output.Write(buffer, 0, buffer.Length);
}
internal bool Read(BinaryReader reader)
{
byte[] buffer = reader.ReadBytes(512);
if (buffer.Length == 0)
{
return false;
}
if (buffer.Length < 512)
{
throw new InvalidOperationException();
}
Name = ArchiveEncoding.Default.GetString(buffer, 0, 100).TrimNulls();
//Mode = ReadASCIIInt32Base8(buffer, 100, 7);
//UserId = ReadASCIIInt32Base8(buffer, 108, 7);
//GroupId = ReadASCIIInt32Base8(buffer, 116, 7);
EntryType = (EntryType) buffer[156];
if ((buffer[124] & 0x80) == 0x80) // if size in binary
{
long sizeBigEndian = BitConverter.ToInt64(buffer, 0x80);
#if PORTABLE
Size = Utility.NetworkToHostOrder(sizeBigEndian);
#else
Size = IPAddress.NetworkToHostOrder(sizeBigEndian);
#endif
}
else
{
Size = ReadASCIIInt64Base8(buffer, 124, 11);
}
long unixTimeStamp = ReadASCIIInt64Base8(buffer, 136, 11);
LastModifiedTime = Epoch.AddSeconds(unixTimeStamp);
Magic = ArchiveEncoding.Default.GetString(buffer, 257, 6).TrimNulls();
if (!string.IsNullOrEmpty(Magic) && "ustar ".Equals(Magic))
{
string namePrefix = ArchiveEncoding.Default.GetString(buffer, 345, 157);
namePrefix = namePrefix.TrimNulls();
if (!string.IsNullOrEmpty(namePrefix))
{
Name = namePrefix + "/" + Name;
}
}
if (EntryType != EntryType.LongName && Name.Length == 0)
{
return false;
}
return true;
}
private static void WriteStringBytes(string name, byte[] buffer, int offset, int length)
{
int i;
for (i = 0; i < length - 1 && i < name.Length; ++i)
{
buffer[offset + i] = (byte) name[i];
}
for (; i < length; ++i)
{
buffer[offset + i] = 0;
}
}
private static void WriteOctalBytes(long value, byte[] buffer, int offset, int length)
{
string val = Convert.ToString(value, 8);
int shift = length - val.Length - 1;
for (int i = 0; i < shift; i++)
{
buffer[offset + i] = (byte) ' ';
}
for (int i = 0; i < val.Length; i++)
{
buffer[offset + i + shift] = (byte) val[i];
}
buffer[offset + length] = 0;
}
private static int ReadASCIIInt32Base8(byte[] buffer, int offset, int count)
{
string s = Encoding.UTF8.GetString(buffer, offset, count).TrimNulls();
if (string.IsNullOrEmpty(s))
{
return 0;
}
return Convert.ToInt32(s, 8);
}
private static long ReadASCIIInt64Base8(byte[] buffer, int offset, int count)
{
string s = Encoding.UTF8.GetString(buffer, offset, count).TrimNulls();
if (string.IsNullOrEmpty(s))
{
return 0;
}
return Convert.ToInt64(s, 8);
}
private static long ReadASCIIInt64(byte[] buffer, int offset, int count)
{
string s = Encoding.UTF8.GetString(buffer, offset, count).TrimNulls();
if (string.IsNullOrEmpty(s))
{
return 0;
}
return Convert.ToInt64(s);
}
internal static int RecalculateChecksum(byte[] buf)
{
// Set default value for checksum. That is 8 spaces.
Encoding.UTF8.GetBytes(" ").CopyTo(buf, 148);
// Calculate checksum
int headerChecksum = 0;
foreach (byte b in buf)
{
headerChecksum += b;
}
return headerChecksum;
}
internal static int RecalculateAltChecksum(byte[] buf)
{
Encoding.UTF8.GetBytes(" ").CopyTo(buf, 148);
int headerChecksum = 0;
foreach (byte b in buf)
{
if ((b & 0x80) == 0x80)
{
headerChecksum -= b ^ 0x80;
}
else
{
headerChecksum += b;
}
}
return headerChecksum;
}
public long? DataStartPosition { get; set; }
public string Magic { get; set; }
}
}
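Two details of TarHeader above are easy to get wrong when reasoning about them: numeric fields are space-padded octal ASCII, and the checksum is the byte sum of the 512-byte header with the checksum field itself treated as eight spaces. The following standalone sketch mirrors (but does not reuse) those helpers, with an invented size value:

using System;
using System.Text;

static class TarFieldDemo
{
    static void WriteOctal(long value, byte[] buffer, int offset, int length)
    {
        string val = Convert.ToString(value, 8);          // e.g. 1024 -> "2000"
        int shift = length - val.Length - 1;
        for (int i = 0; i < shift; i++)
        {
            buffer[offset + i] = (byte)' ';               // leading space padding
        }
        for (int i = 0; i < val.Length; i++)
        {
            buffer[offset + i + shift] = (byte)val[i];
        }
        // the final byte of the field is left as NUL in this sketch
    }

    static void Main()
    {
        var header = new byte[512];
        WriteOctal(1024, header, 124, 12);                        // size field
        Encoding.ASCII.GetBytes("        ").CopyTo(header, 148);  // checksum field defaults to 8 spaces
        int checksum = 0;
        foreach (byte b in header)
        {
            checksum += b;
        }
        Console.WriteLine(Encoding.ASCII.GetString(header, 124, 11).Trim() + " / " + checksum);
    }
}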


@@ -1,108 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
namespace SharpCompress.Common.Tar
{
public class TarEntry : Entry
{
private readonly TarFilePart filePart;
private readonly CompressionType type;
internal TarEntry(TarFilePart filePart, CompressionType type)
{
this.filePart = filePart;
this.type = type;
}
public override CompressionType CompressionType
{
get { return type; }
}
public override uint Crc
{
get { return 0; }
}
public override string FilePath
{
get { return filePart.Header.Name; }
}
public override long CompressedSize
{
get { return filePart.Header.Size; }
}
public override long Size
{
get { return filePart.Header.Size; }
}
public override DateTime? LastModifiedTime
{
get { return filePart.Header.LastModifiedTime; }
}
public override DateTime? CreatedTime
{
get { return null; }
}
public override DateTime? LastAccessedTime
{
get { return null; }
}
public override DateTime? ArchivedTime
{
get { return null; }
}
public override bool IsEncrypted
{
get { return false; }
}
public override bool IsDirectory
{
get { return filePart.Header.EntryType == EntryType.Directory; }
}
public override bool IsSplit
{
get { return false; }
}
internal override IEnumerable<FilePart> Parts
{
get { return filePart.AsEnumerable<FilePart>(); }
}
internal static IEnumerable<TarEntry> GetEntries(StreamingMode mode, Stream stream,
CompressionType compressionType)
{
foreach (TarHeader h in TarHeaderFactory.ReadHeader(mode, stream))
{
if (h != null)
{
if (mode == StreamingMode.Seekable)
{
yield return new TarEntry(new TarFilePart(h, stream), compressionType);
}
else
{
yield return new TarEntry(new TarFilePart(h, null), compressionType);
}
}
}
}
internal override void Close()
{
}
}
}


@@ -1,39 +0,0 @@
using System.IO;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
namespace SharpCompress.Common.Tar
{
internal class TarFilePart : FilePart
{
private Stream seekableStream;
internal TarFilePart(TarHeader header, Stream seekableStream)
{
this.seekableStream = seekableStream;
this.Header = header;
}
internal TarHeader Header { get; private set; }
internal override string FilePartName
{
get { return Header.Name; }
}
internal override Stream GetCompressedStream()
{
if (seekableStream != null)
{
seekableStream.Position = Header.DataStartPosition.Value;
return new ReadOnlySubStream(seekableStream, Header.Size);
}
return Header.PackedStream;
}
internal override Stream GetRawStream()
{
return null;
}
}
}


@@ -1,99 +0,0 @@
using System.IO;
namespace SharpCompress.Common.Tar
{
internal class TarReadOnlySubStream : Stream
{
private int amountRead;
public TarReadOnlySubStream(Stream stream, long bytesToRead)
{
this.Stream = stream;
this.BytesLeftToRead = bytesToRead;
}
protected override void Dispose(bool disposing)
{
if (disposing)
{
int skipBytes = this.amountRead%512;
if (skipBytes == 0)
{
return;
}
skipBytes = 512 - skipBytes;
if (skipBytes == 0)
{
return;
}
var buffer = new byte[skipBytes];
this.Stream.ReadFully(buffer);
}
}
private long BytesLeftToRead { get; set; }
public Stream Stream { get; private set; }
public override bool CanRead
{
get { return true; }
}
public override bool CanSeek
{
get { return false; }
}
public override bool CanWrite
{
get { return false; }
}
public override void Flush()
{
throw new System.NotImplementedException();
}
public override long Length
{
get { throw new System.NotImplementedException(); }
}
public override long Position
{
get { throw new System.NotImplementedException(); }
set { throw new System.NotImplementedException(); }
}
public override int Read(byte[] buffer, int offset, int count)
{
if (this.BytesLeftToRead < count)
{
count = (int) this.BytesLeftToRead;
}
int read = this.Stream.Read(buffer, offset, count);
if (read > 0)
{
this.BytesLeftToRead -= read;
this.amountRead += read;
}
return read;
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new System.NotImplementedException();
}
public override void SetLength(long value)
{
throw new System.NotImplementedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
throw new System.NotImplementedException();
}
}
}


@@ -1,19 +0,0 @@
using System.IO;
namespace SharpCompress.Common.Tar
{
public class TarVolume : GenericVolume
{
public TarVolume(Stream stream, Options options)
: base(stream, options)
{
}
#if !PORTABLE
public TarVolume(FileInfo fileInfo, Options options)
: base(fileInfo, options)
{
}
#endif
}
}


@@ -1,49 +0,0 @@
using System.IO;
using SharpCompress.IO;
namespace SharpCompress.Common
{
public abstract class Volume : IVolume
{
private readonly Stream actualStream;
internal Volume(Stream stream, Options options)
{
actualStream = stream;
Options = options;
}
internal Stream Stream
{
get { return new NonDisposingStream(actualStream); }
}
internal Options Options { get; private set; }
/// <summary>
/// RarArchive is the first volume of a multi-part archive.
/// Only supported for Rar 3.0 format and higher.
/// </summary>
public abstract bool IsFirstVolume { get; }
/// <summary>
/// RarArchive is part of a multi-part archive.
/// </summary>
public abstract bool IsMultiVolume { get; }
#if !PORTABLE
public abstract FileInfo VolumeFile { get; }
#endif
private bool disposed;
public void Dispose()
{
if (!Options.HasFlag(Options.KeepStreamsOpen) && !disposed)
{
actualStream.Dispose();
disposed = true;
}
}
}
}


@@ -1,81 +0,0 @@
using System.IO;
namespace SharpCompress.Common.Zip.Headers
{
internal class DirectoryEntryHeader : ZipFileEntry
{
public DirectoryEntryHeader()
: base(ZipHeaderType.DirectoryEntry)
{
}
internal override void Read(BinaryReader reader)
{
Version = reader.ReadUInt16();
VersionNeededToExtract = reader.ReadUInt16();
Flags = (HeaderFlags) reader.ReadUInt16();
CompressionMethod = (ZipCompressionMethod) reader.ReadUInt16();
LastModifiedTime = reader.ReadUInt16();
LastModifiedDate = reader.ReadUInt16();
Crc = reader.ReadUInt32();
CompressedSize = reader.ReadUInt32();
UncompressedSize = reader.ReadUInt32();
ushort nameLength = reader.ReadUInt16();
ushort extraLength = reader.ReadUInt16();
ushort commentLength = reader.ReadUInt16();
DiskNumberStart = reader.ReadUInt16();
InternalFileAttributes = reader.ReadUInt16();
ExternalFileAttributes = reader.ReadUInt32();
RelativeOffsetOfEntryHeader = reader.ReadUInt32();
byte[] name = reader.ReadBytes(nameLength);
Name = DecodeString(name);
byte[] extra = reader.ReadBytes(extraLength);
byte[] comment = reader.ReadBytes(commentLength);
Comment = DecodeString(comment);
LoadExtra(extra);
}
internal override void Write(BinaryWriter writer)
{
writer.Write(Version);
writer.Write(VersionNeededToExtract);
writer.Write((ushort) Flags);
writer.Write((ushort) CompressionMethod);
writer.Write(LastModifiedTime);
writer.Write(LastModifiedDate);
writer.Write(Crc);
writer.Write(CompressedSize);
writer.Write(UncompressedSize);
byte[] nameBytes = EncodeString(Name);
writer.Write((ushort) nameBytes.Length);
//writer.Write((ushort)Extra.Length);
writer.Write((ushort) 0);
writer.Write((ushort) Comment.Length);
writer.Write(DiskNumberStart);
writer.Write(InternalFileAttributes);
writer.Write(ExternalFileAttributes);
writer.Write(RelativeOffsetOfEntryHeader);
writer.Write(nameBytes);
// writer.Write(Extra);
writer.Write(Comment);
}
internal ushort Version { get; private set; }
public ushort VersionNeededToExtract { get; set; }
public uint RelativeOffsetOfEntryHeader { get; set; }
public uint ExternalFileAttributes { get; set; }
public ushort InternalFileAttributes { get; set; }
public ushort DiskNumberStart { get; set; }
public string Comment { get; private set; }
}
}


@@ -1,66 +0,0 @@
using System.IO;
namespace SharpCompress.Common.Zip.Headers
{
internal enum ExtraDataType : ushort
{
WinZipAes = 0x9901,
}
internal class ExtraData
{
internal ExtraDataType Type { get; set; }
internal ushort Length { get; set; }
internal byte[] DataBytes { get; set; }
}
internal class LocalEntryHeader : ZipFileEntry
{
public LocalEntryHeader()
: base(ZipHeaderType.LocalEntry)
{
}
internal override void Read(BinaryReader reader)
{
Version = reader.ReadUInt16();
Flags = (HeaderFlags) reader.ReadUInt16();
CompressionMethod = (ZipCompressionMethod) reader.ReadUInt16();
LastModifiedTime = reader.ReadUInt16();
LastModifiedDate = reader.ReadUInt16();
Crc = reader.ReadUInt32();
CompressedSize = reader.ReadUInt32();
UncompressedSize = reader.ReadUInt32();
ushort nameLength = reader.ReadUInt16();
ushort extraLength = reader.ReadUInt16();
byte[] name = reader.ReadBytes(nameLength);
byte[] extra = reader.ReadBytes(extraLength);
Name = DecodeString(name);
LoadExtra(extra);
}
internal override void Write(BinaryWriter writer)
{
writer.Write(Version);
writer.Write((ushort) Flags);
writer.Write((ushort) CompressionMethod);
writer.Write(LastModifiedTime);
writer.Write(LastModifiedDate);
writer.Write(Crc);
writer.Write(CompressedSize);
writer.Write(UncompressedSize);
byte[] nameBytes = EncodeString(Name);
writer.Write((ushort) nameBytes.Length);
writer.Write((ushort) 0);
//if (Extra != null)
//{
// writer.Write(Extra);
//}
writer.Write(nameBytes);
}
internal ushort Version { get; private set; }
}
}


@@ -1,91 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace SharpCompress.Common.Zip.Headers
{
internal abstract class ZipFileEntry : ZipHeader
{
protected ZipFileEntry(ZipHeaderType type)
: base(type)
{
Extra = new List<ExtraData>();
}
internal bool IsDirectory
{
get { return Name.EndsWith("/"); }
}
protected string DecodeString(byte[] str)
{
if (FlagUtility.HasFlag(Flags, HeaderFlags.UTF8))
{
return Encoding.UTF8.GetString(str, 0, str.Length);
}
return ArchiveEncoding.Default.GetString(str, 0, str.Length);
}
protected byte[] EncodeString(string str)
{
if (FlagUtility.HasFlag(Flags, HeaderFlags.UTF8))
{
return Encoding.UTF8.GetBytes(str);
}
return ArchiveEncoding.Default.GetBytes(str);
}
internal Stream PackedStream { get; set; }
internal string Name { get; set; }
internal HeaderFlags Flags { get; set; }
internal ZipCompressionMethod CompressionMethod { get; set; }
internal uint CompressedSize { get; set; }
internal long? DataStartPosition { get; set; }
internal uint UncompressedSize { get; set; }
internal List<ExtraData> Extra { get; set; }
internal PkwareTraditionalEncryptionData PkwareTraditionalEncryptionData { get; set; }
#if !PORTABLE
internal WinzipAesEncryptionData WinzipAesEncryptionData { get; set; }
#endif
internal ushort LastModifiedDate { get; set; }
internal ushort LastModifiedTime { get; set; }
internal uint Crc { get; set; }
protected void LoadExtra(byte[] extra)
{
for (int i = 0; i < extra.Length;)
{
ExtraDataType type = (ExtraDataType) BitConverter.ToUInt16(extra, i);
if (!Enum.IsDefined(typeof (ExtraDataType), type))
{
return;
}
ushort length = BitConverter.ToUInt16(extra, i + 2);
byte[] data = new byte[length];
Buffer.BlockCopy(extra, i + 4, data, 0, length);
Extra.Add(new ExtraData
{
Type = type,
Length = length,
DataBytes = data
});
i += length + 4;
}
}
internal ZipFilePart Part { get; set; }
}
}
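LoadExtra above walks the ZIP extra field as a sequence of records, each with a 2-byte id, a 2-byte data length, and then the data bytes. A standalone sketch decoding one such record (the bytes are an invented WinZip AES-style example, not taken from a real archive, and BitConverter is used in the same host-endian way as the code above):

using System;

static class ExtraFieldDemo
{
    static void Main()
    {
        // id 0x9901 (WinZip AES), data length 7, then 7 data bytes (all values invented)
        byte[] extra = { 0x01, 0x99, 0x07, 0x00, 0x01, 0x00, 0x41, 0x45, 0x03, 0x08, 0x00 };
        ushort type = BitConverter.ToUInt16(extra, 0);
        ushort length = BitConverter.ToUInt16(extra, 2);
        byte[] data = new byte[length];
        Buffer.BlockCopy(extra, 4, data, 0, length);
        Console.WriteLine("type=0x" + type.ToString("X4") + " length=" + length); // type=0x9901 length=7
    }
}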


@@ -1,77 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip
{
internal class SeekableZipHeaderFactory : ZipHeaderFactory
{
private const int MAX_ITERATIONS_FOR_DIRECTORY_HEADER = 1000;
internal SeekableZipHeaderFactory(string password)
: base(StreamingMode.Seekable, password)
{
}
internal IEnumerable<DirectoryEntryHeader> ReadSeekableHeader(Stream stream)
{
long offset = 0;
uint signature;
BinaryReader reader = new BinaryReader(stream);
int iterationCount = 0;
do
{
if ((stream.Length + offset) - 4 < 0)
{
throw new ArchiveException("Failed to locate the Zip Header");
}
stream.Seek(offset - 4, SeekOrigin.End);
signature = reader.ReadUInt32();
offset--;
iterationCount++;
if (iterationCount > MAX_ITERATIONS_FOR_DIRECTORY_HEADER)
{
throw new ArchiveException(
"Could not find Zip file Directory at the end of the file. File may be corrupted.");
}
} while (signature != DIRECTORY_END_HEADER_BYTES);
var entry = new DirectoryEndHeader();
entry.Read(reader);
stream.Seek(entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin);
DirectoryEntryHeader directoryEntryHeader = null;
long position = stream.Position;
while (true)
{
stream.Position = position;
signature = reader.ReadUInt32();
directoryEntryHeader = ReadHeader(signature, reader) as DirectoryEntryHeader;
position = stream.Position;
if (directoryEntryHeader == null)
{
yield break;
}
//entry could be zero bytes so we need to know that.
directoryEntryHeader.HasData = directoryEntryHeader.CompressedSize != 0;
yield return directoryEntryHeader;
}
}
internal LocalEntryHeader GetLocalHeader(Stream stream, DirectoryEntryHeader directoryEntryHeader)
{
stream.Seek(directoryEntryHeader.RelativeOffsetOfEntryHeader, SeekOrigin.Begin);
BinaryReader reader = new BinaryReader(stream);
uint signature = reader.ReadUInt32();
var localEntryHeader = ReadHeader(signature, reader) as LocalEntryHeader;
if (localEntryHeader == null)
{
throw new InvalidOperationException();
}
return localEntryHeader;
}
}
}
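ReadSeekableHeader above hunts backwards from the end of the stream, one byte at a time, until it sees the end-of-central-directory signature (DIRECTORY_END_HEADER_BYTES, defined in the base factory; the standard ZIP value is 0x06054b50, i.e. PK\x05\x06). The same idea over a plain byte array, as a standalone sketch with invented data:

using System;

static class EocdScanDemo
{
    const uint EndOfCentralDirectory = 0x06054b50;   // "PK\x05\x06" read as a little-endian uint

    static int FindEocd(byte[] file)
    {
        for (int pos = file.Length - 4; pos >= 0; pos--)   // walk backwards from the end
        {
            if (BitConverter.ToUInt32(file, pos) == EndOfCentralDirectory)
            {
                return pos;
            }
        }
        return -1;
    }

    static void Main()
    {
        byte[] tail = { 0x50, 0x4b, 0x05, 0x06, 0, 0, 0, 0 };
        Console.WriteLine(FindEocd(tail)); // 0
    }
}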


@@ -1,120 +0,0 @@
using System;
using System.Collections.Generic;
using SharpCompress.Common.Zip.Headers;
namespace SharpCompress.Common.Zip
{
public class ZipEntry : Entry
{
private readonly ZipFilePart filePart;
private readonly DateTime? lastModifiedTime;
internal ZipEntry(ZipFilePart filePart)
{
if (filePart != null)
{
this.filePart = filePart;
lastModifiedTime = Utility.DosDateToDateTime(filePart.Header.LastModifiedDate,
filePart.Header.LastModifiedTime);
}
}
public override CompressionType CompressionType
{
get
{
switch (filePart.Header.CompressionMethod)
{
case ZipCompressionMethod.BZip2:
{
return CompressionType.BZip2;
}
case ZipCompressionMethod.Deflate:
{
return CompressionType.Deflate;
}
case ZipCompressionMethod.LZMA:
{
return CompressionType.LZMA;
}
case ZipCompressionMethod.PPMd:
{
return CompressionType.PPMd;
}
case ZipCompressionMethod.None:
{
return CompressionType.None;
}
default:
{
return CompressionType.Unknown;
}
}
}
}
public override uint Crc
{
get { return filePart.Header.Crc; }
}
public override string FilePath
{
get { return filePart.Header.Name; }
}
public override long CompressedSize
{
get { return filePart.Header.CompressedSize; }
}
public override long Size
{
get { return filePart.Header.UncompressedSize; }
}
public override DateTime? LastModifiedTime
{
get { return lastModifiedTime; }
}
public override DateTime? CreatedTime
{
get { return null; }
}
public override DateTime? LastAccessedTime
{
get { return null; }
}
public override DateTime? ArchivedTime
{
get { return null; }
}
public override bool IsEncrypted
{
get { return FlagUtility.HasFlag(filePart.Header.Flags, HeaderFlags.Encrypted); }
}
public override bool IsDirectory
{
get { return filePart.Header.IsDirectory; }
}
public override bool IsSplit
{
get { return false; }
}
internal override IEnumerable<FilePart> Parts
{
get { return filePart.AsEnumerable<FilePart>(); }
}
internal override void Close()
{
}
}
}


@@ -1,170 +0,0 @@
using System;
using System.IO;
using System.Linq;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressor;
using SharpCompress.Compressor.BZip2;
using SharpCompress.Compressor.Deflate;
using SharpCompress.Compressor.LZMA;
using SharpCompress.Compressor.PPMd;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip
{
internal abstract class ZipFilePart : FilePart
{
internal ZipFilePart(ZipFileEntry header, Stream stream)
{
Header = header;
header.Part = this;
this.BaseStream = stream;
}
internal Stream BaseStream { get; private set; }
internal ZipFileEntry Header { get; set; }
internal override string FilePartName
{
get { return Header.Name; }
}
internal override Stream GetCompressedStream()
{
if (!Header.HasData)
{
return Stream.Null;
}
Stream decompressionStream = CreateDecompressionStream(GetCryptoStream(CreateBaseStream()));
if (LeaveStreamOpen)
{
return new NonDisposingStream(decompressionStream);
}
return decompressionStream;
}
internal override Stream GetRawStream()
{
if (!Header.HasData)
{
return Stream.Null;
}
return CreateBaseStream();
}
protected abstract Stream CreateBaseStream();
protected bool LeaveStreamOpen
{
get { return FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor); }
}
protected Stream CreateDecompressionStream(Stream stream)
{
switch (Header.CompressionMethod)
{
case ZipCompressionMethod.None:
{
return stream;
}
case ZipCompressionMethod.Deflate:
{
return new DeflateStream(stream, CompressionMode.Decompress);
}
case ZipCompressionMethod.BZip2:
{
return new BZip2Stream(stream, CompressionMode.Decompress);
}
case ZipCompressionMethod.LZMA:
{
if (FlagUtility.HasFlag(Header.Flags, HeaderFlags.Encrypted))
{
throw new NotSupportedException("LZMA with pkware encryption.");
}
var reader = new BinaryReader(stream);
reader.ReadUInt16(); //LZMA version
var props = new byte[reader.ReadUInt16()];
reader.Read(props, 0, props.Length);
return new LzmaStream(props, stream,
Header.CompressedSize > 0 ? Header.CompressedSize - 4 - props.Length : -1,
FlagUtility.HasFlag(Header.Flags, HeaderFlags.Bit1)
? -1
: (long) Header.UncompressedSize);
}
case ZipCompressionMethod.PPMd:
{
var props = new byte[2];
stream.Read(props, 0, props.Length);
return new PpmdStream(new PpmdProperties(props), stream, false);
}
case ZipCompressionMethod.WinzipAes:
{
ExtraData data = Header.Extra.Where(x => x.Type == ExtraDataType.WinZipAes).SingleOrDefault();
if (data == null)
{
throw new InvalidFormatException("No Winzip AES extra data found.");
}
if (data.Length != 7)
{
throw new InvalidFormatException("Winzip data length is not 7.");
}
ushort method = BitConverter.ToUInt16(data.DataBytes, 0);
if (method != 0x01 && method != 0x02)
{
throw new InvalidFormatException("Unexpected vendor version number for WinZip AES metadata");
}
ushort vendorId = BitConverter.ToUInt16(data.DataBytes, 2);
if (vendorId != 0x4541)
{
throw new InvalidFormatException("Unexpected vendor ID for WinZip AES metadata");
}
Header.CompressionMethod = (ZipCompressionMethod) BitConverter.ToUInt16(data.DataBytes, 5);
return CreateDecompressionStream(stream);
}
default:
{
throw new NotSupportedException("CompressionMethod: " + Header.CompressionMethod);
}
}
}
protected Stream GetCryptoStream(Stream plainStream)
{
if ((Header.CompressedSize == 0)
#if !PORTABLE
&& ((Header.PkwareTraditionalEncryptionData != null)
|| (Header.WinzipAesEncryptionData != null)))
#else
&& (Header.PkwareTraditionalEncryptionData != null))
#endif
{
throw new NotSupportedException("Cannot encrypt file with unknown size at start.");
}
if ((Header.CompressedSize == 0)
&& FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor))
{
plainStream = new NonDisposingStream(plainStream); //make sure AES doesn't close
}
else
{
plainStream = new ReadOnlySubStream(plainStream, Header.CompressedSize); //make sure AES doesn't close
}
if (Header.PkwareTraditionalEncryptionData != null)
{
return new PkwareTraditionalCryptoStream(plainStream, Header.PkwareTraditionalEncryptionData,
CryptoMode.Decrypt);
}
#if !PORTABLE
if (Header.WinzipAesEncryptionData != null)
{
//only read 10 less because the last ten are auth bytes
return new WinzipAesCryptoStream(plainStream, Header.WinzipAesEncryptionData,
Header.CompressedSize - 10, CryptoMode.Decrypt);
}
#endif
return plainStream;
}
}
}


@@ -1,21 +0,0 @@
using System.IO;
namespace SharpCompress.Common.Zip
{
public class ZipVolume : GenericVolume
{
public ZipVolume(Stream stream, Options options)
: base(stream, options)
{
}
#if !PORTABLE
public ZipVolume(FileInfo fileInfo, Options options)
: base(fileInfo, options)
{
}
#endif
public string Comment { get; internal set; }
}
}


@@ -1,101 +0,0 @@
/*
* Copyright 2001,2004-2005 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* This package is based on the work done by Keiron Liddle, Aftex Software
* <keiron@aftexsw.com> to whom the Ant project is very grateful for his
* great code.
*/
namespace SharpCompress.Compressor.BZip2
{
/**
* Base class for both the compress and decompress classes.
* Holds common arrays, and static data.
*
* @author <a href="mailto:keiron@aftexsw.com">Keiron Liddle</a>
*/
internal class BZip2Constants
{
public const int baseBlockSize = 100000;
public const int MAX_ALPHA_SIZE = 258;
public const int MAX_CODE_LEN = 23;
public const int RUNA = 0;
public const int RUNB = 1;
public const int N_GROUPS = 6;
public const int G_SIZE = 50;
public const int N_ITERS = 4;
public const int MAX_SELECTORS = (2 + (900000/G_SIZE));
public const int NUM_OVERSHOOT_BYTES = 20;
public static int[] rNums =
{
619, 720, 127, 481, 931, 816, 813, 233, 566, 247,
985, 724, 205, 454, 863, 491, 741, 242, 949, 214,
733, 859, 335, 708, 621, 574, 73, 654, 730, 472,
419, 436, 278, 496, 867, 210, 399, 680, 480, 51,
878, 465, 811, 169, 869, 675, 611, 697, 867, 561,
862, 687, 507, 283, 482, 129, 807, 591, 733, 623,
150, 238, 59, 379, 684, 877, 625, 169, 643, 105,
170, 607, 520, 932, 727, 476, 693, 425, 174, 647,
73, 122, 335, 530, 442, 853, 695, 249, 445, 515,
909, 545, 703, 919, 874, 474, 882, 500, 594, 612,
641, 801, 220, 162, 819, 984, 589, 513, 495, 799,
161, 604, 958, 533, 221, 400, 386, 867, 600, 782,
382, 596, 414, 171, 516, 375, 682, 485, 911, 276,
98, 553, 163, 354, 666, 933, 424, 341, 533, 870,
227, 730, 475, 186, 263, 647, 537, 686, 600, 224,
469, 68, 770, 919, 190, 373, 294, 822, 808, 206,
184, 943, 795, 384, 383, 461, 404, 758, 839, 887,
715, 67, 618, 276, 204, 918, 873, 777, 604, 560,
951, 160, 578, 722, 79, 804, 96, 409, 713, 940,
652, 934, 970, 447, 318, 353, 859, 672, 112, 785,
645, 863, 803, 350, 139, 93, 354, 99, 820, 908,
609, 772, 154, 274, 580, 184, 79, 626, 630, 742,
653, 282, 762, 623, 680, 81, 927, 626, 789, 125,
411, 521, 938, 300, 821, 78, 343, 175, 128, 250,
170, 774, 972, 275, 999, 639, 495, 78, 352, 126,
857, 956, 358, 619, 580, 124, 737, 594, 701, 612,
669, 112, 134, 694, 363, 992, 809, 743, 168, 974,
944, 375, 748, 52, 600, 747, 642, 182, 862, 81,
344, 805, 988, 739, 511, 655, 814, 334, 249, 515,
897, 955, 664, 981, 649, 113, 974, 459, 893, 228,
433, 837, 553, 268, 926, 240, 102, 654, 459, 51,
686, 754, 806, 760, 493, 403, 415, 394, 687, 700,
946, 670, 656, 610, 738, 392, 760, 799, 887, 653,
978, 321, 576, 617, 626, 502, 894, 679, 243, 440,
680, 879, 194, 572, 640, 724, 926, 56, 204, 700,
707, 151, 457, 449, 797, 195, 791, 558, 945, 679,
297, 59, 87, 824, 713, 663, 412, 693, 342, 606,
134, 108, 571, 364, 631, 212, 174, 643, 304, 329,
343, 97, 430, 751, 497, 314, 983, 374, 822, 928,
140, 206, 73, 263, 980, 736, 876, 478, 430, 305,
170, 514, 364, 692, 829, 82, 855, 953, 676, 246,
369, 970, 294, 750, 807, 827, 150, 790, 288, 923,
804, 378, 215, 828, 592, 281, 565, 555, 710, 82,
896, 831, 547, 261, 524, 462, 293, 465, 502, 56,
661, 821, 976, 991, 658, 869, 905, 758, 745, 193,
768, 550, 608, 933, 378, 286, 215, 979, 792, 961,
61, 688, 793, 644, 986, 403, 106, 366, 905, 644,
372, 567, 466, 434, 645, 210, 389, 550, 919, 135,
780, 773, 635, 389, 707, 100, 626, 958, 165, 504,
920, 176, 193, 713, 857, 265, 203, 50, 668, 108,
645, 990, 626, 197, 510, 357, 358, 850, 858, 364,
936, 638
};
}
}


@@ -1,203 +0,0 @@
/*
* Copyright 2001,2004-2005 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* This package is based on the work done by Keiron Liddle), Aftex Software
* <keiron@aftexsw.com> to whom the Ant project is very grateful for his
* great code.
*/
namespace SharpCompress.Compressor.BZip2
{
/**
* A simple class the hold and calculate the CRC for sanity checking
* of the data.
*
* @author <a href="mailto:keiron@aftexsw.com">Keiron Liddle</a>
*/
internal class CRC
{
public static int[] crc32Table =
{
unchecked((int) 0x00000000), unchecked((int) 0x04c11db7), unchecked((int) 0x09823b6e),
unchecked((int) 0x0d4326d9),
unchecked((int) 0x130476dc), unchecked((int) 0x17c56b6b), unchecked((int) 0x1a864db2),
unchecked((int) 0x1e475005),
unchecked((int) 0x2608edb8), unchecked((int) 0x22c9f00f), unchecked((int) 0x2f8ad6d6),
unchecked((int) 0x2b4bcb61),
unchecked((int) 0x350c9b64), unchecked((int) 0x31cd86d3), unchecked((int) 0x3c8ea00a),
unchecked((int) 0x384fbdbd),
unchecked((int) 0x4c11db70), unchecked((int) 0x48d0c6c7), unchecked((int) 0x4593e01e),
unchecked((int) 0x4152fda9),
unchecked((int) 0x5f15adac), unchecked((int) 0x5bd4b01b), unchecked((int) 0x569796c2),
unchecked((int) 0x52568b75),
unchecked((int) 0x6a1936c8), unchecked((int) 0x6ed82b7f), unchecked((int) 0x639b0da6),
unchecked((int) 0x675a1011),
unchecked((int) 0x791d4014), unchecked((int) 0x7ddc5da3), unchecked((int) 0x709f7b7a),
unchecked((int) 0x745e66cd),
unchecked((int) 0x9823b6e0), unchecked((int) 0x9ce2ab57), unchecked((int) 0x91a18d8e),
unchecked((int) 0x95609039),
unchecked((int) 0x8b27c03c), unchecked((int) 0x8fe6dd8b), unchecked((int) 0x82a5fb52),
unchecked((int) 0x8664e6e5),
unchecked((int) 0xbe2b5b58), unchecked((int) 0xbaea46ef), unchecked((int) 0xb7a96036),
unchecked((int) 0xb3687d81),
unchecked((int) 0xad2f2d84), unchecked((int) 0xa9ee3033), unchecked((int) 0xa4ad16ea),
unchecked((int) 0xa06c0b5d),
unchecked((int) 0xd4326d90), unchecked((int) 0xd0f37027), unchecked((int) 0xddb056fe),
unchecked((int) 0xd9714b49),
unchecked((int) 0xc7361b4c), unchecked((int) 0xc3f706fb), unchecked((int) 0xceb42022),
unchecked((int) 0xca753d95),
unchecked((int) 0xf23a8028), unchecked((int) 0xf6fb9d9f), unchecked((int) 0xfbb8bb46),
unchecked((int) 0xff79a6f1),
unchecked((int) 0xe13ef6f4), unchecked((int) 0xe5ffeb43), unchecked((int) 0xe8bccd9a),
unchecked((int) 0xec7dd02d),
unchecked((int) 0x34867077), unchecked((int) 0x30476dc0), unchecked((int) 0x3d044b19),
unchecked((int) 0x39c556ae),
unchecked((int) 0x278206ab), unchecked((int) 0x23431b1c), unchecked((int) 0x2e003dc5),
unchecked((int) 0x2ac12072),
unchecked((int) 0x128e9dcf), unchecked((int) 0x164f8078), unchecked((int) 0x1b0ca6a1),
unchecked((int) 0x1fcdbb16),
unchecked((int) 0x018aeb13), unchecked((int) 0x054bf6a4), unchecked((int) 0x0808d07d),
unchecked((int) 0x0cc9cdca),
unchecked((int) 0x7897ab07), unchecked((int) 0x7c56b6b0), unchecked((int) 0x71159069),
unchecked((int) 0x75d48dde),
unchecked((int) 0x6b93dddb), unchecked((int) 0x6f52c06c), unchecked((int) 0x6211e6b5),
unchecked((int) 0x66d0fb02),
unchecked((int) 0x5e9f46bf), unchecked((int) 0x5a5e5b08), unchecked((int) 0x571d7dd1),
unchecked((int) 0x53dc6066),
unchecked((int) 0x4d9b3063), unchecked((int) 0x495a2dd4), unchecked((int) 0x44190b0d),
unchecked((int) 0x40d816ba),
unchecked((int) 0xaca5c697), unchecked((int) 0xa864db20), unchecked((int) 0xa527fdf9),
unchecked((int) 0xa1e6e04e),
unchecked((int) 0xbfa1b04b), unchecked((int) 0xbb60adfc), unchecked((int) 0xb6238b25),
unchecked((int) 0xb2e29692),
unchecked((int) 0x8aad2b2f), unchecked((int) 0x8e6c3698), unchecked((int) 0x832f1041),
unchecked((int) 0x87ee0df6),
unchecked((int) 0x99a95df3), unchecked((int) 0x9d684044), unchecked((int) 0x902b669d),
unchecked((int) 0x94ea7b2a),
unchecked((int) 0xe0b41de7), unchecked((int) 0xe4750050), unchecked((int) 0xe9362689),
unchecked((int) 0xedf73b3e),
unchecked((int) 0xf3b06b3b), unchecked((int) 0xf771768c), unchecked((int) 0xfa325055),
unchecked((int) 0xfef34de2),
unchecked((int) 0xc6bcf05f), unchecked((int) 0xc27dede8), unchecked((int) 0xcf3ecb31),
unchecked((int) 0xcbffd686),
unchecked((int) 0xd5b88683), unchecked((int) 0xd1799b34), unchecked((int) 0xdc3abded),
unchecked((int) 0xd8fba05a),
unchecked((int) 0x690ce0ee), unchecked((int) 0x6dcdfd59), unchecked((int) 0x608edb80),
unchecked((int) 0x644fc637),
unchecked((int) 0x7a089632), unchecked((int) 0x7ec98b85), unchecked((int) 0x738aad5c),
unchecked((int) 0x774bb0eb),
unchecked((int) 0x4f040d56), unchecked((int) 0x4bc510e1), unchecked((int) 0x46863638),
unchecked((int) 0x42472b8f),
unchecked((int) 0x5c007b8a), unchecked((int) 0x58c1663d), unchecked((int) 0x558240e4),
unchecked((int) 0x51435d53),
unchecked((int) 0x251d3b9e), unchecked((int) 0x21dc2629), unchecked((int) 0x2c9f00f0),
unchecked((int) 0x285e1d47),
unchecked((int) 0x36194d42), unchecked((int) 0x32d850f5), unchecked((int) 0x3f9b762c),
unchecked((int) 0x3b5a6b9b),
unchecked((int) 0x0315d626), unchecked((int) 0x07d4cb91), unchecked((int) 0x0a97ed48),
unchecked((int) 0x0e56f0ff),
unchecked((int) 0x1011a0fa), unchecked((int) 0x14d0bd4d), unchecked((int) 0x19939b94),
unchecked((int) 0x1d528623),
unchecked((int) 0xf12f560e), unchecked((int) 0xf5ee4bb9), unchecked((int) 0xf8ad6d60),
unchecked((int) 0xfc6c70d7),
unchecked((int) 0xe22b20d2), unchecked((int) 0xe6ea3d65), unchecked((int) 0xeba91bbc),
unchecked((int) 0xef68060b),
unchecked((int) 0xd727bbb6), unchecked((int) 0xd3e6a601), unchecked((int) 0xdea580d8),
unchecked((int) 0xda649d6f),
unchecked((int) 0xc423cd6a), unchecked((int) 0xc0e2d0dd), unchecked((int) 0xcda1f604),
unchecked((int) 0xc960ebb3),
unchecked((int) 0xbd3e8d7e), unchecked((int) 0xb9ff90c9), unchecked((int) 0xb4bcb610),
unchecked((int) 0xb07daba7),
unchecked((int) 0xae3afba2), unchecked((int) 0xaafbe615), unchecked((int) 0xa7b8c0cc),
unchecked((int) 0xa379dd7b),
unchecked((int) 0x9b3660c6), unchecked((int) 0x9ff77d71), unchecked((int) 0x92b45ba8),
unchecked((int) 0x9675461f),
unchecked((int) 0x8832161a), unchecked((int) 0x8cf30bad), unchecked((int) 0x81b02d74),
unchecked((int) 0x857130c3),
unchecked((int) 0x5d8a9099), unchecked((int) 0x594b8d2e), unchecked((int) 0x5408abf7),
unchecked((int) 0x50c9b640),
unchecked((int) 0x4e8ee645), unchecked((int) 0x4a4ffbf2), unchecked((int) 0x470cdd2b),
unchecked((int) 0x43cdc09c),
unchecked((int) 0x7b827d21), unchecked((int) 0x7f436096), unchecked((int) 0x7200464f),
unchecked((int) 0x76c15bf8),
unchecked((int) 0x68860bfd), unchecked((int) 0x6c47164a), unchecked((int) 0x61043093),
unchecked((int) 0x65c52d24),
unchecked((int) 0x119b4be9), unchecked((int) 0x155a565e), unchecked((int) 0x18197087),
unchecked((int) 0x1cd86d30),
unchecked((int) 0x029f3d35), unchecked((int) 0x065e2082), unchecked((int) 0x0b1d065b),
unchecked((int) 0x0fdc1bec),
unchecked((int) 0x3793a651), unchecked((int) 0x3352bbe6), unchecked((int) 0x3e119d3f),
unchecked((int) 0x3ad08088),
unchecked((int) 0x2497d08d), unchecked((int) 0x2056cd3a), unchecked((int) 0x2d15ebe3),
unchecked((int) 0x29d4f654),
unchecked((int) 0xc5a92679), unchecked((int) 0xc1683bce), unchecked((int) 0xcc2b1d17),
unchecked((int) 0xc8ea00a0),
unchecked((int) 0xd6ad50a5), unchecked((int) 0xd26c4d12), unchecked((int) 0xdf2f6bcb),
unchecked((int) 0xdbee767c),
unchecked((int) 0xe3a1cbc1), unchecked((int) 0xe760d676), unchecked((int) 0xea23f0af),
unchecked((int) 0xeee2ed18),
unchecked((int) 0xf0a5bd1d), unchecked((int) 0xf464a0aa), unchecked((int) 0xf9278673),
unchecked((int) 0xfde69bc4),
unchecked((int) 0x89b8fd09), unchecked((int) 0x8d79e0be), unchecked((int) 0x803ac667),
unchecked((int) 0x84fbdbd0),
unchecked((int) 0x9abc8bd5), unchecked((int) 0x9e7d9662), unchecked((int) 0x933eb0bb),
unchecked((int) 0x97ffad0c),
unchecked((int) 0xafb010b1), unchecked((int) 0xab710d06), unchecked((int) 0xa6322bdf),
unchecked((int) 0xa2f33668),
unchecked((int) 0xbcb4666d), unchecked((int) 0xb8757bda), unchecked((int) 0xb5365d03),
unchecked((int) 0xb1f740b4)
};
public CRC()
{
InitialiseCRC();
}
internal void InitialiseCRC()
{
globalCrc = unchecked((int) 0xffffffff);
}
internal int GetFinalCRC()
{
return ~globalCrc;
}
internal int GetGlobalCRC()
{
return globalCrc;
}
internal void SetGlobalCRC(int newCrc)
{
globalCrc = newCrc;
}
internal void UpdateCRC(int inCh)
{
int temp = (globalCrc >> 24) ^ inCh;
if (temp < 0)
{
temp = 256 + temp;
}
globalCrc = (globalCrc << 8) ^ CRC.crc32Table[temp];
}
internal int globalCrc;
}
}
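For orientation, a short usage sketch of the CRC class above: UpdateCRC folds one byte at a time into globalCrc via the MSB-first table, and GetFinalCRC returns the bitwise complement. The ComputeCrc helper and wrapper class below are illustrative only and assume the CRC type is visible from the calling code.

// Minimal usage sketch (assumes the CRC class above is accessible from this scope).
internal static class CrcExample
{
// Computes the table-driven, MSB-first CRC-32 of a buffer using the class above.
internal static int ComputeCrc(byte[] data)
{
var crc = new CRC(); // constructor calls InitialiseCRC()
foreach (byte b in data)
{
crc.UpdateCRC(b); // shifts globalCrc left by 8 and XORs in a table entry
}
return crc.GetFinalCRC(); // final complement (~globalCrc)
}
}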

File diff suppressed because it is too large.


@@ -1,464 +0,0 @@
// Tree.cs
// ------------------------------------------------------------------
//
// Copyright (c) 2009 Dino Chiesa and Microsoft Corporation.
// All rights reserved.
//
// This code module is part of DotNetZip, a zipfile class library.
//
// ------------------------------------------------------------------
//
// This code is licensed under the Microsoft Public License.
// See the file License.txt for the license details.
// More info on: http://dotnetzip.codeplex.com
//
// ------------------------------------------------------------------
//
// last saved (in emacs):
// Time-stamp: <2009-October-28 13:29:50>
//
// ------------------------------------------------------------------
//
// This module defines classes for zlib compression and
// decompression. This code is derived from the jzlib implementation of
// zlib. In keeping with the license for jzlib, the copyright to that
// code is below.
//
// ------------------------------------------------------------------
//
// Copyright (c) 2000,2001,2002,2003 ymnk, JCraft,Inc. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in
// the documentation and/or other materials provided with the distribution.
//
// 3. The names of the authors may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
// FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JCRAFT,
// INC. OR ANY CONTRIBUTORS TO THIS SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
// OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// -----------------------------------------------------------------------
//
// This program is based on zlib-1.1.3; credit to authors
// Jean-loup Gailly(jloup@gzip.org) and Mark Adler(madler@alumni.caltech.edu)
// and contributors of zlib.
//
// -----------------------------------------------------------------------
using System;
namespace SharpCompress.Compressor.Deflate
{
internal sealed partial class DeflateManager
{
#region Nested type: Tree
private sealed class Tree
{
internal const int Buf_size = 8*2;
private static readonly int HEAP_SIZE = (2*InternalConstants.L_CODES + 1);
internal static readonly sbyte[] bl_order = new sbyte[]
{
16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15
};
// The lengths of the bit length codes are sent in order of decreasing
// probability, to avoid transmitting the lengths for unused bit
// length codes.
// see definition of array dist_code below
//internal const int DIST_CODE_LEN = 512;
private static readonly sbyte[] _dist_code = new sbyte[]
{
0, 1, 2, 3, 4, 4, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7,
8, 8, 8, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 9,
10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
10, 10,
11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
11, 11,
12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
12, 12,
12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
12, 12,
13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
13, 13,
13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
13, 13,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 14,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 14,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 14,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 14,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15,
0, 0, 16, 17, 18, 18, 19, 19, 20, 20, 20, 20, 21, 21,
21, 21,
22, 22, 22, 22, 22, 22, 22, 22, 23, 23, 23, 23, 23, 23,
23, 23,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24,
25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
25, 25,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26,
27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
27, 27,
27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
27, 27,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29
};
internal static readonly sbyte[] LengthCode = new sbyte[]
{
0, 1, 2, 3, 4, 5, 6, 7, 8, 8, 9, 9, 10, 10, 11, 11,
12, 12, 12, 12, 13, 13, 13, 13, 14, 14, 14, 14, 15, 15, 15, 15,
16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 17, 17, 17, 17,
18, 18, 18, 18, 18, 18, 18, 18, 19, 19, 19, 19, 19, 19, 19, 19,
20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22,
23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 28
};
internal static readonly int[] LengthBase = new[]
{
0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24, 28,
32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 0
};
internal static readonly int[] DistanceBase = new[]
{
0, 1, 2, 3, 4, 6, 8, 12, 16, 24, 32, 48, 64, 96, 128, 192,
256, 384, 512, 768, 1024, 1536, 2048, 3072, 4096, 6144, 8192, 12288, 16384, 24576
};
internal short[] dyn_tree; // the dynamic tree
internal int max_code; // largest code with non zero frequency
internal StaticTree staticTree; // the corresponding static tree
/// <summary>
/// Map from a distance to a distance code.
/// </summary>
/// <remarks>
/// No side effects. _dist_code[256] and _dist_code[257] are never used.
/// </remarks>
internal static int DistanceCode(int dist)
{
return (dist < 256)
? _dist_code[dist]
: _dist_code[256 + SharedUtils.URShift(dist, 7)];
}
// Compute the optimal bit lengths for a tree and update the total bit length
// for the current block.
// IN assertion: the fields freq and dad are set, heap[heap_max] and
// above are the tree nodes sorted by increasing frequency.
// OUT assertions: the field len is set to the optimal bit length, the
// array bl_count contains the frequencies for each bit length.
// The length opt_len is updated; static_len is also updated if stree is
// not null.
internal void gen_bitlen(DeflateManager s)
{
short[] tree = dyn_tree;
short[] stree = staticTree.treeCodes;
int[] extra = staticTree.extraBits;
int base_Renamed = staticTree.extraBase;
int max_length = staticTree.maxLength;
int h; // heap index
int n, m; // iterate over the tree elements
int bits; // bit length
int xbits; // extra bits
short f; // frequency
int overflow = 0; // number of elements with bit length too large
for (bits = 0; bits <= InternalConstants.MAX_BITS; bits++)
s.bl_count[bits] = 0;
// In a first pass, compute the optimal bit lengths (which may
// overflow in the case of the bit length tree).
tree[s.heap[s.heap_max]*2 + 1] = 0; // root of the heap
for (h = s.heap_max + 1; h < HEAP_SIZE; h++)
{
n = s.heap[h];
bits = tree[tree[n*2 + 1]*2 + 1] + 1;
if (bits > max_length)
{
bits = max_length;
overflow++;
}
tree[n*2 + 1] = (short) bits;
// We overwrite tree[n*2+1] which is no longer needed
if (n > max_code)
continue; // not a leaf node
s.bl_count[bits]++;
xbits = 0;
if (n >= base_Renamed)
xbits = extra[n - base_Renamed];
f = tree[n*2];
s.opt_len += f*(bits + xbits);
if (stree != null)
s.static_len += f*(stree[n*2 + 1] + xbits);
}
if (overflow == 0)
return;
// This happens for example on obj2 and pic of the Calgary corpus
// Find the first bit length which could increase:
do
{
bits = max_length - 1;
while (s.bl_count[bits] == 0)
bits--;
s.bl_count[bits]--; // move one leaf down the tree
s.bl_count[bits + 1] = (short) (s.bl_count[bits + 1] + 2); // move one overflow item as its brother
s.bl_count[max_length]--;
// The brother of the overflow item also moves one step up,
// but this does not affect bl_count[max_length]
overflow -= 2;
} while (overflow > 0);
for (bits = max_length; bits != 0; bits--)
{
n = s.bl_count[bits];
while (n != 0)
{
m = s.heap[--h];
if (m > max_code)
continue;
if (tree[m*2 + 1] != bits)
{
s.opt_len = (int) (s.opt_len + (bits - (long) tree[m*2 + 1])*tree[m*2]);
tree[m*2 + 1] = (short) bits;
}
n--;
}
}
}
// Construct one Huffman tree and assign the code bit strings and lengths.
// Update the total bit length for the current block.
// IN assertion: the field freq is set for all tree elements.
// OUT assertions: the fields len and code are set to the optimal bit length
// and corresponding code. The length opt_len is updated; static_len is
// also updated if stree is not null. The field max_code is set.
internal void build_tree(DeflateManager s)
{
short[] tree = dyn_tree;
short[] stree = staticTree.treeCodes;
int elems = staticTree.elems;
int n, m; // iterate over heap elements
int max_code = -1; // largest code with non zero frequency
int node; // new node being created
// Construct the initial heap, with least frequent element in
// heap[1]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
// heap[0] is not used.
s.heap_len = 0;
s.heap_max = HEAP_SIZE;
for (n = 0; n < elems; n++)
{
if (tree[n*2] != 0)
{
s.heap[++s.heap_len] = max_code = n;
s.depth[n] = 0;
}
else
{
tree[n*2 + 1] = 0;
}
}
// The pkzip format requires that at least one distance code exists,
// and that at least one bit should be sent even if there is only one
// possible code. So to avoid special checks later on we force at least
// two codes of non zero frequency.
while (s.heap_len < 2)
{
node = s.heap[++s.heap_len] = (max_code < 2 ? ++max_code : 0);
tree[node*2] = 1;
s.depth[node] = 0;
s.opt_len--;
if (stree != null)
s.static_len -= stree[node*2 + 1];
// node is 0 or 1 so it does not have extra bits
}
this.max_code = max_code;
// The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
// establish sub-heaps of increasing lengths:
for (n = s.heap_len/2; n >= 1; n--)
s.pqdownheap(tree, n);
// Construct the Huffman tree by repeatedly combining the least two
// frequent nodes.
node = elems; // next internal node of the tree
do
{
// n = node of least frequency
n = s.heap[1];
s.heap[1] = s.heap[s.heap_len--];
s.pqdownheap(tree, 1);
m = s.heap[1]; // m = node of next least frequency
s.heap[--s.heap_max] = n; // keep the nodes sorted by frequency
s.heap[--s.heap_max] = m;
// Create a new node father of n and m
tree[node*2] = unchecked((short) (tree[n*2] + tree[m*2]));
s.depth[node] = (sbyte) (Math.Max((byte) s.depth[n], (byte) s.depth[m]) + 1);
tree[n*2 + 1] = tree[m*2 + 1] = (short) node;
// and insert the new node in the heap
s.heap[1] = node++;
s.pqdownheap(tree, 1);
} while (s.heap_len >= 2);
s.heap[--s.heap_max] = s.heap[1];
// At this point, the fields freq and dad are set. We can now
// generate the bit lengths.
gen_bitlen(s);
// The field len is now set, we can generate the bit codes
gen_codes(tree, max_code, s.bl_count);
}
// Generate the codes for a given tree and bit counts (which need not be
// optimal).
// IN assertion: the array bl_count contains the bit length statistics for
// the given tree and the field len is set for all tree elements.
// OUT assertion: the field code is set for all tree elements of non
// zero code length.
internal static void gen_codes(short[] tree, int max_code, short[] bl_count)
{
var next_code = new short[InternalConstants.MAX_BITS + 1]; // next code value for each bit length
short code = 0; // running code value
int bits; // bit index
int n; // code index
// The distribution counts are first used to generate the code values
// without bit reversal.
for (bits = 1; bits <= InternalConstants.MAX_BITS; bits++)
unchecked
{
next_code[bits] = code = (short) ((code + bl_count[bits - 1]) << 1);
}
// Check that the bit counts in bl_count are consistent. The last code
// must be all ones.
//Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
// "inconsistent bit counts");
//Tracev((stderr,"\ngen_codes: max_code %d ", max_code));
for (n = 0; n <= max_code; n++)
{
int len = tree[n*2 + 1];
if (len == 0)
continue;
// Now reverse the bits
tree[n*2] = unchecked((short) (bi_reverse(next_code[len]++, len)));
}
}
// Reverse the first len bits of a code, using straightforward code (a faster
// method would use a table)
// IN assertion: 1 <= len <= 15
internal static int bi_reverse(int code, int len)
{
int res = 0;
do
{
res |= code & 1;
code >>= 1; //SharedUtils.URShift(code, 1);
res <<= 1;
} while (--len > 0);
return res >> 1;
}
}
#endregion
}
}
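As a standalone illustration of what gen_codes does, the sketch below assigns canonical codes from per-symbol bit lengths and reverses them with the same loop as bi_reverse; the HuffmanSketch, ReverseBits and CanonicalCodes names are illustrative and not part of the class above.

internal static class HuffmanSketch
{
// Reverse the low 'len' bits of 'code', as bi_reverse above does.
internal static int ReverseBits(int code, int len)
{
int res = 0;
do
{
res |= code & 1;
code >>= 1;
res <<= 1;
} while (--len > 0);
return res >> 1;
}
// Given code lengths per symbol, compute canonical (bit-reversed) codes,
// mirroring the two passes performed by gen_codes.
internal static int[] CanonicalCodes(int[] lengths, int maxBits)
{
var blCount = new int[maxBits + 1];
foreach (int len in lengths)
{
if (len > 0)
blCount[len]++;
}
var nextCode = new int[maxBits + 1];
int code = 0;
for (int bits = 1; bits <= maxBits; bits++)
nextCode[bits] = code = (code + blCount[bits - 1]) << 1;
var codes = new int[lengths.Length];
for (int n = 0; n < lengths.Length; n++)
{
int len = lengths[n];
if (len != 0)
codes[n] = ReverseBits(nextCode[len]++, len); // bit-reversed canonical code
}
return codes;
}
}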


@@ -1,207 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Compressor.Filters
{
internal class BCJ2Filter : Stream
{
private readonly Stream baseStream;
private readonly byte[] input = new byte[4096];
private int inputOffset = 0;
private int inputCount = 0;
private bool endReached = false;
private long position = 0;
private byte[] output = new byte[4];
private int outputOffset = 0;
private int outputCount = 0;
private byte[] control;
private byte[] data1;
private byte[] data2;
private int controlPos = 0;
private int data1Pos = 0;
private int data2Pos = 0;
private ushort[] p = new ushort[256 + 2];
private uint range, code;
private byte prevByte = 0;
private const int kNumTopBits = 24;
private const int kTopValue = 1 << kNumTopBits;
private const int kNumBitModelTotalBits = 11;
private const int kBitModelTotal = 1 << kNumBitModelTotalBits;
private const int kNumMoveBits = 5;
private static bool IsJ(byte b0, byte b1)
{
return (b1 & 0xFE) == 0xE8 || IsJcc(b0, b1);
}
private static bool IsJcc(byte b0, byte b1)
{
return b0 == 0x0F && (b1 & 0xF0) == 0x80;
}
public BCJ2Filter(byte[] control, byte[] data1, byte[] data2, Stream baseStream)
{
this.control = control;
this.data1 = data1;
this.data2 = data2;
this.baseStream = baseStream;
int i;
for (i = 0; i < p.Length; i++)
p[i] = kBitModelTotal >> 1;
code = 0;
range = 0xFFFFFFFF;
for (i = 0; i < 5; i++)
code = (code << 8) | control[controlPos++];
}
public override bool CanRead
{
get { return true; }
}
public override bool CanSeek
{
get { return false; }
}
public override bool CanWrite
{
get { return false; }
}
public override void Flush()
{
throw new NotImplementedException();
}
public override long Length
{
get { return baseStream.Length + data1.Length + data2.Length; }
}
public override long Position
{
get { return position; }
set { throw new NotImplementedException(); }
}
public override int Read(byte[] buffer, int offset, int count)
{
int size = 0;
byte b = 0;
while (!endReached && size < count)
{
while (outputOffset < outputCount)
{
b = output[outputOffset++];
buffer[offset++] = b;
size++;
position++;
prevByte = b;
if (size == count)
return size;
}
if (inputOffset == inputCount)
{
inputOffset = 0;
inputCount = baseStream.Read(input, 0, input.Length);
if (inputCount == 0)
{
endReached = true;
break;
}
}
b = input[inputOffset++];
buffer[offset++] = b;
size++;
position++;
if (!IsJ(prevByte, b))
prevByte = b;
else
{
int prob;
if (b == 0xE8)
prob = prevByte;
else if (b == 0xE9)
prob = 256;
else
prob = 257;
uint bound = (range >> kNumBitModelTotalBits) * p[prob];
if (code < bound)
{
range = bound;
p[prob] += (ushort)((kBitModelTotal - p[prob]) >> kNumMoveBits);
if (range < kTopValue)
{
range <<= 8;
code = (code << 8) | control[controlPos++];
}
prevByte = b;
}
else
{
range -= bound;
code -= bound;
p[prob] -= (ushort)(p[prob] >> kNumMoveBits);
if (range < kTopValue)
{
range <<= 8;
code = (code << 8) | control[controlPos++];
}
uint dest;
if (b == 0xE8)
dest =
(uint)
((data1[data1Pos++] << 24) | (data1[data1Pos++] << 16) | (data1[data1Pos++] << 8) |
data1[data1Pos++]);
else
dest =
(uint)
((data2[data2Pos++] << 24) | (data2[data2Pos++] << 16) | (data2[data2Pos++] << 8) |
data2[data2Pos++]);
dest -= (uint)(position + 4);
output[0] = (byte)dest;
output[1] = (byte)(dest >> 8);
output[2] = (byte)(dest >> 16);
output[3] = (byte)(dest >> 24);
outputOffset = 0;
outputCount = 4;
}
}
}
return size;
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotImplementedException();
}
public override void SetLength(long value)
{
throw new NotImplementedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotImplementedException();
}
}
}
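A small sketch of the opcode test this filter applies to each byte pair: IsJ above matches CALL (0xE8), JMP (0xE9) and two-byte conditional jumps (0x0F 0x8x), and the 4-byte displacement follows the matched opcode byte. The Bcj2ScanSketch class and BranchTargetOffsets name below are illustrative only.

using System.Collections.Generic;
internal static class Bcj2ScanSketch
{
// Same predicates as BCJ2Filter.IsJ / IsJcc above.
private static bool IsJcc(byte b0, byte b1)
{
return b0 == 0x0F && (b1 & 0xF0) == 0x80;
}
private static bool IsJ(byte b0, byte b1)
{
return (b1 & 0xFE) == 0xE8 || IsJcc(b0, b1);
}
// Returns the offsets at which a 4-byte branch target would follow,
// i.e. the positions the BCJ2 coder treats as candidates for rewriting.
internal static IEnumerable<int> BranchTargetOffsets(byte[] code)
{
byte prev = 0;
for (int i = 0; i < code.Length; i++)
{
byte b = code[i];
if (IsJ(prev, b))
yield return i + 1; // the displacement starts right after the opcode byte
prev = b;
}
}
}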


@@ -1,142 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Compressor.Filters
{
internal abstract class Filter : Stream
{
protected bool isEncoder;
protected Stream baseStream;
private byte[] tail;
private byte[] window;
private int transformed = 0;
private int read = 0;
private bool endReached = false;
protected Filter(bool isEncoder, Stream baseStream, int lookahead)
{
this.isEncoder = isEncoder;
this.baseStream = baseStream;
tail = new byte[lookahead - 1];
window = new byte[tail.Length * 2];
}
public override bool CanRead
{
get { return !isEncoder; }
}
public override bool CanSeek
{
get { return false; }
}
public override bool CanWrite
{
get { return isEncoder; }
}
public override void Flush()
{
throw new NotImplementedException();
}
public override long Length
{
get { return baseStream.Length; }
}
public override long Position
{
get { return baseStream.Position; }
set { throw new NotImplementedException(); }
}
public override int Read(byte[] buffer, int offset, int count)
{
int size = 0;
if (transformed > 0)
{
int copySize = transformed;
if (copySize > count)
copySize = count;
Buffer.BlockCopy(tail, 0, buffer, offset, copySize);
transformed -= copySize;
read -= copySize;
offset += copySize;
count -= copySize;
size += copySize;
Buffer.BlockCopy(tail, copySize, tail, 0, read);
}
if (count == 0)
return size;
int inSize = read;
if (inSize > count)
inSize = count;
Buffer.BlockCopy(tail, 0, buffer, offset, inSize);
read -= inSize;
Buffer.BlockCopy(tail, inSize, tail, 0, read);
while (!endReached && inSize < count)
{
int baseRead = baseStream.Read(buffer, offset + inSize, count - inSize);
inSize += baseRead;
if (baseRead == 0)
endReached = true;
}
while (!endReached && read < tail.Length)
{
int baseRead = baseStream.Read(tail, read, tail.Length - read);
read += baseRead;
if (baseRead == 0)
endReached = true;
}
if (inSize > tail.Length)
{
transformed = Transform(buffer, offset, inSize);
offset += transformed;
count -= transformed;
size += transformed;
inSize -= transformed;
transformed = 0;
}
if (count == 0)
return size;
Buffer.BlockCopy(buffer, offset, window, 0, inSize);
Buffer.BlockCopy(tail, 0, window, inSize, read);
if (inSize + read > tail.Length)
transformed = Transform(window, 0, inSize + read);
else
transformed = inSize + read;
Buffer.BlockCopy(window, 0, buffer, offset, inSize);
Buffer.BlockCopy(window, inSize, tail, 0, read);
size += inSize;
transformed -= inSize;
return size;
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotImplementedException();
}
public override void SetLength(long value)
{
throw new NotImplementedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
Transform(buffer, offset, count);
baseStream.Write(buffer, offset, count);
}
protected abstract int Transform(byte[] buffer, int offset, int count);
}
}
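To show how the abstract Transform hook is meant to be used, here is a hypothetical, trivial subclass (not part of the library, and assumed to live in the same assembly as the Filter class above) that XORs every byte in place. Transform is expected to rewrite the given range and return how many of the count bytes it fully processed; a stateless per-byte transform can always process all of them.

using System.IO;
// Hypothetical example only: a pass-through filter with no real lookahead
// that XORs each byte with a fixed key.
internal class XorFilter : Filter
{
private readonly byte key;
public XorFilter(bool isEncoder, Stream baseStream, byte key)
: base(isEncoder, baseStream, 1) // lookahead of 1 byte: tail buffer stays empty
{
this.key = key;
}
protected override int Transform(byte[] buffer, int offset, int count)
{
for (int i = 0; i < count; i++)
{
buffer[offset + i] ^= key; // rewrite in place
}
return count; // every byte was fully processed
}
}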


@@ -1,231 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Compressor.LZMA
{
internal class Bcj2DecoderStream : DecoderStream2
{
private const int kNumTopBits = 24;
private const uint kTopValue = (1 << kNumTopBits);
private class RangeDecoder
{
internal Stream mStream;
internal uint Range;
internal uint Code;
public RangeDecoder(Stream stream)
{
mStream = stream;
Range = 0xFFFFFFFF;
for (int i = 0; i < 5; i++)
Code = (Code << 8) | ReadByte();
}
public byte ReadByte()
{
int bt = mStream.ReadByte();
if (bt < 0)
throw new EndOfStreamException();
return (byte) bt;
}
public void Dispose()
{
mStream.Dispose();
}
}
private class StatusDecoder
{
private const int numMoveBits = 5;
private const int kNumBitModelTotalBits = 11;
private const uint kBitModelTotal = 1u << kNumBitModelTotalBits;
private uint Prob;
public StatusDecoder()
{
Prob = kBitModelTotal/2;
}
private void UpdateModel(uint symbol)
{
/*
Prob -= (Prob + ((symbol - 1) & ((1 << numMoveBits) - 1))) >> numMoveBits;
Prob += (1 - symbol) << (kNumBitModelTotalBits - numMoveBits);
*/
if (symbol == 0)
Prob += (kBitModelTotal - Prob) >> numMoveBits;
else
Prob -= (Prob) >> numMoveBits;
}
public uint Decode(RangeDecoder decoder)
{
uint newBound = (decoder.Range >> kNumBitModelTotalBits)*Prob;
if (decoder.Code < newBound)
{
decoder.Range = newBound;
Prob += (kBitModelTotal - Prob) >> numMoveBits;
if (decoder.Range < kTopValue)
{
decoder.Code = (decoder.Code << 8) | decoder.ReadByte();
decoder.Range <<= 8;
}
return 0;
}
else
{
decoder.Range -= newBound;
decoder.Code -= newBound;
Prob -= Prob >> numMoveBits;
if (decoder.Range < kTopValue)
{
decoder.Code = (decoder.Code << 8) | decoder.ReadByte();
decoder.Range <<= 8;
}
return 1;
}
}
}
private Stream mMainStream;
private Stream mCallStream;
private Stream mJumpStream;
private RangeDecoder mRangeDecoder;
private StatusDecoder[] mStatusDecoder;
private long mWritten;
private long mLimit;
private IEnumerator<byte> mIter;
private bool mFinished;
public Bcj2DecoderStream(Stream[] streams, byte[] info, long limit)
{
if (info != null && info.Length > 0)
throw new NotSupportedException();
if (streams.Length != 4)
throw new NotSupportedException();
mLimit = limit;
mMainStream = streams[0];
mCallStream = streams[1];
mJumpStream = streams[2];
mRangeDecoder = new RangeDecoder(streams[3]);
mStatusDecoder = new StatusDecoder[256 + 2];
for (int i = 0; i < mStatusDecoder.Length; i++)
mStatusDecoder[i] = new StatusDecoder();
mIter = Run().GetEnumerator();
}
private static bool IsJcc(byte b0, byte b1)
{
return b0 == 0x0F
&& (b1 & 0xF0) == 0x80;
}
private static bool IsJ(byte b0, byte b1)
{
return (b1 & 0xFE) == 0xE8
|| IsJcc(b0, b1);
}
private static int GetIndex(byte b0, byte b1)
{
if (b1 == 0xE8)
return b0;
else if (b1 == 0xE9)
return 256;
else
return 257;
}
public override int Read(byte[] buffer, int offset, int count)
{
if (count == 0 || mFinished)
return 0;
for (int i = 0; i < count; i++)
{
if (!mIter.MoveNext())
{
mFinished = true;
return i;
}
buffer[offset + i] = mIter.Current;
}
return count;
}
public IEnumerable<byte> Run()
{
const uint kBurstSize = (1u << 18);
byte prevByte = 0;
uint processedBytes = 0;
for (;;)
{
byte b = 0;
uint i;
for (i = 0; i < kBurstSize; i++)
{
int tmp = mMainStream.ReadByte();
if (tmp < 0)
yield break;
b = (byte) tmp;
mWritten++;
yield return b;
if (IsJ(prevByte, b))
break;
prevByte = b;
}
processedBytes += i;
if (i == kBurstSize)
continue;
if (mStatusDecoder[GetIndex(prevByte, b)].Decode(mRangeDecoder) == 1)
{
Stream s = (b == 0xE8) ? mCallStream : mJumpStream;
uint src = 0;
for (i = 0; i < 4; i++)
{
int b0 = s.ReadByte();
if (b0 < 0)
throw new EndOfStreamException();
src <<= 8;
src |= (uint) b0;
}
uint dest = src - (uint) (mWritten + 4);
mWritten++;
yield return (byte) dest;
mWritten++;
yield return (byte) (dest >> 8);
mWritten++;
yield return (byte) (dest >> 16);
mWritten++;
yield return (byte) (dest >> 24);
prevByte = (byte) (dest >> 24);
processedBytes += 4;
}
else
{
prevByte = b;
}
}
}
}
}
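The core of the decoder above is the address rewrite: the call/jump streams store absolute targets, and the decoder converts them back to the relative displacements x86 actually encodes, exactly as in the line dest = src - (uint)(mWritten + 4). A minimal sketch of that conversion and its inverse; the Bcj2AddressSketch name and positionAfterOpcode parameter (the output offset of the first displacement byte) are illustrative only.

internal static class Bcj2AddressSketch
{
// Encoder direction: turn the relative displacement found after an E8/E9
// opcode into the absolute target written to the call/jump stream.
internal static uint ToAbsolute(uint relative, long positionAfterOpcode)
{
return unchecked(relative + (uint)(positionAfterOpcode + 4));
}
// Decoder direction: the same step as 'dest = src - (uint)(mWritten + 4)' above.
internal static uint ToRelative(uint absolute, long positionAfterOpcode)
{
return unchecked(absolute - (uint)(positionAfterOpcode + 4));
}
}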


@@ -1,87 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;
namespace SharpCompress.Compressor.LZMA
{
internal class BitVector
{
private uint[] mBits;
private int mLength;
public BitVector(int length)
{
mLength = length;
mBits = new uint[(length + 31) >> 5];
}
public BitVector(int length, bool initValue)
{
mLength = length;
mBits = new uint[(length + 31) >> 5];
if (initValue)
for (int i = 0; i < mBits.Length; i++)
mBits[i] = ~0u;
}
public BitVector(List<bool> bits)
: this(bits.Count)
{
for (int i = 0; i < bits.Count; i++)
if (bits[i])
SetBit(i);
}
public bool[] ToArray()
{
bool[] bits = new bool[mLength];
for (int i = 0; i < bits.Length; i++)
bits[i] = this[i];
return bits;
}
public int Length
{
get { return mLength; }
}
public bool this[int index]
{
get
{
if (index < 0 || index >= mLength)
throw new ArgumentOutOfRangeException("index");
return (mBits[index >> 5] & (1u << (index & 31))) != 0;
}
}
public void SetBit(int index)
{
if (index < 0 || index >= mLength)
throw new ArgumentOutOfRangeException("index");
mBits[index >> 5] |= 1u << (index & 31);
}
internal bool GetAndSet(int index)
{
if (index < 0 || index >= mLength)
throw new ArgumentOutOfRangeException("index");
uint bits = mBits[index >> 5];
uint mask = 1u << (index & 31);
mBits[index >> 5] |= mask;
return (bits & mask) != 0;
}
public override string ToString()
{
StringBuilder sb = new StringBuilder(mLength);
for (int i = 0; i < mLength; i++)
sb.Append(this[i] ? 'x' : '.');
return sb.ToString();
}
}
}
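A brief usage sketch of the BitVector above; GetAndSet is the test-and-set used to mark items exactly once. The BitVectorExample wrapper is illustrative and assumes it is compiled into the same assembly as the class above.

using System;
internal static class BitVectorExample
{
internal static void Demo()
{
var seen = new BitVector(8);
seen.SetBit(2);
bool wasSet = seen.GetAndSet(2); // true: bit 2 was already set
bool first = seen.GetAndSet(5); // false: bit 5 was clear and is now set
Console.WriteLine(seen); // "..x..x.." (x = set, . = clear)
Console.WriteLine(wasSet + " " + first);
}
}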


@@ -1,111 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Compressor.LZMA
{
internal static class CRC
{
public const uint kInitCRC = 0xFFFFFFFF;
private static uint[] kTable = new uint[4*256];
static CRC()
{
const uint kCrcPoly = 0xEDB88320;
for (uint i = 0; i < 256; i++)
{
uint r = i;
for (int j = 0; j < 8; j++)
r = (r >> 1) ^ (kCrcPoly & ~((r & 1) - 1));
kTable[i] = r;
}
for (uint i = 256; i < kTable.Length; i++)
{
uint r = kTable[i - 256];
kTable[i] = kTable[r & 0xFF] ^ (r >> 8);
}
}
public static uint From(Stream stream, long length)
{
uint crc = kInitCRC;
byte[] buffer = new byte[Math.Min(length, 4 << 10)];
while (length > 0)
{
int delta = stream.Read(buffer, 0, (int) Math.Min(length, buffer.Length));
if (delta == 0)
throw new EndOfStreamException();
crc = Update(crc, buffer, 0, delta);
length -= delta;
}
return Finish(crc);
}
public static uint Finish(uint crc)
{
return ~crc;
}
public static uint Update(uint crc, byte bt)
{
return kTable[(crc & 0xFF) ^ bt] ^ (crc >> 8);
}
public static uint Update(uint crc, uint value)
{
crc ^= value;
return kTable[0x300 + (crc & 0xFF)]
^ kTable[0x200 + ((crc >> 8) & 0xFF)]
^ kTable[0x100 + ((crc >> 16) & 0xFF)]
^ kTable[0x000 + (crc >> 24)];
}
public static uint Update(uint crc, ulong value)
{
return Update(Update(crc, (uint) value), (uint) (value >> 32));
}
public static uint Update(uint crc, long value)
{
return Update(crc, (ulong) value);
}
public static uint Update(uint crc, byte[] buffer, int offset, int length)
{
for (int i = 0; i < length; i++)
crc = Update(crc, buffer[offset + i]);
return crc;
}
#if !SILVERLIGHT && !PORTABLE
public static unsafe uint Update(uint crc, byte* buffer, int length)
{
while (length > 0 && ((int) buffer & 3) != 0)
{
crc = Update(crc, *buffer);
buffer++;
length--;
}
while (length >= 4)
{
crc = Update(crc, *(uint*) buffer);
buffer += 4;
length -= 4;
}
while (length > 0)
{
crc = Update(crc, *buffer);
length--;
}
return crc;
}
#endif
}
}
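A short sketch of how this static helper is normally driven: start from kInitCRC, fold data in with Update, and complement with Finish (the From overload above does the same over a stream). The CrcUsageSketch wrapper is illustrative only.

using System.Text;
internal static class CrcUsageSketch
{
// Illustrative only: CRC-32 of a string's UTF-8 bytes using the helpers above.
internal static uint CrcOfText(string text)
{
byte[] data = Encoding.UTF8.GetBytes(text);
uint crc = CRC.kInitCRC; // 0xFFFFFFFF
crc = CRC.Update(crc, data, 0, data.Length);
return CRC.Finish(crc); // final complement
}
}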


@@ -1,59 +0,0 @@
namespace SharpCompress.Compressor.LZMA.LZ
{
internal class CRC
{
public static readonly uint[] Table;
static CRC()
{
Table = new uint[256];
const uint kPoly = 0xEDB88320;
for (uint i = 0; i < 256; i++)
{
uint r = i;
for (int j = 0; j < 8; j++)
if ((r & 1) != 0)
r = (r >> 1) ^ kPoly;
else
r >>= 1;
Table[i] = r;
}
}
private uint _value = 0xFFFFFFFF;
public void Init()
{
_value = 0xFFFFFFFF;
}
public void UpdateByte(byte b)
{
_value = Table[(((byte) (_value)) ^ b)] ^ (_value >> 8);
}
public void Update(byte[] data, uint offset, uint size)
{
for (uint i = 0; i < size; i++)
_value = Table[(((byte) (_value)) ^ data[offset + i])] ^ (_value >> 8);
}
public uint GetDigest()
{
return _value ^ 0xFFFFFFFF;
}
private static uint CalculateDigest(byte[] data, uint offset, uint size)
{
CRC crc = new CRC();
// crc.Init();
crc.Update(data, offset, size);
return crc.GetDigest();
}
private static bool VerifyDigest(uint digest, byte[] data, uint offset, uint size)
{
return (CalculateDigest(data, offset, size) == digest);
}
}
}
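Usage of this instance-based variant is analogous but stateful: Init resets the register, Update folds in bytes, and GetDigest returns the complemented result. A minimal sketch with an illustrative wrapper class:

internal static class LzCrcSketch
{
// Illustrative only: streaming CRC over two chunks with the class above.
internal static uint CrcOfChunks(byte[] first, byte[] second)
{
var crc = new CRC();
crc.Init(); // reset register to 0xFFFFFFFF
crc.Update(first, 0, (uint)first.Length);
crc.Update(second, 0, (uint)second.Length);
return crc.GetDigest(); // complemented register
}
}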


@@ -1,91 +0,0 @@
using System;
using System.Collections.Generic;
namespace SharpCompress.Compressor.LZMA
{
internal static class Log
{
private static Stack<string> _indent = new Stack<string>();
private static bool _needsIndent = true;
static Log()
{
_indent.Push("");
}
public static void PushIndent(string indent = " ")
{
_indent.Push(_indent.Peek() + indent);
}
public static void PopIndent()
{
if (_indent.Count == 1)
throw new InvalidOperationException();
_indent.Pop();
}
private static void EnsureIndent()
{
if (_needsIndent)
{
_needsIndent = false;
#if !SILVERLIGHT && !PORTABLE
System.Diagnostics.Debug.Write(_indent.Peek());
#endif
}
}
public static void Write(object value)
{
EnsureIndent();
#if !SILVERLIGHT && !PORTABLE
System.Diagnostics.Debug.Write(value);
#endif
}
public static void Write(string text)
{
EnsureIndent();
#if !SILVERLIGHT && !PORTABLE
System.Diagnostics.Debug.Write(text);
#endif
}
public static void Write(string format, params object[] args)
{
EnsureIndent();
#if !SILVERLIGHT && !PORTABLE
System.Diagnostics.Debug.Write(string.Format(format, args));
#endif
}
public static void WriteLine()
{
System.Diagnostics.Debug.WriteLine("");
_needsIndent = true;
}
public static void WriteLine(object value)
{
EnsureIndent();
System.Diagnostics.Debug.WriteLine(value);
_needsIndent = true;
}
public static void WriteLine(string text)
{
EnsureIndent();
System.Diagnostics.Debug.WriteLine(text);
_needsIndent = true;
}
public static void WriteLine(string format, params object[] args)
{
EnsureIndent();
System.Diagnostics.Debug.WriteLine(string.Format(format, args));
_needsIndent = true;
}
}
}
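A brief usage sketch of the Log helper above: indentation is pushed around nested work, and each WriteLine re-emits the current indent prefix. The wrapper class below is illustrative; output goes to the System.Diagnostics.Debug listeners.

internal static class LogUsageSketch
{
internal static void Demo()
{
Log.WriteLine("decoding folder");
Log.PushIndent();
Log.WriteLine("coder {0}: {1} bytes", 0, 1234);
Log.PopIndent();
}
}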


@@ -1,97 +0,0 @@
namespace SharpCompress.Compressor.LZMA
{
internal abstract class Base
{
public const uint kNumRepDistances = 4;
public const uint kNumStates = 12;
// static byte []kLiteralNextStates = {0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 4, 5};
// static byte []kMatchNextStates = {7, 7, 7, 7, 7, 7, 7, 10, 10, 10, 10, 10};
// static byte []kRepNextStates = {8, 8, 8, 8, 8, 8, 8, 11, 11, 11, 11, 11};
// static byte []kShortRepNextStates = {9, 9, 9, 9, 9, 9, 9, 11, 11, 11, 11, 11};
public struct State
{
public uint Index;
public void Init()
{
Index = 0;
}
public void UpdateChar()
{
if (Index < 4) Index = 0;
else if (Index < 10) Index -= 3;
else Index -= 6;
}
public void UpdateMatch()
{
Index = (uint) (Index < 7 ? 7 : 10);
}
public void UpdateRep()
{
Index = (uint) (Index < 7 ? 8 : 11);
}
public void UpdateShortRep()
{
Index = (uint) (Index < 7 ? 9 : 11);
}
public bool IsCharState()
{
return Index < 7;
}
}
public const int kNumPosSlotBits = 6;
public const int kDicLogSizeMin = 0;
// public const int kDicLogSizeMax = 30;
// public const uint kDistTableSizeMax = kDicLogSizeMax * 2;
public const int kNumLenToPosStatesBits = 2; // it's for speed optimization
public const uint kNumLenToPosStates = 1 << kNumLenToPosStatesBits;
public const uint kMatchMinLen = 2;
public static uint GetLenToPosState(uint len)
{
len -= kMatchMinLen;
if (len < kNumLenToPosStates)
return len;
return (uint) (kNumLenToPosStates - 1);
}
public const int kNumAlignBits = 4;
public const uint kAlignTableSize = 1 << kNumAlignBits;
public const uint kAlignMask = (kAlignTableSize - 1);
public const uint kStartPosModelIndex = 4;
public const uint kEndPosModelIndex = 14;
public const uint kNumPosModels = kEndPosModelIndex - kStartPosModelIndex;
public const uint kNumFullDistances = 1 << ((int) kEndPosModelIndex/2);
public const uint kNumLitPosStatesBitsEncodingMax = 4;
public const uint kNumLitContextBitsMax = 8;
public const int kNumPosStatesBitsMax = 4;
public const uint kNumPosStatesMax = (1 << kNumPosStatesBitsMax);
public const int kNumPosStatesBitsEncodingMax = 4;
public const uint kNumPosStatesEncodingMax = (1 << kNumPosStatesBitsEncodingMax);
public const int kNumLowLenBits = 3;
public const int kNumMidLenBits = 3;
public const int kNumHighLenBits = 8;
public const uint kNumLowLenSymbols = 1 << kNumLowLenBits;
public const uint kNumMidLenSymbols = 1 << kNumMidLenBits;
public const uint kNumLenSymbols = kNumLowLenSymbols + kNumMidLenSymbols +
(1 << kNumHighLenBits);
public const uint kMatchMaxLen = kMatchMinLen + kNumLenSymbols - 1;
}
}
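As a quick illustration of the length-to-slot mapping above: match lengths start at kMatchMinLen (2), and GetLenToPosState clamps the zero-based length to at most kNumLenToPosStates - 1, so lengths 2, 3 and 4 get their own distance-slot contexts and everything from 5 upward shares the last one. The sketch below is illustrative only.

internal static class LenToPosStateSketch
{
// Prints which distance-slot context each match length selects.
internal static void Demo()
{
for (uint len = Base.kMatchMinLen; len <= 8; len++)
{
uint state = Base.GetLenToPosState(len);
System.Console.WriteLine("len {0} -> posState {1}", len, state);
}
// Expected: 2->0, 3->1, 4->2, and 5..8 all -> 3 (kNumLenToPosStates - 1).
}
}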


@@ -1,418 +0,0 @@
using System;
using SharpCompress.Compressor.LZMA.RangeCoder;
namespace SharpCompress.Compressor.LZMA
{
internal class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
{
private class LenDecoder
{
private BitDecoder m_Choice = new BitDecoder();
private BitDecoder m_Choice2 = new BitDecoder();
private BitTreeDecoder[] m_LowCoder = new BitTreeDecoder[Base.kNumPosStatesMax];
private BitTreeDecoder[] m_MidCoder = new BitTreeDecoder[Base.kNumPosStatesMax];
private BitTreeDecoder m_HighCoder = new BitTreeDecoder(Base.kNumHighLenBits);
private uint m_NumPosStates = 0;
public void Create(uint numPosStates)
{
for (uint posState = m_NumPosStates; posState < numPosStates; posState++)
{
m_LowCoder[posState] = new BitTreeDecoder(Base.kNumLowLenBits);
m_MidCoder[posState] = new BitTreeDecoder(Base.kNumMidLenBits);
}
m_NumPosStates = numPosStates;
}
public void Init()
{
m_Choice.Init();
for (uint posState = 0; posState < m_NumPosStates; posState++)
{
m_LowCoder[posState].Init();
m_MidCoder[posState].Init();
}
m_Choice2.Init();
m_HighCoder.Init();
}
public uint Decode(RangeCoder.Decoder rangeDecoder, uint posState)
{
if (m_Choice.Decode(rangeDecoder) == 0)
return m_LowCoder[posState].Decode(rangeDecoder);
else
{
uint symbol = Base.kNumLowLenSymbols;
if (m_Choice2.Decode(rangeDecoder) == 0)
symbol += m_MidCoder[posState].Decode(rangeDecoder);
else
{
symbol += Base.kNumMidLenSymbols;
symbol += m_HighCoder.Decode(rangeDecoder);
}
return symbol;
}
}
}
private class LiteralDecoder
{
private struct Decoder2
{
private BitDecoder[] m_Decoders;
public void Create()
{
m_Decoders = new BitDecoder[0x300];
}
public void Init()
{
for (int i = 0; i < 0x300; i++) m_Decoders[i].Init();
}
public byte DecodeNormal(RangeCoder.Decoder rangeDecoder)
{
uint symbol = 1;
do
{
symbol = (symbol << 1) | m_Decoders[symbol].Decode(rangeDecoder);
} while (symbol < 0x100);
return (byte) symbol;
}
public byte DecodeWithMatchByte(RangeCoder.Decoder rangeDecoder, byte matchByte)
{
uint symbol = 1;
do
{
uint matchBit = (uint) (matchByte >> 7) & 1;
matchByte <<= 1;
uint bit = m_Decoders[((1 + matchBit) << 8) + symbol].Decode(rangeDecoder);
symbol = (symbol << 1) | bit;
if (matchBit != bit)
{
while (symbol < 0x100)
symbol = (symbol << 1) | m_Decoders[symbol].Decode(rangeDecoder);
break;
}
} while (symbol < 0x100);
return (byte) symbol;
}
}
private Decoder2[] m_Coders;
private int m_NumPrevBits;
private int m_NumPosBits;
private uint m_PosMask;
public void Create(int numPosBits, int numPrevBits)
{
if (m_Coders != null && m_NumPrevBits == numPrevBits &&
m_NumPosBits == numPosBits)
return;
m_NumPosBits = numPosBits;
m_PosMask = ((uint) 1 << numPosBits) - 1;
m_NumPrevBits = numPrevBits;
uint numStates = (uint) 1 << (m_NumPrevBits + m_NumPosBits);
m_Coders = new Decoder2[numStates];
for (uint i = 0; i < numStates; i++)
m_Coders[i].Create();
}
public void Init()
{
uint numStates = (uint) 1 << (m_NumPrevBits + m_NumPosBits);
for (uint i = 0; i < numStates; i++)
m_Coders[i].Init();
}
private uint GetState(uint pos, byte prevByte)
{
return ((pos & m_PosMask) << m_NumPrevBits) + (uint) (prevByte >> (8 - m_NumPrevBits));
}
public byte DecodeNormal(RangeCoder.Decoder rangeDecoder, uint pos, byte prevByte)
{
return m_Coders[GetState(pos, prevByte)].DecodeNormal(rangeDecoder);
}
public byte DecodeWithMatchByte(RangeCoder.Decoder rangeDecoder, uint pos, byte prevByte, byte matchByte)
{
return m_Coders[GetState(pos, prevByte)].DecodeWithMatchByte(rangeDecoder, matchByte);
}
};
private LZ.OutWindow m_OutWindow;
private BitDecoder[] m_IsMatchDecoders = new BitDecoder[Base.kNumStates << Base.kNumPosStatesBitsMax];
private BitDecoder[] m_IsRepDecoders = new BitDecoder[Base.kNumStates];
private BitDecoder[] m_IsRepG0Decoders = new BitDecoder[Base.kNumStates];
private BitDecoder[] m_IsRepG1Decoders = new BitDecoder[Base.kNumStates];
private BitDecoder[] m_IsRepG2Decoders = new BitDecoder[Base.kNumStates];
private BitDecoder[] m_IsRep0LongDecoders = new BitDecoder[Base.kNumStates << Base.kNumPosStatesBitsMax];
private BitTreeDecoder[] m_PosSlotDecoder = new BitTreeDecoder[Base.kNumLenToPosStates];
private BitDecoder[] m_PosDecoders = new BitDecoder[Base.kNumFullDistances - Base.kEndPosModelIndex];
private BitTreeDecoder m_PosAlignDecoder = new BitTreeDecoder(Base.kNumAlignBits);
private LenDecoder m_LenDecoder = new LenDecoder();
private LenDecoder m_RepLenDecoder = new LenDecoder();
private LiteralDecoder m_LiteralDecoder = new LiteralDecoder();
private int m_DictionarySize;
private uint m_PosStateMask;
private Base.State state = new Base.State();
private uint rep0, rep1, rep2, rep3;
public Decoder()
{
m_DictionarySize = -1;
for (int i = 0; i < Base.kNumLenToPosStates; i++)
m_PosSlotDecoder[i] = new BitTreeDecoder(Base.kNumPosSlotBits);
}
private void CreateDictionary()
{
if (m_DictionarySize < 0)
throw new InvalidParamException();
m_OutWindow = new LZ.OutWindow();
int blockSize = Math.Max(m_DictionarySize, (1 << 12));
m_OutWindow.Create(blockSize);
}
private void SetLiteralProperties(int lp, int lc)
{
if (lp > 8)
throw new InvalidParamException();
if (lc > 8)
throw new InvalidParamException();
m_LiteralDecoder.Create(lp, lc);
}
private void SetPosBitsProperties(int pb)
{
if (pb > Base.kNumPosStatesBitsMax)
throw new InvalidParamException();
uint numPosStates = (uint) 1 << pb;
m_LenDecoder.Create(numPosStates);
m_RepLenDecoder.Create(numPosStates);
m_PosStateMask = numPosStates - 1;
}
private void Init()
{
uint i;
for (i = 0; i < Base.kNumStates; i++)
{
for (uint j = 0; j <= m_PosStateMask; j++)
{
uint index = (i << Base.kNumPosStatesBitsMax) + j;
m_IsMatchDecoders[index].Init();
m_IsRep0LongDecoders[index].Init();
}
m_IsRepDecoders[i].Init();
m_IsRepG0Decoders[i].Init();
m_IsRepG1Decoders[i].Init();
m_IsRepG2Decoders[i].Init();
}
m_LiteralDecoder.Init();
for (i = 0; i < Base.kNumLenToPosStates; i++)
m_PosSlotDecoder[i].Init();
// m_PosSpecDecoder.Init();
for (i = 0; i < Base.kNumFullDistances - Base.kEndPosModelIndex; i++)
m_PosDecoders[i].Init();
m_LenDecoder.Init();
m_RepLenDecoder.Init();
m_PosAlignDecoder.Init();
state.Init();
rep0 = 0;
rep1 = 0;
rep2 = 0;
rep3 = 0;
}
public void Code(System.IO.Stream inStream, System.IO.Stream outStream,
Int64 inSize, Int64 outSize, ICodeProgress progress)
{
if (m_OutWindow == null)
CreateDictionary();
m_OutWindow.Init(outStream);
if (outSize > 0)
m_OutWindow.SetLimit(outSize);
else
m_OutWindow.SetLimit(Int64.MaxValue - m_OutWindow.Total);
RangeCoder.Decoder rangeDecoder = new RangeCoder.Decoder();
rangeDecoder.Init(inStream);
Code(m_DictionarySize, m_OutWindow, rangeDecoder);
m_OutWindow.ReleaseStream();
rangeDecoder.ReleaseStream();
if (!rangeDecoder.IsFinished || (inSize > 0 && rangeDecoder.Total != inSize))
throw new DataErrorException();
if (m_OutWindow.HasPending)
throw new DataErrorException();
m_OutWindow = null;
}
internal bool Code(int dictionarySize, LZ.OutWindow outWindow, RangeCoder.Decoder rangeDecoder)
{
int dictionarySizeCheck = Math.Max(dictionarySize, 1);
outWindow.CopyPending();
while (outWindow.HasSpace)
{
uint posState = (uint) outWindow.Total & m_PosStateMask;
if (m_IsMatchDecoders[(state.Index << Base.kNumPosStatesBitsMax) + posState].Decode(rangeDecoder) == 0)
{
byte b;
byte prevByte = outWindow.GetByte(0);
if (!state.IsCharState())
b = m_LiteralDecoder.DecodeWithMatchByte(rangeDecoder,
(uint) outWindow.Total, prevByte,
outWindow.GetByte((int) rep0));
else
b = m_LiteralDecoder.DecodeNormal(rangeDecoder, (uint) outWindow.Total, prevByte);
outWindow.PutByte(b);
state.UpdateChar();
}
else
{
uint len;
if (m_IsRepDecoders[state.Index].Decode(rangeDecoder) == 1)
{
if (m_IsRepG0Decoders[state.Index].Decode(rangeDecoder) == 0)
{
if (
m_IsRep0LongDecoders[(state.Index << Base.kNumPosStatesBitsMax) + posState].Decode(
rangeDecoder) == 0)
{
state.UpdateShortRep();
outWindow.PutByte(outWindow.GetByte((int) rep0));
continue;
}
}
else
{
UInt32 distance;
if (m_IsRepG1Decoders[state.Index].Decode(rangeDecoder) == 0)
{
distance = rep1;
}
else
{
if (m_IsRepG2Decoders[state.Index].Decode(rangeDecoder) == 0)
distance = rep2;
else
{
distance = rep3;
rep3 = rep2;
}
rep2 = rep1;
}
rep1 = rep0;
rep0 = distance;
}
len = m_RepLenDecoder.Decode(rangeDecoder, posState) + Base.kMatchMinLen;
state.UpdateRep();
}
else
{
rep3 = rep2;
rep2 = rep1;
rep1 = rep0;
len = Base.kMatchMinLen + m_LenDecoder.Decode(rangeDecoder, posState);
state.UpdateMatch();
uint posSlot = m_PosSlotDecoder[Base.GetLenToPosState(len)].Decode(rangeDecoder);
if (posSlot >= Base.kStartPosModelIndex)
{
int numDirectBits = (int) ((posSlot >> 1) - 1);
rep0 = ((2 | (posSlot & 1)) << numDirectBits);
if (posSlot < Base.kEndPosModelIndex)
rep0 += BitTreeDecoder.ReverseDecode(m_PosDecoders,
rep0 - posSlot - 1, rangeDecoder, numDirectBits);
else
{
rep0 += (rangeDecoder.DecodeDirectBits(
numDirectBits - Base.kNumAlignBits) << Base.kNumAlignBits);
rep0 += m_PosAlignDecoder.ReverseDecode(rangeDecoder);
}
}
else
rep0 = posSlot;
}
if (rep0 >= outWindow.Total || rep0 >= dictionarySizeCheck)
{
if (rep0 == 0xFFFFFFFF)
return true;
throw new DataErrorException();
}
outWindow.CopyBlock((int) rep0, (int) len);
}
}
return false;
}
public void SetDecoderProperties(byte[] properties)
{
if (properties.Length < 1)
throw new InvalidParamException();
int lc = properties[0]%9;
int remainder = properties[0]/9;
int lp = remainder%5;
int pb = remainder/5;
if (pb > Base.kNumPosStatesBitsMax)
throw new InvalidParamException();
SetLiteralProperties(lp, lc);
SetPosBitsProperties(pb);
Init();
if (properties.Length >= 5)
{
m_DictionarySize = 0;
for (int i = 0; i < 4; i++)
m_DictionarySize += properties[1 + i] << (i*8);
}
}
public void Train(System.IO.Stream stream)
{
if (m_OutWindow == null)
CreateDictionary();
m_OutWindow.Train(stream);
}
/*
public override bool CanRead { get { return true; }}
public override bool CanWrite { get { return true; }}
public override bool CanSeek { get { return true; }}
public override long Length { get { return 0; }}
public override long Position
{
get { return 0; }
set { }
}
public override void Flush() { }
public override int Read(byte[] buffer, int offset, int count)
{
return 0;
}
public override void Write(byte[] buffer, int offset, int count)
{
}
public override long Seek(long offset, System.IO.SeekOrigin origin)
{
return 0;
}
public override void SetLength(long value) {}
*/
}
}
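SetDecoderProperties above unpacks the standard 5-byte LZMA properties header: byte 0 encodes lc, lp and pb as (pb * 5 + lp) * 9 + lc, and bytes 1 to 4 hold the little-endian dictionary size. Below is a standalone sketch of the same unpacking; the LzmaPropsSketch name and out-parameter signature are illustrative only.

internal static class LzmaPropsSketch
{
// Illustrative only: mirrors the arithmetic in Decoder.SetDecoderProperties above.
internal static void Unpack(byte[] properties, out int lc, out int lp, out int pb, out int dictionarySize)
{
int d = properties[0];
lc = d % 9; // literal context bits
int remainder = d / 9;
lp = remainder % 5; // literal position bits
pb = remainder / 5; // position state bits
dictionarySize = 0;
if (properties.Length >= 5)
{
for (int i = 0; i < 4; i++)
{
dictionarySize += properties[1 + i] << (i * 8); // little-endian
}
}
}
}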

Some files were not shown because too many files have changed in this diff.