Compare commits

...

439 Commits
0.11.2 ... 0.22

Author SHA1 Message Date
Adam Hathcock
d577fe1ac6 Merge pull request #385 from twirpx/master
Fixed EFS flag handling
2018-07-09 08:48:34 +01:00
Adam Hathcock
9f976aaf78 Merge branch 'master' into master 2018-07-09 08:48:26 +01:00
Adam Hathcock
8a7d7e366f Merge pull request #396 from andreas-eriksson/Rar5IsEncrypted
Correctly set IsEncrypted for entries in Rar5.
2018-07-09 08:48:12 +01:00
Adam Hathcock
540ab1c6fa Merge branch 'master' into master 2018-07-09 08:47:32 +01:00
Adam Hathcock
6792afbdb1 Merge branch 'master' into Rar5IsEncrypted 2018-07-09 08:44:32 +01:00
Adam Hathcock
e5a7185671 Mark for 0.22 2018-07-09 08:42:45 +01:00
Adam Hathcock
cdaf453b2d Update dependencies and tests to .NET Core 2.1 2018-07-09 08:39:37 +01:00
Andreas Eriksson
f9cc80e1de Correctly set IsEncrypted for entries in Rar5. 2018-06-29 15:51:40 +02:00
Adam Hathcock
7beff9e83c Merge pull request #395 from adamhathcock/zip-slip-readers
Zip slip for Readers
2018-06-28 11:56:44 +01:00
Adam Hathcock
8f49f1b6f8 Merge remote-tracking branch 'origin/master' into zip-slip-readers 2018-06-28 11:52:43 +01:00
Adam Hathcock
7e336a0247 Slip in new SDK 2018-06-28 11:51:17 +01:00
Adam Hathcock
e37e8bdadc Move path handling for extraction to be common
Reader and Archive now share more extraction logic
2018-06-28 11:46:51 +01:00
Adam Hathcock
40bd61b16b Merge pull request #389 from frankyjuang/patch-1
Fix comment
2018-06-08 08:59:52 +01:00
Juang, Yi-Lin
87fbb45099 Fix comment 2018-06-08 11:27:43 +08:00
twirpx
e822f9a95c Tests fixed to use explicit use of 866 encoding because of usage file named in Russian in several tests 2018-05-30 22:17:27 +05:00
twirpx
8a5a9159e1 Fixed DirectoryEntryHeader Name/Comment decoding in case of EFS flags set 2018-05-30 21:47:31 +05:00
twirpx
73b3c6b419 Merge branch 'master' of https://github.com/adamhathcock/sharpcompress 2018-05-30 20:28:15 +05:00
Adam Hathcock
f9bd7ebdb0 Merge pull request #384 from MrJul/perf-readbyte
Implemented ReadByte/WriteByte on streams to improve performance
2018-05-28 09:21:28 +01:00
Julien Lebosquain
540618c062 Implemented ReadByte/WriteByte on streams to improve performance 2018-05-27 16:31:44 +02:00
Adam Hathcock
9e96dec8c9 Merge pull request #383 from itn3000/add-filename-encoding-example
add example for custom file encoding
2018-05-23 09:14:46 +01:00
itn3000
7b7af612ba add example for custom file encoding 2018-05-23 09:46:36 +09:00
Adam Hathcock
3a747ba87e Update USAGE with new stream handling 2018-05-16 08:51:33 +01:00
Adam Hathcock
87e57e3a9a Mark for 0.21.1 2018-05-15 09:14:56 +01:00
Adam Hathcock
785d0dcebf Merge pull request #381 from adamhathcock/issue-380
Allow forced encoding to override default encoding
2018-05-15 09:13:16 +01:00
Adam Hathcock
2314776f55 Also check for CustomDecoder 2018-05-15 08:28:11 +01:00
Adam Hathcock
473f5d8189 Make GetDecoder use GetEncoding for forced 2018-05-14 16:20:57 +01:00
Adam Hathcock
be971cb6f7 Allow forced encoding to override default encoding 2018-05-14 16:08:31 +01:00
Adam Hathcock
3f94c1a50d Remove lingering uses of non disposing stream 2018-05-08 14:10:49 +01:00
Adam Hathcock
2919ec293a mark for 0.21 2018-05-06 09:39:36 +01:00
Adam Hathcock
19d25152e5 Merge pull request #378 from adamhathcock/fix_crypto_namespace
Fix namespaces to not interfere with bouncy castle users
2018-05-06 09:20:17 +01:00
Adam Hathcock
747180203c Rider isn’t quite good enough in refactoring 2018-05-06 09:12:18 +01:00
Adam Hathcock
9f89a0844d Fix namespaces to not interfere with bouncy castle users 2018-05-06 09:09:45 +01:00
Adam Hathcock
c64282e915 more naming 2018-05-06 09:07:06 +01:00
Adam Hathcock
c44a80bab2 Merge remote-tracking branch 'origin/master' 2018-05-06 08:59:23 +01:00
Adam Hathcock
8dfc4a2ffb more variable naming 2018-05-06 08:59:00 +01:00
Adam Hathcock
c341c626a5 variable naming clean up 2018-05-06 08:49:32 +01:00
Adam Hathcock
173a0fe659 Some naming clean up 2018-05-05 19:35:58 +01:00
Adam Hathcock
5fdae1cf82 Make readonly and fix visibility 2018-05-05 19:23:34 +01:00
Adam Hathcock
9e892ab397 Merge pull request #376 from leezer3/master
Fix broken link in usage.md
2018-05-05 16:18:17 +01:00
Christopher Lees
e95559b4fc Fix broken link in usage.md 2018-05-05 15:08:56 +01:00
Adam Hathcock
18475cc86d Use proper xunit single threading 2018-05-05 09:38:56 +01:00
Adam Hathcock
88b59600cd Merge pull request #369 from adamhathcock/leaveOpen
Rework LeaveOpen to be consistent
2018-05-05 09:27:32 +01:00
Adam Hathcock
9a9d64bcbe Merge branch 'master' into leaveOpen
# Conflicts:
#	src/SharpCompress/Compressors/LZMA/LZipStream.cs
2018-05-05 09:25:26 +01:00
Adam Hathcock
4f3408ec25 Merge pull request #375 from adamhathcock/issue_360
Fixes lzip stream disposal
2018-05-05 09:20:35 +01:00
Adam Hathcock
e9d0fb85ac Merge branch 'master' into leaveOpen 2018-05-05 09:19:38 +01:00
Adam Hathcock
1ce37ef7a8 Fixes lzip stream disposal 2018-05-05 09:18:01 +01:00
Adam Hathcock
ecad356e30 Merge pull request #363 from sridhar6668/sridhar6668/support_extended_ascii
ZipArchive Reader: Uses IBM PC character encoding to decode filename …
2018-05-05 09:12:22 +01:00
Adam Hathcock
fafd8da91d Merge branch 'master' into leaveOpen 2018-05-05 09:10:19 +01:00
Adam Hathcock
2fb31d4b84 Merge branch 'master' into sridhar6668/support_extended_ascii 2018-05-05 09:09:09 +01:00
Adam Hathcock
8b478451ac Evil zip is a windows only test because of paths 2018-05-05 09:05:32 +01:00
Adam Hathcock
42b1205fb4 Merge pull request #374 from odinn1984/feat/fail_on_outside_target_files
fix: prevent extracting archived files outside of target path
2018-05-02 22:51:02 +01:00
odinn1986
80ceb1c375 fix: prevent extracting archived files outside of target path
This PR is meant to fix an arbitrary file write vulnerability, that can be
achieved using a specially crafted zip archive, that holds path traversal
filenames. When the filename gets concatenated to the target extraction
directory, the final path ends up outside of the target folder.

A sample malicious zip file named Zip.Evil.zip was used,
and when running the code below, resulted in the creation of C:/Temp/evil.txt
outside of the intended target directory.

There are various possible ways to avoid this issue, some include checking
for .. (dot dot) characters in the filename, but the best solution in our
opinion is to check if the final target filename, starts with the target
folder (after both are resolved to their absolute path).

Stay secure,
Snyk Team
2018-05-02 23:12:33 +03:00
Adam Hathcock
501407c3fe Change flag name to be closer to spec 2018-04-29 16:33:15 +01:00
Adam Hathcock
abddabf18e Proper fixes for all platforms 2018-04-29 16:27:26 +01:00
Adam Hathcock
91d753cbdb Merge branch 'master' into sridhar6668/support_extended_ascii 2018-04-29 15:12:02 +01:00
Adam Hathcock
259acd0694 misc additions 2018-04-29 15:09:26 +01:00
Adam Hathcock
33f7258ea2 Merge branch 'master' into leaveOpen
# Conflicts:
#	src/SharpCompress/Common/Rar/Headers/RarHeaderFactory.cs
#	src/SharpCompress/Readers/Rar/RarReader.cs
2018-04-29 14:47:08 +01:00
Adam Hathcock
1ea7bb57e5 Merge branch 'master' into sridhar6668/support_extended_ascii 2018-04-29 11:39:09 +01:00
Adam Hathcock
3e60e796fb Merge pull request #340 from adamhathcock/rar5
Rar5 Feature
2018-04-29 11:36:49 +01:00
Adam Hathcock
d9c178cbee Fix all platform support 2018-04-29 11:33:49 +01:00
Adam Hathcock
031b3c55f6 FIx solid support. I did it wrong 2018-04-29 11:12:28 +01:00
Adam Hathcock
b43d2c3d95 Disabled decryption tests 2018-04-29 10:55:51 +01:00
Adam Hathcock
d865120480 ArchiveCryptHeader renamed 2018-04-29 10:13:46 +01:00
Adam Hathcock
15534f466a Add basic rar5 crypt header 2018-04-28 18:20:40 +01:00
Adam Hathcock
9d63dcb8d6 Uncommit some tests 2018-04-28 18:14:47 +01:00
Adam Hathcock
6efe30bd6e Merge branch 'master' into rar5
# Conflicts:
#	.gitignore
2018-04-28 18:09:10 +01:00
Adam Hathcock
52dd9f0609 Merge pull request #371 from adamhathcock/Issue-370
Expose stream length.  Clean up entry stream
2018-04-26 11:33:23 +01:00
Adam Hathcock
bee7f43880 Expose stream length. Clean up entry stream 2018-04-26 09:46:01 +01:00
Adam Hathcock
d38276e8cf Fix solid and some other tests 2018-04-23 10:29:46 +01:00
Adam Hathcock
f3daaeb200 Try to use both for Rarv5 support 2018-04-23 09:39:50 +01:00
Adam Hathcock
9b152a40a9 Merge branch 'master' into rar5 2018-04-22 11:35:33 +01:00
Adam Hathcock
89ae8ca526 Rejigger read only substream 2018-04-22 11:32:47 +01:00
Adam Hathcock
68a5e474a6 More testing of file handling 2018-04-22 11:19:11 +01:00
Adam Hathcock
bf58742ddf rework of leave stream open for readers 2018-04-22 11:09:03 +01:00
Adam Hathcock
f18e5b75bb Archives set up correctly 2018-04-22 10:06:30 +01:00
Adam Hathcock
e919c99b14 First pass of removing explicit leaveOpen on streams. 2018-04-22 10:02:18 +01:00
Adam Hathcock
b960f2e5ba Minor build updates 2018-04-22 09:17:03 +01:00
srperias@microsoft.com
5d8728d592 Decode without setting the default Encoding type 2018-03-28 13:12:54 -07:00
srperias@microsoft.com
04ba6c2d73 ZipArchive Reader: Uses IBM PC character encoding to decode filename and comment if the general purpose bit 11 is not set in the header 2018-03-27 13:54:16 -07:00
Adam Hathcock
0cab9bd4b4 Mark for 0.20.0 2018-03-24 07:42:20 +00:00
Adam Hathcock
279d305013 Merge pull request #359 from prettierci-commits/prettierci-master-1521104105
PrettierCI master Sync
2018-03-15 08:56:49 +00:00
PrettierCI
750c1fb069 Sync with Prettier 2018-03-15 08:55:06 +00:00
Adam Hathcock
359a6042cd Merge pull request #352 from adamhathcock/cake-026
Cake 0.26
2018-03-01 15:40:30 +00:00
Adam Hathcock
e27d2ec660 Remove netcoreapp1.x testing 2018-03-01 15:35:55 +00:00
Adam Hathcock
da56bfc01f Merge pull request #354 from frabar666/deflate64-decompress
Support Deflate64 decompression
2018-03-01 09:14:06 +00:00
frabar666
6e2c7d2857 support Deflate64 decompression 2018-02-27 23:31:11 +01:00
Adam Hathcock
5481609554 Build with new cake 2018-02-27 08:52:55 +00:00
Frederik Carlier
a62f4df0b1 Implement entry.ToString(), let it return entry.Key (#351) 2018-02-16 13:43:23 +00:00
Adam Hathcock
f893c1272c Merge pull request #337 from 4ybaka/issue-323-tar-archive-finalization
Added ability to leave tar archive open after stream is closed
2018-01-14 19:52:08 +00:00
Dmitry
e701f5277e Merge branch 'master' into issue-323-tar-archive-finalization 2018-01-13 00:47:04 +01:00
Dmitry Nesterov
f85fd1f6a4 Added ability to leave tar archive open after stream is closed 2018-01-13 00:44:42 +01:00
Dmitry Nesterov
8f7ea420b3 Revert "Added ability to leave tar archive open after stream is closed"
This reverts commit 9092ecf331.
2018-01-13 00:41:35 +01:00
Adam Hathcock
b39f389a67 Merge branch 'master' into rar5 2018-01-10 14:58:40 +00:00
Adam Hathcock
d8c8dabb52 Merge pull request #336 from diontools/ImproveStreamSkipping
Utility.Skip uses seek
2018-01-10 11:23:24 +00:00
Dmitry Nesterov
9092ecf331 Added ability to leave tar archive open after stream is closed 2018-01-04 22:57:32 +01:00
diontools
2fd9fe96ad Utility.Skip uses seek 2018-01-03 00:23:34 +09:00
coderb
554153e6a0 rar5: port old algos 2017-12-20 19:10:06 -05:00
coderb
3c29122dfe rar5: porting old algos 2017-12-20 19:04:41 -05:00
coderb
c42dc646ae rar5: reporting older algos 2017-12-20 18:51:48 -05:00
coderb
edd6206a03 rar5: fix checkin 2017-12-20 18:32:36 -05:00
coderb
a1e7f55b95 rar5: unpack working!!! 2017-12-20 18:32:12 -05:00
coderb
218823e9b2 rar5: unpack bugfix 2017-12-20 18:13:04 -05:00
coderb
3e8f52689e unrar5: unpack wip 2017-12-20 16:01:55 -05:00
coderb
0219fc3ea9 unrar5: unpack wip 2017-12-20 14:58:49 -05:00
coderb
66cb2ab662 unrar5: unpack wip 2017-12-20 14:28:22 -05:00
coderb
91c85f0aa6 unrar5: unpack wip 2017-12-20 14:22:47 -05:00
coderb
fd9790cc36 unrar5: unpack wip 2017-12-20 14:15:20 -05:00
coderb
0d40883176 unrar5: unpack wip 2017-12-20 11:19:30 -05:00
coderb
2d0319f779 rar5: unpack wip 2017-12-20 02:11:45 -05:00
coderb
aaeaa44c0b rar5: unpack wip 2017-12-20 01:55:54 -05:00
coderb
9fc77db073 rar5: unpack wip 2017-12-20 01:39:13 -05:00
coderb
bdaa060003 rar5: unpack wip 2017-12-20 01:21:53 -05:00
coderb
b5cb9901b1 rar5: wip 2017-12-20 00:29:07 -05:00
coderb
fb4d503c9a rar5: wip 2017-12-20 00:17:47 -05:00
coderb
c3c2fcf4d0 rar5: unpack wip 2017-12-19 21:41:36 -05:00
coderb
f2d2d94d1b rar5: unpack wip 2017-12-19 14:03:53 -05:00
coderb
11c1b45335 rar5: unpack wip 2017-12-19 13:52:57 -05:00
coderb
b8a308012f rar5: unpack work 2017-12-18 11:01:37 -05:00
coderb
122a732206 unrar5: pull in fast memset code 2017-12-18 10:18:17 -05:00
coderb
59e4383209 rar5: unpack wip 2017-12-18 10:02:34 -05:00
coderb
22b008f723 rar5: unpack wip 2017-12-18 09:51:24 -05:00
coderb
c770094425 rar5: fileheader fixes 2017-12-18 09:46:18 -05:00
coderb
093a5fdf31 rar5: oops 2017-12-18 09:25:55 -05:00
coderb
f811835f02 rar5: rename Entry.IsSplit -> IsSplitAfter, misc wip 2017-12-18 09:20:20 -05:00
coderb
5dda13b800 rar5: reorg code for side by side implementations 2017-12-18 08:23:18 -05:00
coderb
a20306afe0 rar5: port wip 2017-12-17 21:23:32 -05:00
coderb
476b1df323 rar5: clean port unrarsrc 5.5.8 wip 2017-12-17 20:59:34 -05:00
coderb
aec5a913da rar5: checkin missing file 2017-12-17 20:54:54 -05:00
coderb
e13795651d rar: import unrarsrc-5.5.8 for reference
this will allow us to diff against newer version of unrarsrc if we wish to update our port
2017-12-17 17:52:00 -05:00
coderb
cc175da0d8 rar5: unpack wip 2017-12-17 17:08:53 -05:00
coderb
a2369fb531 rar5: uppercase method names 2017-12-17 12:08:09 -05:00
coderb
c5b8a444f2 rar5: unpack tighten up access modifiers 2017-12-17 12:03:41 -05:00
coderb
f450608073 rar5: unpack use partial class instead of inheritance 2017-12-17 11:46:17 -05:00
coderb
0e57537167 rar5: unpack wip 2017-12-17 11:36:33 -05:00
coderb
1445b0a48b rar5: refactor unpack classes 2017-12-17 11:33:31 -05:00
coderb
d268cc7685 rar5: wip unpack50() 2017-12-17 11:16:42 -05:00
coderb
7969bbaac4 rar5: working on decompression 2017-12-17 11:04:13 -05:00
coderb
b9e89ca64b rar5: wip 2017-12-17 10:40:41 -05:00
coderb
f802b41665 rar5: wip 2017-12-17 10:08:21 -05:00
coderb
f0eac57bb9 rar5: more cleanup 2017-12-17 09:14:08 -05:00
coderb
c9d3563f31 rar5: some cleanup 2017-12-17 09:00:11 -05:00
coderb
8563179592 rar5: wip 2017-12-17 08:35:41 -05:00
coderb
1c49ff63e2 rar5: replicate rar test cases 2017-12-17 08:09:41 -05:00
coderb
5f121c5da4 rar5 wip 2017-12-17 07:39:02 -05:00
coderb
72f52359e6 rar5: wip 2017-12-17 01:58:40 -05:00
coderb
9f549b98da rar5: support ArchiveHeader, additional rar5 header implementation 2017-12-16 20:39:53 -05:00
coderb
c346a4ca94 rar5: change rar MarkHeader detection logic to support rar5 2017-12-16 17:35:07 -05:00
coderb
d334b54846 rar5: add test archives 2017-12-16 12:54:32 -05:00
Adam Hathcock
02f68b793c Mark for 0.19.2 2017-12-16 09:08:17 +00:00
Adam Hathcock
57b9133a0f Change namespace and visibility to avoid collisions (#333) 2017-12-16 09:05:21 +00:00
Adam Hathcock
815f5e09e8 Mark for 0.19.1 2017-12-15 14:46:14 +00:00
Adam Hathcock
5bdf01ee59 Absorb arraypool from CoreFX (#331) 2017-12-15 14:45:02 +00:00
Adam Hathcock
bd9417e74c Mark for 0.19 2017-12-12 11:17:57 +00:00
Adam Hathcock
694e869162 Use arraypool for transfer/skip (#326)
* Use arraypool for transfer/skip

* Merge fixes

* Remove redundant constant
2017-12-08 13:58:38 +00:00
Adam Hathcock
45845f8963 Add Circle CI build 2017-12-08 12:03:28 +00:00
Adam Hathcock
a8b6def76a Netcore2 (#302)
* Add netstandard 2.0 target and netcoreapp2.0 tests

* Update xunit

* set tests explicitly to netcore2

* update travis

* Don't say build as netcoreapp1.0

* try adding dotnet 1 too

* Remove .NET Core 1 support

* switch to circle

* update cake

* fix circle build

* try fix file ending test again

* Fix casing on files

* Another casing fix

* Add back netstandard1.0

* Finish adding netstandard 1.0 back

* Add netstandard1.3 back
2017-12-08 12:00:29 +00:00
Sors
a4ebd5fb3d Rar 5 format (#310)
Fix rar 5 format comment
2017-12-04 18:59:49 +00:00
Adam Hathcock
3da3b212fa create new memorystream to allow proper resizing as memorystream could be a user provided buffer. Update xunit (#307) 2017-12-04 18:48:38 +00:00
Martijn Kant
c2528cf93e Mk/add support for extracting password protected LZMA(2) 7z archives (#324)
* Added possibility to decompress a password protected 7z LZMA archive

* Fix tests
2017-12-04 10:55:30 +00:00
coderb
550fecd4d3 bugfix: eliminate spurious rar crc exception when Read() is called with count = 0 (#313) 2017-10-23 11:58:02 +01:00
Adam Hathcock
50b01428b4 Mark for 0.18.2 2017-09-22 09:16:42 +01:00
Thritton
bb59f28b22 Update ArchiveReader.cs (#303)
#227
Added check if argument is in range in method TranslateTime(long? time)
2017-09-19 15:25:10 +01:00
François
7064cda6de Zlib: fix Adler32 implementation (#301) 2017-09-17 22:21:09 +01:00
Adam Hathcock
525c1873e8 Fix merge 2017-09-17 22:16:57 +01:00
François
3d91b4eb5e XZ: fix padding issues (#300)
* XZ: fix variable-length integers decoding

* XZ: fix block and index padding issues

* cleanup in XZStreamTests
2017-09-17 22:14:23 +01:00
François
f20c03180e XZ: fix variable-length integers decoding (#299) 2017-09-17 22:05:20 +01:00
Vladimir Kozlov
08fee76b4e Fixes Double Dispose() of ZipWritingStream #294 https://github.com/adamhathcock/sharpcompress/issues/294 (#295) 2017-09-08 13:25:53 +01:00
twirpx
149f5e4fb5 Minor fixes 2017-08-22 11:46:32 +05:00
Adam Hathcock
0f511c4b2a Mark for 0.18.1 2017-08-17 11:43:34 +01:00
twirpx
42d9dfd117 Fixed bug: Passing default ReaderOptions when creating ZipReader for solid extraction (#287) 2017-08-16 08:19:23 +01:00
twirpx
1793fc949d Fixed bug: Passing default ReaderOptions when creating ZipReader for solid extraction 2017-08-16 08:57:36 +05:00
Adam Hathcock
3983db08ff Use nameof 2017-07-27 11:05:33 -05:00
Adam Hathcock
72114bceea Add release link 2017-07-17 10:22:58 -05:00
Adam Hathcock
c303f96682 mark for 0.18 2017-07-17 10:11:27 -05:00
Adam Hathcock
0e785968c4 Rework usage of WriterOptions for writers since it was inconsistently used. (#271) 2017-07-17 11:05:42 -04:00
Adam Hathcock
15110e18e2 Don't skip ZipReader data twice. (#272)
* Don't skip ZipReader data twice.

* Add archive for a new test
2017-07-17 11:05:21 -04:00
Adam Hathcock
5465af041b Use Skip and ReadFully extension methods where possible. (#276) 2017-07-17 10:55:22 -04:00
Adam Hathcock
310d56fc16 Made ArchiveEncoding a non-static class that is used with options. (#274)
* Made ArchiveEncoding a non-static class that is used with options.

* Revert some formatting.

* Optional string decoder delegate (#278)
2017-07-17 10:53:20 -04:00
eklann
231258ef69 Force encoding (#266)
* Fixing build

* Fixing build

* Fixing build

* Fixed build (seems working now)

* Added support to force specific encoding when reading or writing an archive

* Minor fixed related to force encoding

* Removed obsolete project file not present in master
2017-07-05 10:15:49 -05:00
Sam Bott
16b7e3ffc8 Add XZ tests (#258)
* tests added and converted to xunit

* reordered two assertions
2017-06-11 13:44:00 +01:00
Adam Hathcock
513e59f830 Mark for 0.17.1 2017-06-09 08:28:35 +01:00
Adam Hathcock
b10a1cf2bd Bug on Windows on .NET Core fix (#257)
* Bug on Windows on .NET Core fix: https://github.com/dotnet/corefx/issues/20676

* Add comment
2017-06-09 08:22:47 +01:00
Adam Hathcock
1656edaa29 Add some more details to nuget package 2017-06-01 12:36:01 +01:00
Adam Hathcock
cff49aacba Added explicit tar skip check. Caught skip issue. 2017-06-01 11:25:32 +01:00
Adam Hathcock
19c32aff6c README fixes 2017-06-01 10:56:11 +01:00
Adam Hathcock
db3ec8337f Mark for 0.17 2017-06-01 10:54:50 +01:00
Adam Hathcock
e7bfc40461 Fix Skipping when compressed size is unknown (fallback to decompressing) 2017-06-01 09:26:08 +01:00
Adam Hathcock
3d3ca254ba Zip64 introduced seekable behavior into ZipWriter. The position may … (#252)
* Zip64 introduced seekable behavior into ZipWriter.  The position may not be zero.

* Remove some dead code

* Update formats for zip64

* Make version created by and version needed to extract the same

* Running tests is faster than skipping
2017-05-31 16:55:49 +01:00
Adam Hathcock
b45bc859a4 XZ Format (#247)
* Started integrated XZ format from https://github.com/sambott/XZ.NET

* Add readme line as it was copy/pasted

* Tar used with XZ

* update formats
2017-05-31 16:55:26 +01:00
Adam Hathcock
912d7a8775 Lzip (#245)
* First pass.  Writing isn't implemented on stream.  Tests are busted.

* LZipReader works...no file name :(

* LZipWriter works

* Writing tests are actually correct now.  LZipStream correctly writes trailer now.  lzip command line tool likes it.

* Add recommendation blurb

* Update notes for formats

* LZip isn't an archive format

* Attempting to fix and implement crc32

* LZip writing test passes

* Had to invert crc to check uncompressed data.
2017-05-31 16:51:24 +01:00
Adam Hathcock
16885da1b5 Mark for 0.16.2 2017-05-31 14:47:51 +01:00
Adam Hathcock
26714052eb Merge pull request #249 from adamhathcock/zip_entry_compression_fix
Per entry compression was being written out incorrectly on the centra…
2017-05-31 12:55:37 +01:00
Adam Hathcock
3df763a783 Merge branch 'master' into zip_entry_compression_fix 2017-05-31 11:15:30 +01:00
Adam Hathcock
925842bc4b Merge pull request #251 from dbaumber/Issue-250
Fix for Issue #250: remove extra build flags for .NET 3.5
2017-05-31 10:54:52 +01:00
Dan Baumberger
cead62704e Fix for Issue #250: remove extra build flags for .NET 3.5 as to
enable WinZipAes for .NET 3.5.
2017-05-30 13:43:48 -07:00
Adam Hathcock
3f24a744c0 Merge branch 'master' into zip_entry_compression_fix 2017-05-30 16:10:41 +01:00
Adam Hathcock
cce97548a2 Merge pull request #212 from kenkendk/remove_unused_code
Removed the unused code to write entries in Zip Headers
2017-05-30 16:09:04 +01:00
Adam Hathcock
9270d7cabf Add cache for dotnet packages 2017-05-30 16:04:55 +01:00
Adam Hathcock
264aa6d366 Merge branch 'master' into remove_unused_code 2017-05-30 15:58:44 +01:00
Adam Hathcock
69fc74e376 Per entry compression was being written out incorrectly on the central directory. Fix for that. 2017-05-30 15:37:41 +01:00
Adam Hathcock
a361d41e68 Fix test namespaces 2017-05-30 15:14:02 +01:00
Adam Hathcock
38766dac99 Wrong logic for skipping tests 2017-05-30 12:50:03 +01:00
Adam Hathcock
c30bc65281 Don't run tests on travis either 2017-05-30 12:46:34 +01:00
Adam Hathcock
296ebd942a Shrink script a bit 2017-05-30 12:37:16 +01:00
Adam Hathcock
afa19f7ad8 Add xplat cake and travis build 2017-05-30 12:35:12 +01:00
Adam Hathcock
a193b2d3b1 Add xplat build 2017-05-29 10:35:55 +01:00
Adam Hathcock
be4a65e572 update readme 2017-05-24 08:52:12 +01:00
Adam Hathcock
6832918e71 Mark for 0.16.1 2017-05-23 16:21:07 +01:00
Adam Hathcock
fd9a3ffbcc Merge commit '18641d4f9b849daea7b6fbb7edad51369534ffa3'
* commit '18641d4f9b849daea7b6fbb7edad51369534ffa3':
  Normalize Rar keys
2017-05-23 16:15:58 +01:00
Adam Hathcock
41added690 Private setter clean up 2017-05-23 16:15:47 +01:00
Adam Hathcock
18641d4f9b Merge pull request #238 from adamhathcock/issue_201
Normalize Rar keys
2017-05-23 16:14:55 +01:00
Adam Hathcock
4d0c5099d4 Merge branch 'master' into issue_201 2017-05-23 16:13:09 +01:00
Adam Hathcock
9d9d491245 Slightly better fix for https://github.com/adamhathcock/sharpcompress/pull/235 2017-05-23 16:10:15 +01:00
Adam Hathcock
7b81d18071 Merge pull request #235 from dbaumber/Issue-230
Issue #230: preserve the compression method when getting a compressed…
2017-05-23 15:50:32 +01:00
Dan Baumberger
7d0acbc988 Merge branch 'Issue-230' of https://github.com/dbaumber/sharpcompress into Issue-230 2017-05-23 07:46:48 -07:00
Dan Baumberger
313c044c41 Added a unit test for the WinZipAes multiple OpenEntryStream() bug. 2017-05-23 07:44:45 -07:00
Dan Baumberger
f6f8adf97e Merge branch 'master' into Issue-230 2017-05-23 07:43:02 -07:00
Adam Hathcock
bc97d325ca Normalize Rar keys 2017-05-22 10:55:15 +01:00
Adam Hathcock
0f2d325f20 oh yeah, appveyor doesn't like the tests 2017-05-22 09:08:16 +01:00
Adam Hathcock
63d5503e12 forgot to actually add tests to script 2017-05-22 09:06:33 +01:00
Adam Hathcock
e53f2cac4a Mark for 0.16.0 2017-05-22 08:58:52 +01:00
Adam Hathcock
3b73464233 Merge pull request #236 from damieng/zip-min-version-of-20
Default zip ver to 20 (deflate/encyption), fixes #164
2017-05-22 08:38:18 +01:00
Damien Guard
575f10f766 Default zip ver to 20 (deflate/encyption), fixes #164 2017-05-19 16:37:20 -07:00
Dan Baumberger
8d3fc3533b Issue #230: preserve the compression method when getting a compressed stream for encrypted ZIP archives. 2017-05-19 08:36:11 -07:00
Adam Hathcock
60370b8539 don't run appveyor tests 2017-05-19 15:51:06 +01:00
Adam Hathcock
f6db114865 Remove console writelines 2017-05-19 15:47:53 +01:00
Adam Hathcock
1c6c344b6b Tests don't run on appveyor 2017-05-19 15:45:29 +01:00
Adam Hathcock
d0302898e0 Add back net45,net35 and cake 2017-05-19 13:33:12 +01:00
Adam Hathcock
057ac9b001 Enable test 2017-05-19 11:03:31 +01:00
Adam Hathcock
8be931bbcb Doing some resharper clean up 2017-05-19 10:52:49 +01:00
Adam Hathcock
3197ef289c Forgot to hit save 2017-05-19 10:15:19 +01:00
Adam Hathcock
631578c175 Update to next version. Stop Zip64 tests from running all the time and some clean up 2017-05-19 10:10:23 +01:00
Adam Hathcock
f1809163c7 correct gitignore 2017-05-19 09:44:45 +01:00
Adam Hathcock
60e1fe86f2 Fix test running 2017-05-19 09:40:37 +01:00
Adam Hathcock
59d7de5bfc Try again appveyor 2017-05-19 09:36:05 +01:00
Adam Hathcock
6e95c1d84a Drop net35 support as dot net tooling doesn’t support it currently 2017-05-19 09:34:02 +01:00
Adam Hathcock
ee64670755 Move test folder to be tests 2017-05-19 09:19:37 +01:00
Adam Hathcock
3f7d0f5b68 Update test project 2017-05-19 09:14:43 +01:00
Adam Hathcock
e3514c5c4b Don’t attempt to autodeploy 2017-05-19 09:06:18 +01:00
Adam Hathcock
cc3a9cff88 Merge pull request #231 from adamhathcock/VS2017
Vs2017
2017-05-19 09:02:12 +01:00
Adam Hathcock
15e821aa39 Remove unused events 2017-05-19 08:49:44 +01:00
Adam Hathcock
8dd1dbab5f Remove Cake as it’s unnecessary for basic build/test/publish 2017-05-19 08:47:17 +01:00
Adam Hathcock
65ce91ddf6 Update. Only use net35, net standard 1.0 and net standard 1.3 2017-05-19 08:46:27 +01:00
Adam Hathcock
bf55595d6f Merge pull request #226 from gardebring/master
Add new event handler to allow tracking of progress of extraction progress for individual entry
2017-04-25 13:07:44 +01:00
Anders Gardebring
2aa123ccd7 Remove begin and end events since this can now be tracked via progress instead 2017-04-25 13:21:04 +02:00
Anders Gardebring
0990b06cc9 Create new TransferTo method and pass Entry and IReaderExtractionListener instead of passing an action lambda. 2017-04-25 12:48:56 +02:00
Anders Gardebring
e05f9843ba Use strongly typed ReaderProgress instead of object[] 2017-04-25 12:36:32 +02:00
Anders Gardebring
683d2714d0 Add new event to be able to track progress of extraction of individual entry when extracting an archive. This allows for showing or logging progress of the extraction process, especially useful for large files that might take a long time to extract. 2017-04-24 13:50:45 +02:00
Anders Gardebring
b8ef1ecafc Revert "Add new feature to allow injection of an action into the extraction process. This allows for showing or logging progress of the extraction process, especially useful for large files that might take a long time to extract."
This reverts commit 467fc2d03d.
2017-04-24 10:22:49 +02:00
Anders Gardebring
467fc2d03d Add new feature to allow injection of an action into the extraction process. This allows for showing or logging progress of the extraction process, especially useful for large files that might take a long time to extract. 2017-04-20 11:45:53 +02:00
Adam Hathcock
58b4fe4f28 Merge pull request #220 from coderb/master
verify RAR crc on header and file data
2017-04-07 11:56:06 +01:00
Brien Oberstein
97d5e0aac4 verify rar CRC on header and file data 2017-04-04 12:20:06 -04:00
Adam Hathcock
356c977cff Merge pull request #215 from mnadareski/master
Removed restriction on 7zip file entries
2017-03-17 09:20:59 +00:00
Matt Nadareski
99d6062376 Removed restriction on 7zip file entries 2017-03-16 15:55:20 -07:00
Adam Hathcock
f8538403e4 Merge pull request #211 from kenkendk/add_zip64
Add zip64
2017-03-13 10:23:26 +00:00
Kenneth Skovhede
ba12019bc7 Removed the unused code to write entries in Zip Headers 2017-03-11 08:05:49 +01:00
Kenneth Skovhede
726b9c80f6 Fixed compiling the unittest 2017-03-11 01:05:58 +01:00
Kenneth Skovhede
2894711c51 Added a test suite to verify zip64 write support is working, and can be read in both Archive and Stream mode 2017-03-11 00:54:06 +01:00
Kenneth Skovhede
85280f6f4f Changed the logic to throw exceptions when sizes exceed the zip archive limits, and zip64 is not enabled.
This changes the logic, such that archives larger than 4GiB are still automatically written correct (only the central header is special).
Archives with individual streams larger than 4 GiB must set the zip64 flag, either on the archive or the individual streams.
2017-03-11 00:53:42 +01:00
Kenneth Skovhede
d7f4c0ee32 Fixed an error in the zip64 central end of header: the signature + length (12 bytes) are not included in the reported length. 2017-03-10 23:10:06 +01:00
Kenneth Skovhede
1263c0d976 Added support for writing zip64 headers 2017-03-09 23:56:42 +01:00
Kenneth Skovhede
cd3cbd2b32 Support for writing zip64 headers in the unused code 2017-03-09 23:18:57 +01:00
Adam Hathcock
b3a4fed8be Mark for 0.15.2 2017-03-09 11:02:44 +00:00
Adam Hathcock
d0b4af6666 Merge pull request #210 from kenkendk/fix_invalid_headers
Fix invalid headers
2017-03-09 10:41:18 +00:00
Kenneth Skovhede
81ab5c189d Fixed writing correct headers in zip archives 2017-03-09 11:34:24 +01:00
Kenneth Skovhede
6ef3be4b5c Fixed writing correct headers in zip archives 2017-03-09 11:32:20 +01:00
Adam Hathcock
9f90a1d651 Mark for 0.15.1 2017-01-25 09:31:01 +00:00
Adam Hathcock
ce9a3fd1ef Add file ordering fix for OS X 2017-01-25 09:29:13 +00:00
Adam Hathcock
7c6f05058e Merge pull request #206 from markryd/zip64-extraction
Zip64 extending information and ZipReader
2017-01-25 09:03:43 +00:00
Mark Rydstrom
a8c3a7439e Add support for zip64 to ZipReader 2017-01-25 17:05:48 +10:00
Mark Rydstrom
839b3ab0cf Add support for zip64 extended information field 2017-01-25 16:51:15 +10:00
Adam Hathcock
44d54db80e Fix some path issues on OS X when running tests. 2017-01-24 17:36:51 +00:00
Adam Hathcock
a67d7bc429 Mark for 0.15 2017-01-24 17:25:19 +00:00
Adam Hathcock
079a818c6c Merge pull request #205 from markryd/zip64-extraction
Add zip64 support for ZipArchive extraction
2017-01-24 16:56:42 +00:00
Mark Rydstrom
6be6ef0b5c Add zip64 support for ZipArchive extraction 2017-01-24 13:04:03 +10:00
Adam Hathcock
8e51d9d646 0.14.1 2016-11-30 14:26:18 +00:00
Adam Hathcock
ea206f4f02 Merge pull request #199 from adamhathcock/Issue-198
Gzip entry can't be read multiple times
2016-11-25 09:33:56 +00:00
Adam Hathcock
f175a2a252 Merge branch 'master' into Issue-198 2016-11-25 09:21:44 +00:00
Adam Hathcock
3f7e559b86 Merge pull request #200 from ITnent/bug/Issue-197
Open branch, to fix multiple crashes on repeated zip archives reading…
2016-11-25 09:21:34 +00:00
Vladimir Demidov
2959b4d701 Modified check integrity condition for the encrypted file. 2016-11-24 20:41:08 +03:00
Vladimir Demidov
031286c5eb Fixed defects after review. 2016-11-24 18:01:49 +03:00
Vladimir Demidov
e181fa8c4a Restored original tabs. 2016-11-24 17:11:43 +03:00
Vladimir Demidov
7b035bec5d Fixed some issues after review. 2016-11-24 16:21:02 +03:00
Vladimir Demidov
f39d2bf53a Open branch, to fix multiple crashes on repeated zip archives reading. Added fix. 2016-11-24 15:14:29 +03:00
Adam Hathcock
7c8e407182 Merge branch 'master' into Issue-198 2016-11-21 12:21:29 +00:00
Adam Hathcock
a09136d46b Merge pull request #195 from jskeet/strong-naming
Strong-name both the main and test projects
2016-11-21 12:06:13 +00:00
Adam Hathcock
5fe1363ee1 Gzip entry can't be read multiple times https://github.com/adamhathcock/sharpcompress/issues/198 2016-11-21 12:04:35 +00:00
Jon Skeet
b41823fc10 Strong-name both the main and test projects
It's not clear whether SharpCompress.Test.Portable (as referenced
in AssemblyInfo.cs) still exists, but build.ps1 certainly works.
2016-11-15 18:42:56 +00:00
Adam Hathcock
0a64fe28b0 Oops, removed too much from project.json 2016-10-14 09:03:15 +01:00
Adam Hathcock
e320ccfa9a 0.14.0 2016-10-14 08:59:19 +01:00
Adam Hathcock
9628ff9456 Merge pull request #191 from jskeet/lzip
Initial read-only support for LZip
2016-10-14 08:50:32 +01:00
Jon Skeet
d540f78cfc Initial read-only support for LZip
LZip has no notion of filenames, so an LzipReader wouldn't make very much sense;
I've just implemented the stream, and hooked it into tar support.
2016-10-12 15:08:56 +01:00
Adam Hathcock
66420cd299 Merge pull request #189 from ziaa/master
Remove unbalanced parentheses in code samples
2016-10-08 18:25:30 +01:00
Seyed Zia Azimi
dd0594471f Remove unbalanced parentheses in samples 2016-10-07 19:33:41 +03:30
Adam Hathcock
844ba228ee Make 0.13.1 2016-10-03 13:44:19 +01:00
Adam Hathcock
7efc701b32 Merge pull request #188 from adamhathcock/fix_nulls
Fix null password on ReaderFactory.  Fix null options on SevenZipArchive
2016-10-03 13:41:55 +01:00
Adam Hathcock
d7e29f7c4d Fix occasionally failing test 2016-10-03 13:37:04 +01:00
Adam Hathcock
f26ba91386 Fix null password on ReaderFactory. Fix null options on SevenZipArchive 2016-10-03 13:32:53 +01:00
Adam Hathcock
c73ac2039c Merge pull request #185 from adamhathcock/ppmd_allocation_zipwriter
Make PpmdProperties lazy to avoid unnecessary allocations.
2016-10-03 13:04:14 +01:00
Adam Hathcock
671f9cd0cb Empty commit to kick build 2016-10-03 12:58:23 +01:00
Adam Hathcock
131b5b9714 Can't use Lazy on .NET 3.5 :( 2016-10-03 11:20:29 +01:00
Adam Hathcock
74af0889b9 Make PpmdProperties lazy to avoid unnecessary allocations. 2016-10-03 10:16:26 +01:00
Adam Hathcock
e5ee399045 Merge pull request #181 from claunia/patch-1
Update FORMATS.md
2016-09-30 07:08:52 +01:00
deeb7a0f64 Update FORMATS.md
Add ADC to formats list.
2016-09-29 22:53:51 +01:00
Adam Hathcock
5af3bab1dc Merge pull request #180 from adamhathcock/documenting
Add Markdown files to document things.
2016-09-29 11:58:19 +01:00
Adam Hathcock
28be84d315 For all branches 2016-09-29 11:35:54 +01:00
Adam Hathcock
a0528c737d Trying just to build once 2016-09-29 11:34:50 +01:00
Adam Hathcock
b506e488e8 Add build badge 2016-09-29 11:32:31 +01:00
Adam Hathcock
58eb0e08d6 Don't save artifacts for PRs 2016-09-29 11:22:26 +01:00
Adam Hathcock
562701894f Save nupkgs 2016-09-29 11:13:05 +01:00
Adam Hathcock
54a562273b Incomplete refactoring 2016-09-29 11:10:11 +01:00
Adam Hathcock
3f8c9c4cb0 Update for 0.13.0 2016-09-29 11:03:11 +01:00
Adam Hathcock
3e7d28b043 Can I fix tables? 2016-09-29 10:57:49 +01:00
Adam Hathcock
40b10d4a26 Add Markdown files to document things. 2016-09-29 10:55:04 +01:00
Adam Hathcock
f367630a2a Merge pull request #179 from adamhathcock/tar_fix
Allow empty tar header to be read to know there are no more tar heade…
2016-09-28 13:57:09 +01:00
Adam Hathcock
b9e4f00862 Merge branch 'master' into tar_fix 2016-09-28 13:50:45 +01:00
Adam Hathcock
d6e74d6163 Merge pull request #178 from adamhathcock/7zip_deflate
Allow deflate decoder for 7zip
2016-09-28 13:50:35 +01:00
Adam Hathcock
4a4522b842 Merge branch 'master' into 7zip_deflate 2016-09-28 13:44:46 +01:00
Adam Hathcock
710ba4423d Merge branch 'master' into tar_fix 2016-09-28 13:43:21 +01:00
Adam Hathcock
2a5494a804 Merge pull request #174 from adamhathcock/redo_options
Redo options
2016-09-28 13:40:54 +01:00
Adam Hathcock
568909800c Allow empty tar header to be read to know there are no more tar headers to read 2016-09-28 12:00:48 +01:00
Adam Hathcock
7513a608b1 Allow deflate decoder 2016-09-28 11:59:31 +01:00
Adam Hathcock
911e9878bd Merge branch 'master' into redo_options 2016-09-27 13:09:07 +01:00
Adam Hathcock
899d7d6e61 Appveyor (#175)
* First pass of Cake build

* Update Cake but still need cake itself to run on full CLR

* Test out appveyor

* 3.5 build fix

* Build master and PRs differently.  Still scared to auto publish to nuget.
2016-09-27 13:08:42 +01:00
Adam Hathcock
260c0ee776 Add SaveTo overload for zip archives 2016-09-27 11:19:52 +01:00
Adam Hathcock
d71520808d Helps if I rename everything 2016-09-27 11:08:54 +01:00
Adam Hathcock
177fc2a12c Flags were a better idea when I was younger. It's not clear though. 2016-09-27 10:50:36 +01:00
Adam Hathcock
5dafcb02d4 Redo options classes 2016-09-27 10:23:35 +01:00
Adam Hathcock
c4fde80c5e Create proper options objects to remove flags from API 2016-09-27 10:14:08 +01:00
Adam Hathcock
06e3486ec4 Bump version 2016-09-26 11:53:35 +01:00
Adam Hathcock
bd7c783aaf Test fixes 2016-09-26 11:51:35 +01:00
Adam Hathcock
d732e3cfa4 Renamespace for proper pluralization 2016-09-26 11:49:49 +01:00
Adam Hathcock
c24cdc66ed Clean up from clean up 2016-09-26 11:03:15 +01:00
Adam Hathcock
efa6f7a82e Huge Resharper clean up. Fixed up test project.json 2016-09-26 10:55:52 +01:00
ddbbc3b847 Adds support for Apple Data Compression. (#168) 2016-09-12 17:41:31 +01:00
Adam Hathcock
7037161c07 Update README 2016-08-12 12:15:45 +01:00
Adam Hathcock
b0b62fcf91 Try to fix frameworks again by matching JSON.NET 2016-08-12 12:14:22 +01:00
Adam Hathcock
bd8ba7b854 Test with ForwardOnlyStream. RewindableStream shouldn't corrupt a ForwardOnlyStream (#161) 2016-08-12 11:56:49 +01:00
Adam Hathcock
3a52c68270 0.12.3 2016-07-22 16:50:33 +01:00
Adam Hathcock
89fd778bd8 Make all framework assemblies be build targets as a fix https://github.com/NuGet/Home/issues/3103 2016-07-22 16:47:59 +01:00
Tobias Käs
6e3e8343a8 Ignore unofficial extension of file attributes. (#153)
The high bits may contain posix file attributes when the archive was written by certain third party 7z implementations. These must be removed before we can interpret the attributes as windows (or .NET) file attributes.
2016-07-21 11:06:42 +01:00
Tobias Käs
9224237a99 Fix for issue #73 (#154)
7z archives may require alternating reads from multiple substreams so it is important to seek before reading from the underlying stream. To keep performance at an acceptable level it is necessary to perform buffering because seeking on every single one-byte-read will destroy performance.
2016-07-21 11:06:25 +01:00
Adam Hathcock
8d16925662 Add Profile259 2016-07-18 14:37:39 +01:00
Adam Hathcock
ef0bf2758e Add Silverlight 5 2016-07-13 13:26:01 +01:00
Adam Hathcock
351a7552b9 0.12.0 2016-07-13 10:01:32 +01:00
Adam Hathcock
9dcc127454 .NET Core RTM 2016-07-13 09:58:13 +01:00
Adam Hathcock
81ff334aae Fix line endings 2016-07-13 09:54:42 +01:00
Pawel Pabich
e76ed60483 Tar long paths (#147)
* Now we can handle long file names

* Refactored code a bit
2016-07-06 09:57:52 +01:00
Adam Hathcock
04d04576bf Merge pull request #84 from adamhathcock/dnx
Using DNX and project.json
2016-06-06 10:04:02 -04:00
Adam Hathcock
dbd713756f Introducing the lock again for tests 2016-06-06 14:59:28 +01:00
Adam Hathcock
6d821dffa6 Everything builds and basically runs 2016-06-06 14:35:33 +01:00
Adam Hathcock
40e559e608 First pass. Doesn't work yet. 2016-05-20 17:31:35 +01:00
Adam Hathcock
b3fe26fc56 Merge branch 'master' into dnx
Conflicts:
	NuGet/sharpcompress.nuspec
	SharpCompress/VersionInfo.cs
	src/SharpCompress/Common/Tar/Headers/TarHeader.cs
	src/SharpCompress/Common/Zip/Headers/ZipFileEntry.cs
	src/SharpCompress/Compressor/BZip2/BZip2Stream.cs
	src/SharpCompress/Reader/ReaderFactory.cs
	src/SharpCompress/Writer/GZip/GZipWriter.cs
	src/SharpCompress/Writer/Tar/TarWriter.cs
	src/SharpCompress/Writer/WriterFactory.cs
	src/SharpCompress/Writer/Zip/ZipCentralDirectoryEntry.cs
	src/SharpCompress/Writer/Zip/ZipWriter.cs
	test/SharpCompress.Test/WriterTests.cs
2016-05-20 17:19:43 +01:00
Adam Hathcock
bb930da2f1 0.11.6 packaging 2016-04-07 09:44:28 +01:00
Adam Hathcock
732e352261 Merge pull request #138 from adamhathcock/leaveopen_writers
Leave open writers
2016-04-06 08:17:09 +01:00
Adam Hathcock
8f2ada3f20 Update README.md 2016-03-31 10:10:20 +01:00
Adam Hathcock
762381fac6 Explicit finish for BZip2 writing 2016-03-31 08:54:23 +01:00
Adam Hathcock
c440fdf88d Give writers leaveOpen option 2016-03-30 20:18:46 +01:00
Adam Hathcock
7f3f6bb135 Merge pull request #136 from zentron/master
Fix for `System.IO.Compression` created empty directory in zip
2016-03-03 13:13:59 +00:00
Rob
cb7853174d Fix for System.IO.Compression created empty directory in zip 2016-03-03 14:40:14 +10:00
Adam Hathcock
ef8575a107 Matching 77b034cbe7 to have 20 as the version made by 2016-02-26 09:22:23 +00:00
Adam Hathcock
6f3f82e0ad Merge pull request #129 from benshoof/tar-globalextendedheader
Support tars with global extended headers
2016-02-18 07:18:55 +00:00
benshoof
7b1609abe2 Support tars with global extended headers 2016-02-17 15:39:17 -09:00
Adam Hathcock
691c44a3b5 Clean up targets 2016-02-13 19:59:05 +00:00
Adam Hathcock
98c629c789 Fix tests and DataConverter compile issue 2016-02-13 19:56:20 +00:00
Adam Hathcock
8367bff9fe Fix up DataConverter...removed pack 2016-02-13 09:42:59 +00:00
Adam Hathcock
ff22cd774e Merge branch 'master' into dnx
Conflicts:
	NuGet/sharpcompress.nuspec
	SharpCompress/Common/Zip/WinzipAesCryptoStream.Portable.cs
	SharpCompress/Common/Zip/WinzipAesEncryptionData.Portable.cs
	SharpCompress/Crypto/PBKDF2.cs
	SharpCompress/SharpCompress.Portable.csproj
	SharpCompress/SharpCompress.PortableTest.csproj
	SharpCompress/SharpCompress.Unsigned.csproj
	SharpCompress/SharpCompress.WindowsStore.csproj
	SharpCompress/SharpCompress.csproj
	src/SharpCompress/Archive/ArchiveFactory.cs
	src/SharpCompress/Archive/Rar/FileInfoRarFilePart.cs
	src/SharpCompress/Archive/Rar/RarArchive.cs
	src/SharpCompress/Archive/Rar/RarArchiveEntry.cs
	src/SharpCompress/Archive/SevenZip/SevenZipArchiveEntry.cs
	src/SharpCompress/Common/GZip/GZipFilePart.cs
	src/SharpCompress/Common/GZip/GZipVolume.cs
	src/SharpCompress/Common/Rar/Headers/MarkHeader.cs
	src/SharpCompress/Common/Rar/Headers/RarHeaderFactory.cs
	src/SharpCompress/Common/SevenZip/ArchiveReader.cs
	src/SharpCompress/Common/SevenZip/DataReader.cs
	src/SharpCompress/Common/Tar/Headers/TarHeader.cs
	src/SharpCompress/Common/Zip/Headers/ZipFileEntry.cs
	src/SharpCompress/Common/Zip/WinzipAesCryptoStream.cs
	src/SharpCompress/Common/Zip/WinzipAesEncryptionData.cs
	src/SharpCompress/Common/Zip/ZipFilePart.cs
	src/SharpCompress/Compressor/Deflate/GZipStream.cs
	src/SharpCompress/Compressor/Deflate/Inflate.cs
	src/SharpCompress/Compressor/Deflate/ZlibBaseStream.cs
	src/SharpCompress/Compressor/LZMA/Bcj2DecoderStream.cs
	src/SharpCompress/Compressor/LZMA/LzmaStream.cs
	src/SharpCompress/Compressor/PPMd/H/FreqData.cs
	src/SharpCompress/Compressor/PPMd/H/PPMContext.cs
	src/SharpCompress/Compressor/PPMd/H/RarMemBlock.cs
	src/SharpCompress/Compressor/PPMd/H/RarNode.cs
	src/SharpCompress/Compressor/PPMd/H/State.cs
	src/SharpCompress/Compressor/PPMd/PpmdProperties.cs
	src/SharpCompress/Compressor/Rar/VM/RarVM.cs
	src/SharpCompress/EnumExtensions.cs
	src/SharpCompress/IO/MarkingBinaryReader.cs
	src/SharpCompress/Reader/ReaderFactory.cs
	src/SharpCompress/Utility.cs
	src/SharpCompress/Writer/IWriter.Extensions.cs
	src/SharpCompress/Writer/Zip/ZipCentralDirectoryEntry.cs
	src/SharpCompress/Writer/Zip/ZipWriter.cs
	test/SharpCompress.Test/Tar/TarArchiveTests.cs
	test/SharpCompress.Test/Tar/TarReaderTests.cs
2016-02-13 09:24:44 +00:00
Adam Hathcock
ee5e3fbc1d Update to 0.11.5 2016-02-13 09:17:29 +00:00
Adam Hathcock
d13b2ad073 Do less than equals 2016-02-13 09:16:43 +00:00
Adam Hathcock
e9a7efc371 Merge pull request #127 from eklann/bugfix-incomplete-rar
Fixed bug triggered by incomplete rar file.
2016-02-10 19:38:10 +00:00
Josef Eklann
9b8ddda191 Fixed bug triggered by incomplete rar file. 2016-02-10 14:56:05 +01:00
Adam Hathcock
1fc14e1075 Update to 0.11.4 2016-02-07 10:17:45 +00:00
Adam Hathcock
b3a5204e74 Bug fix for previous PR 2016-02-07 10:14:07 +00:00
Adam Hathcock
0fab1ff976 Merge pull request #125 from kenkendk/set_compression_info_on_stream
Fix setting compressioninfo on Zip streams
2016-02-04 10:00:20 +00:00
Kenneth Skovhede
a05b692fc3 More whitespace fix 2016-02-04 10:39:58 +01:00
Kenneth Skovhede
ed7f140364 Whitespace fix 2016-02-04 10:39:02 +01:00
Kenneth Skovhede
a4b594121e Added code to forward the compression info to the stream, such that it is possible to override the compression level and method on a per-stream basis. 2016-02-04 10:35:36 +01:00
Adam Hathcock
fe8da55c95 Merge pull request #122 from benshoof/net35_compat
Added support for .NET 3.5
2016-01-26 21:00:20 +00:00
benshoof
42c4eab4be Fix conflicts 2016-01-26 09:56:54 -09:00
Adam Hathcock
2e8844c896 This doesn't work :) 2016-01-26 11:57:40 +00:00
Adam Hathcock
aed7ff003d Merge pull request #121 from benshoof/rar_detection_fix
Fix false positives in Rar file detection
2016-01-26 11:54:07 +00:00
Adam Hathcock
681b28f654 Merge pull request #119 from benshoof/endian-neutral
SharpCompress now endian neutral
2016-01-26 11:51:30 +00:00
Adam Hathcock
0de64b1551 Use ExtractAllEntries if archives are SOLID types 2016-01-26 11:49:57 +00:00
Adam Hathcock
526df2404e Merge pull request #123 from kenkendk/remove_warnings
Fix various warnings
2016-01-26 09:21:03 +00:00
Kenneth Skovhede
f20274aac7 Removed debug comments 2016-01-26 10:18:23 +01:00
Kenneth Skovhede
08b899fdac Deleted unused variables 2016-01-26 10:18:13 +01:00
Kenneth Skovhede
7b91b6e7c8 Commented out various unused fields that cause warnings and makes the WarningsAsErrors directive abort the build 2016-01-26 09:48:00 +01:00
benshoof
1661b7ec36 Added support for .NET 3.5
SharpCompress can now be compiled for .NET 3.5 by defining symbol NET35
2016-01-25 09:02:19 -09:00
benshoof
e5ab9dc883 Fix false positives in Rar file detection 2016-01-25 08:03:40 -09:00
Adam Hathcock
25d22e33a7 Merge pull request #118 from kenkendk/master
Bugfix for inflate algorithm cutting off the end of certain blocks
2016-01-23 10:24:50 +00:00
benshoof
8ceac9000c SharpCompress now endian neutral
SharpCompress can now be used on machines with big endian architecture
such as powerpc. All byte conversions now run through Mono's
DataConverter (or a portable version for builds that don't allow unsafe
code) instead of BitConverter, as BitConverter's behavior depends on the
host cpu.
2016-01-22 14:32:35 -09:00
Kenneth Skovhede
ecceec8e1a Tabs -> spaces fix 2016-01-22 15:06:55 +01:00
Kenneth Skovhede
d5c88ebab3 Bugfix for inflate algorithm cutting off the end of certain blocks 2016-01-22 14:51:05 +01:00
Adam Hathcock
0a2adbc205 Merge pull request #115 from maxpiva/master
Adds Non Lineal "Solid Rar" Extraction to RarArchive.
2016-01-17 13:54:24 +00:00
mpiva
3be7f9da37 Adds Non Lineal "Solid Rar" Extraction to RarArchive. 2016-01-17 02:31:55 -03:00
Adam Hathcock
3f2ca67416 Forgot the file overload 2016-01-15 10:04:28 +00:00
Adam Hathcock
21087323af Make Tar last in detection as it contains other files 2016-01-09 13:29:29 +00:00
Adam Hathcock
505838a32a Fixes from last merge 2016-01-04 21:08:30 +00:00
Adam Hathcock
fd3f9eb382 Merge remote-tracking branch 'origin/master' into dnx
Conflicts:
	SharpCompress/SharpCompress.PortableTest.csproj
	SharpCompress/SharpCompress.Unsigned.csproj
	SharpCompress/SharpCompress.csproj
	src/SharpCompress/Archive/IArchiveEntry.Extensions.cs
	src/SharpCompress/Reader/IReader.Extensions.cs
2016-01-04 21:01:12 +00:00
Adam Hathcock
05f92018c3 Merge pull request #113 from zentron/master
Preserve File Timestamps with IReader
2016-01-04 12:04:05 +00:00
Rob
b8fc4a2415 Preserve File Timestamps with IReader 2016-01-04 16:26:43 +10:00
Adam Hathcock
ccd2fc6568 Merge branch 'master' into dnx
Conflicts:
	src/SharpCompress/Archive/Rar/RarArchive.cs
2016-01-03 11:19:04 +00:00
Adam Hathcock
a30872809d Merge pull request #99 from Rovak/rarreader-add-password
Add password when opening RarReader
2016-01-03 03:18:12 -08:00
Adam Hathcock
7abf2ed58b Update README 2016-01-03 11:15:36 +00:00
Adam Hathcock
4822f571c3 Add explicit target for NET 4.5 2016-01-02 15:04:04 +00:00
Adam Hathcock
b0fdac3e6f Have NO_CRYPTO and add Profile259 2016-01-02 15:02:45 +00:00
Adam Hathcock
df62c1d3b2 NO_FILE tag 2016-01-02 14:57:05 +00:00
Adam Hathcock
56912ade7a reorg test project.json 2016-01-01 12:37:37 +00:00
Adam Hathcock
43698b41a7 Need RijndaelEngine for some reason 2016-01-01 12:21:23 +00:00
Adam Hathcock
fd2beeab48 trying to fix RarRijndael 2016-01-01 11:47:38 +00:00
Adam Hathcock
5647a424e7 Fix test execution 2016-01-01 11:23:01 +00:00
Adam Hathcock
a0a418e90b Remove bad character 2016-01-01 11:16:42 +00:00
Adam Hathcock
75e09b24c0 Remove bouncy castle code 2016-01-01 11:06:14 +00:00
Adam Hathcock
0b06023b82 Moved tests and fixed up nuget metadata 2015-12-30 16:59:27 +00:00
Adam Hathcock
49707498a5 move files around 2015-12-30 11:19:42 +00:00
Adam Hathcock
a4c2d27985 tests run...need parallel none 2015-12-30 11:04:16 +00:00
Adam Hathcock
9fac34fb21 intermediate checkin of xunit conversion 2015-12-29 15:26:38 +00:00
Adam Hathcock
d4b22936af fix net 35 2015-12-29 14:06:35 +00:00
Adam Hathcock
f0d0143be0 Remove csproj and start converting tests 2015-12-29 13:52:55 +00:00
Adam Hathcock
7d1593e887 Support NET3.5+ and DNXCORE50 and DOTNET 5.1 + 2015-12-28 23:40:33 +00:00
Adam Hathcock
2588444948 Using DOTNET51 and DNXCORE50 targets 2015-12-28 23:33:18 +00:00
Adam Hathcock
69abb8446f Merge branch 'master' into dnx 2015-12-28 18:47:12 +00:00
Adam Hathcock
bec2662d23 Update version 2015-12-28 18:40:35 +00:00
Adam Hathcock
dd35052de9 Merge pull request #105 from benshoof/fix-tests-release-build
Fix Release build of Tests
2015-12-17 08:54:31 +00:00
Adam Hathcock
2a630e04b2 Merge pull request #107 from benshoof/fix-nonzip-perf-regression
Fixed serious performance regression (revert 0f12a073af)
2015-12-15 16:33:06 +00:00
benshoof
231b78e096 Revert 0f12a073af
Revert commit that caused all non-zip files to be read entirely upon
opening.
IsZipArchive() would read and process the entire file looking for a zip
header.
2015-12-15 07:28:50 -09:00
Adam Hathcock
ce6e1d26f4 Merge pull request #104 from benshoof/fix-vs2013-build
Fix VS2013 compiler warnings (errors)
2015-12-15 09:10:49 +00:00
benshoof
69a25cd142 Fix Release build of Tests
Fixes release builds of SharpCompress.Test and
SharpCompress.Test.Portable. The UNSIGNED symbol was missing from the
Release configurations of SharpCompress.Unsigned and
SharpCompress.PortableTest
2015-12-14 15:32:49 -09:00
benshoof
cc2ad7d8d5 Fix VS2013 compiler warnings (errors)
Fixes broken build in VS2013 introduced by
18bd810228. That commit attempted to fix a
compiler warning from VS2015, but this turns out to be a compiler bug:
https://github.com/dotnet/roslyn/issues/4027 . That commit added code
which VS2013 correctly treats as a compiler warning, breaking the VS2013
build.
I have reverted this unnecessary change to the deflate code, fixing the
VS2013 build, and disabled warning CS0675 on send_bits() which will
satisfy VS2015.
2015-12-14 15:24:33 -09:00
Adam Hathcock
6fee1f6dc3 Merge branch 'master' into dnx 2015-11-28 15:35:09 +00:00
Roy van Kaathoven
6e0f4ecbc9 Add password when opening RarReader 2015-10-27 18:52:06 +01:00
Adam Hathcock
e5d10e3dba Use only dotnet 2015-08-22 21:36:05 +01:00
Adam Hathcock
0ba87b6c62 Trying to get netcore45 in 2015-08-06 15:42:31 +01:00
Adam Hathcock
da47306f04 Use bouncy Sha256Digest 2015-08-06 14:51:36 +01:00
Adam Hathcock
1930126a59 Add Sha256Digest and Update other classes 2015-08-06 14:46:36 +01:00
Adam Hathcock
baf9f391f1 Setup bouncy castle crypto for dnx core 2015-08-06 14:46:07 +01:00
Adam Hathcock
979703dd1f ignore lock file 2015-08-06 14:25:54 +01:00
Adam Hathcock
ab9e8063dc remove lock file 2015-08-06 14:24:00 +01:00
Adam Hathcock
68c09d7221 Making a pass at DNX 2015-08-05 14:21:34 +01:00
753 changed files with 94757 additions and 52814 deletions

15
.circleci/config.yml Normal file
View File

@@ -0,0 +1,15 @@
version: 2
jobs:
build:
docker:
- image: microsoft/dotnet:2.1.301-sdk
steps:
- checkout
- run:
name: Install unzip
command: |
apt-get update
apt-get install -y unzip
- run:
name: Build
command: ./build.sh

5
.gitattributes vendored Normal file
View File

@@ -0,0 +1,5 @@
# Set the default behavior, in case people don't have core.autocrlf set.
* text=auto
# need original files to be windows
*.txt text eol=crlf

30
.gitignore vendored
View File

@@ -1,11 +1,19 @@
**/bin/*
**/obj/*
_ReSharper.SharpCompress/
bin/
*.suo
*.user
TestArchives/Scratch/
TestArchives/Scratch2/
TestResults/
*.nupkg
packages/*/
**/bin/*
**/obj/*
_ReSharper.SharpCompress/
bin/
*.suo
*.user
TestArchives/Scratch/
TestArchives/Scratch2/
TestResults/
*.nupkg
packages/*/
project.lock.json
tests/TestArchives/Scratch
.vs
tools
.vscode
.idea/
.DS_Store

60
FORMATS.md Normal file
View File

@@ -0,0 +1,60 @@
# Formats
## Accessing Archives
* Archive classes allow random access to a seekable stream.
* Reader classes allow forward-only reading on a stream.
* Writer classes allow forward-only Writing on a stream.
## Supported Format Table
| Archive Format | Compression Format(s) | Compress/Decompress | Archive API | Reader API | Writer API |
| ---------------------- | ------------------------------------------------- | ------------------- | --------------- | ---------- | ------------- |
| Rar | Rar | Decompress (1) | RarArchive | RarReader | N/A |
| Zip (2) | None, DEFLATE, Deflate64, BZip2, LZMA/LZMA2, PPMd | Both | ZipArchive | ZipReader | ZipWriter |
| Tar | None | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.GZip | DEFLATE | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.BZip2 | BZip2 | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.LZip | LZMA | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.XZ | LZMA2 | Decompress | TarArchive | TarReader | TarWriter (3) |
| GZip (single file) | DEFLATE | Both | GZipArchive | GZipReader | GZipWriter |
| 7Zip (4) | LZMA, LZMA2, BZip2, PPMd, BCJ, BCJ2, Deflate | Decompress | SevenZipArchive | N/A | N/A |
| LZip (single file) (5) | LZip (LZMA) | Both | LZipArchive | LZipReader | LZipWriter |
1. SOLID Rars are only supported in the RarReader API.
2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading/writing is supported but only with seekable streams as the Zip spec doesn't support Zip64 data in post data descriptors. Deflate64 is only supported for reading.
3. The Tar format requires a file size in the header. If no size is specified to the TarWriter and the stream is not seekable, then an exception will be thrown.
4. The 7Zip format doesn't allow for reading as a forward-only stream so 7Zip is only supported through the Archive API
5. LZip has no support for extra data like the file name or timestamp. There is a default filename used when looking at the entry Key on the archive.
## Compression Streams
For those who want to directly compress/decompress bits. The single file formats are represented here as well. However, BZip2, LZip and XZ have no metadata (GZip has a little) so using them without something like a Tar file makes little sense.
| Compressor | Compress/Decompress |
| --------------- | ------------------- |
| BZip2Stream | Both |
| GZipStream | Both |
| DeflateStream | Both |
| Deflate64Stream | Decompress |
| LZMAStream | Both |
| PPMdStream | Both |
| ADCStream | Decompress |
| LZipStream | Both |
| XZStream | Decompress |
## Archive Formats vs Compression
Sometimes the terminology gets mixed.
### Compression
DEFLATE, LZMA are pure compression algorithms
### Formats
Formats like Zip, 7Zip, Rar are archive formats only. They use other compression methods (e.g. DEFLATE, LZMA, etc.) or proprietary ones (e.g. RAR)
### Overlap
GZip, BZip2 and LZip are single file archival formats. The overlap in the API happens because Tar uses the single file formats as "compression" methods and the API tries to hide this a bit.

View File

@@ -1,24 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<package xmlns="http://schemas.microsoft.com/packaging/2010/07/nuspec.xsd">
<metadata>
<id>sharpcompress</id>
<version>0.11.2</version>
<title>SharpCompress - Pure C# Decompression/Compression</title>
<authors>Adam Hathcock</authors>
<owners>Adam Hathcock</owners>
<licenseUrl>https://github.com/adamhathcock/sharpcompress/blob/master/LICENSE.txt</licenseUrl>
<projectUrl>https://github.com/adamhathcock/sharpcompress</projectUrl>
<requireLicenseAcceptance>false</requireLicenseAcceptance>
<description>SharpCompress is a compression library for .NET/Mono/Silverlight/WP7/WindowsStore that can unrar, decompress 7zip, zip/unzip, tar/untar bzip2/unbzip2 and gzip/ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip is implemented.</description>
<releaseNotes />
<language>en-US</language>
<tags>rar unrar zip unzip bzip2 gzip tar 7zip</tags>
<dependencies>
</dependencies>
</metadata>
<files>
<file src="..\bin\Full\SharpCompress.dll" target="lib\net40\SharpCompress.dll" />
<file src="..\bin\WindowsStore\SharpCompress.dll" target="lib\netcore45\SharpCompress.dll" />
<file src="..\bin\Portable\SharpCompress.dll" target="lib\portable-net4+sl5+wp8+win8\SharpCompress.dll" />
</files>
</package>

207
README.md
View File

@@ -1,62 +1,189 @@
SharpCompress
=============
# SharpCompress
Github mirror of http://sharpcompress.codeplex.com
SharpCompress is a compression library in pure C# for .NET 3.5, 4.5, .NET Standard 1.0, 1.3 that can unrar, un7zip, unzip, untar unbzip2 and ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip are implemented.
SharpCompress is a compression library for .NET/Mono/Silverlight/WP7 that can unrar, un7zip, unzip, untar unbzip2 and ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip are implemented.
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
AppVeyor Build -
[![Build status](https://ci.appveyor.com/api/projects/status/voxg971oemmvxh1e/branch/master?svg=true)](https://ci.appveyor.com/project/adamhathcock/sharpcompress/branch/master)
A Simple Request
Circle CI Build -
[![CircleCI](https://circleci.com/gh/adamhathcock/sharpcompress.svg?style=svg)](https://circleci.com/gh/adamhathcock/sharpcompress)
## Need Help?
Post Issues on Github!
Check the [Supported Formats](FORMATS.md) and [Basic Usage.](USAGE.md)
## Recommended Formats
In general, I recommend GZip (Deflate)/BZip2 (BZip)/LZip (LZMA) as the simplicity of the formats lend to better long term archival as well as the streamability. Tar is often used in conjunction for multiple files in a single archive (e.g. `.tar.gz`)
Zip is okay, but it's a very hap-hazard format and the variation in headers and implementations makes it hard to get correct. Uses Deflate by default but supports a lot of compression methods.
RAR is not recommended as it's a proprietary format and the compression is closed source. Use Tar/LZip for LZMA
7Zip and XZ both are overly complicated. 7Zip does not support streamable formats. XZ has known holes explained here: (http://www.nongnu.org/lzip/xz_inadequate.html) Use Tar/LZip for LZMA compression instead.
## A Simple Request
Hi everyone. I hope you're using SharpCompress and finding it useful. Please give me feedback on what you'd like to see changed especially as far as usability goes. New feature suggestions are always welcome as well. I would also like to know what projects SharpCompress is being used in. I like seeing how it is used to give me ideas for future versions. Thanks!
Want to contribute?
Please do not email me directly to ask for help. If you think there is a real issue, please report it here.
## Want to contribute?
I'm always looking for help or ideas. Please submit code or email with ideas. Unfortunately, just letting me know you'd like to help is not enough because I really have no overall plan of what needs to be done. I'll definitely accept code submissions and add you as a member of the project!
TODOs (always lots):
* RAR 5 support
## TODOs (always lots)
* RAR 5 decryption support
* 7Zip writing
* Zip64
* Zip64 (Need writing and extend Reading)
* Multi-volume Zip support.
Version 0.11.1:
==============
- Added Cancel on IReader
- Removed .NET 2.0 support and LinqBridge dependency
## Version Log
Version 0.11:
==============
- Been over a year, contains mainly fixes from contributors!
- Possible breaking change: ArchiveEncoding is UTF8 by default now.
- TAR supports writing long names using longlink
- RAR Protect Header added
### Version 0.18
Version 0.10.3:
==============
- Finally fixed Disposal issue when creating a new archive with the Archive API
* [Now on Github releases](https://github.com/adamhathcock/sharpcompress/releases/tag/0.18)
Version 0.10.2:
==============
- Fixed Rar Header reading for invalid extended time headers.
- Windows Store assembly is now strong named
- Known issues with Long Tar names being worked on
- Updated to VS2013
- Portable targets SL5 and Windows Phone 8 (up from SL4 and WP7)
### Version 0.17.1
Version 0.10.1:
==============
- Fixed 7Zip extraction performance problem
* Fix - [Bug Fix for .NET Core on Windows](https://github.com/adamhathcock/sharpcompress/pull/257)
Version 0.10:
==============
- Added support for RAR Decryption (thanks to https://github.com/hrasyid)
- Embedded some BouncyCastle crypto classes to allow RAR Decryption and Winzip AES Decryption in Portable and Windows Store DLLs
- Built in Release (I think)
### Version 0.17.0
Some Help/Discussion:
https://sharpcompress.codeplex.com/discussions
* New - Full LZip support! Can read and write LZip files and Tars inside LZip files. [Make LZip a first class citizen. #241](https://github.com/adamhathcock/sharpcompress/issues/241)
* New - XZ read support! Can read XZ files and Tars inside XZ files. [XZ in SharpCompress #91](https://github.com/adamhathcock/sharpcompress/issues/94)
* Fix - [Regression - zip file writing on seekable streams always assumed stream start was 0. Introduced with Zip64 writing.](https://github.com/adamhathcock/sharpcompress/issues/244)
* Fix - [Zip files with post-data descriptors can be properly skipped via decompression](https://github.com/adamhathcock/sharpcompress/issues/162)
### Version 0.16.2
* Fix [.NET 3.5 should support files and cryptography (was a regression from 0.16.0)](https://github.com/adamhathcock/sharpcompress/pull/251)
* Fix [Zip per entry compression customization wrote the wrong method into the zip archive](https://github.com/adamhathcock/sharpcompress/pull/249)
### Version 0.16.1
* Fix [Preserve compression method when getting a compressed stream](https://github.com/adamhathcock/sharpcompress/pull/235)
* Fix [RAR entry key normalization fix](https://github.com/adamhathcock/sharpcompress/issues/201)
### Version 0.16.0
* Breaking - [Progress Event Tracking rethink](https://github.com/adamhathcock/sharpcompress/pull/226)
* Update to VS2017 - [VS2017](https://github.com/adamhathcock/sharpcompress/pull/231) - Framework targets have been changed.
* New - [Add Zip64 writing](https://github.com/adamhathcock/sharpcompress/pull/211)
* [Fix invalid/mismatching Zip version flags.](https://github.com/adamhathcock/sharpcompress/issues/164) - This allows nuget/System.IO.Packaging to read zip files generated by SharpCompress
* [Fix 7Zip directory hiding](https://github.com/adamhathcock/sharpcompress/pull/215/files)
* [Verify RAR CRC headers](https://github.com/adamhathcock/sharpcompress/pull/220)
### Version 0.15.2
* [Fix invalid headers](https://github.com/adamhathcock/sharpcompress/pull/210) - fixes an issue creating large-ish zip archives that was introduced with zip64 reading.
### Version 0.15.1
* [Zip64 extending information and ZipReader](https://github.com/adamhathcock/sharpcompress/pull/206)
### Version 0.15.0
* [Add zip64 support for ZipArchive extraction](https://github.com/adamhathcock/sharpcompress/pull/205)
### Version 0.14.1
* [.NET Assemblies aren't strong named](https://github.com/adamhathcock/sharpcompress/issues/158)
* [Pkware encryption for Zip files didn't allow for multiple reads of an entry](https://github.com/adamhathcock/sharpcompress/issues/197)
* [GZip Entry couldn't be read multiple times](https://github.com/adamhathcock/sharpcompress/issues/198)
### Version 0.14.0
* [Support for LZip reading in for Tars](https://github.com/adamhathcock/sharpcompress/pull/191)
### Version 0.13.1
* [Fix null password on ReaderFactory. Fix null options on SevenZipArchive](https://github.com/adamhathcock/sharpcompress/pull/188)
* [Make PpmdProperties lazy to avoid unnecessary allocations.](https://github.com/adamhathcock/sharpcompress/pull/185)
### Version 0.13.0
* Breaking change: Big refactor of Options on API.
* 7Zip supports Deflate
### Version 0.12.4
* Forward only zip issue fix https://github.com/adamhathcock/sharpcompress/issues/160
* Try to fix frameworks again by copying targets from JSON.NET
### Version 0.12.3
* 7Zip fixes https://github.com/adamhathcock/sharpcompress/issues/73
* Maybe all profiles will work with project.json now
### Version 0.12.2
* Support Profile 259 again
### Version 0.12.1
* Support Silverlight 5
### Version 0.12.0
* .NET Core RTM!
* Bug fix for Tar long paths
### Version 0.11.6
* Bug fix for global header in Tar
* Writers now have a leaveOpen `bool` overload. They won't close streams if not-requested to.
### Version 0.11.5
* Bug fix in Skip method
### Version 0.11.4
* SharpCompress is now endian neutral (matters for Mono platforms)
* Fix for Inflate (need to change implementation)
* Fixes for RAR detection
### Version 0.11.1
* Added Cancel on IReader
* Removed .NET 2.0 support and LinqBridge dependency
### Version 0.11
* Been over a year, contains mainly fixes from contributors!
* Possible breaking change: ArchiveEncoding is UTF8 by default now.
* TAR supports writing long names using longlink
* RAR Protect Header added
### Version 0.10.3
* Finally fixed Disposal issue when creating a new archive with the Archive API
### Version 0.10.2
* Fixed Rar Header reading for invalid extended time headers.
* Windows Store assembly is now strong named
* Known issues with Long Tar names being worked on
* Updated to VS2013
* Portable targets SL5 and Windows Phone 8 (up from SL4 and WP7)
### Version 0.10.1
* Fixed 7Zip extraction performance problem
### Version 0.10:
* Added support for RAR Decryption (thanks to https://github.com/hrasyid)
* Embedded some BouncyCastle crypto classes to allow RAR Decryption and Winzip AES Decryption in Portable and Windows Store DLLs
* Built in Release (I think)
XZ implementation based on: https://github.com/sambott/XZ.NET by @sambott
7Zip implementation based on: https://code.google.com/p/managed-lzma/

View File

@@ -1,183 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using SharpCompress.Archive;
using SharpCompress.Common;
namespace SharpCompress.Test
{
public class ArchiveTests : TestBase
{
    // Progress-tracking state shared by the extraction event handlers below.
    private long? entryTotal;   // size of the entry currently being extracted (null until the first EntryExtractionBegin fires)
    private long partTotal;     // ArchiveFileRead: size of the current file part; ArchiveFileReadEx: running total of bytes from completed entries
    private long totalSize;     // archive.TotalUncompressSize, captured at the start of ArchiveFileReadEx

    /// <summary>
    /// Opens a single solid test archive from a stream, extracts everything via
    /// ExtractAllEntries and verifies the results against the known-good files.
    /// </summary>
    protected void ArchiveStreamReadExtractAll(string testArchive, CompressionType compression)
    {
        testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
        ArchiveStreamReadExtractAll(testArchive.AsEnumerable(), compression);
    }

    protected void ArchiveStreamReadExtractAll(IEnumerable<string> testArchives, CompressionType compression)
    {
        foreach (var path in testArchives)
        {
            ResetScratch();
            using (Stream stream = File.OpenRead(path))
            using (var archive = ArchiveFactory.Open(stream))
            {
                Assert.IsTrue(archive.IsSolid);
                using (var reader = archive.ExtractAllEntries())
                {
                    ReaderTests.UseReader(this, reader, compression);
                }
                VerifyFiles();

                // Solid Rar archives do not support random-access extraction of
                // individual entries, so skip the per-entry pass below for them.
                if (archive.Entries.First().CompressionType == CompressionType.Rar)
                {
                    return;
                }
                foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
                {
                    entry.WriteToDirectory(SCRATCH_FILES_PATH,
                                           ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
                }
            }
            VerifyFiles();
        }
    }

    /// <summary>
    /// Opens each archive from a stream and extracts every file entry to the scratch directory.
    /// </summary>
    protected void ArchiveStreamRead(string testArchive)
    {
        testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
        ArchiveStreamRead(testArchive.AsEnumerable());
    }

    protected void ArchiveStreamRead(params string[] testArchives)
    {
        ArchiveStreamRead(testArchives.Select(x => Path.Combine(TEST_ARCHIVES_PATH, x)));
    }

    protected void ArchiveStreamRead(IEnumerable<string> testArchives)
    {
        foreach (var path in testArchives)
        {
            ResetScratch();
            using (Stream stream = File.OpenRead(path))
            using (var archive = ArchiveFactory.Open(stream))
            {
                foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
                {
                    entry.WriteToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
                }
            }
            VerifyFiles();
        }
    }

    /// <summary>
    /// Opens each archive by file path, wires up the progress events and extracts every file entry.
    /// </summary>
    protected void ArchiveFileRead(string testArchive)
    {
        testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
        ArchiveFileRead(testArchive.AsEnumerable());
    }

    protected void ArchiveFileRead(IEnumerable<string> testArchives)
    {
        foreach (var path in testArchives)
        {
            ResetScratch();
            using (var archive = ArchiveFactory.Open(path))
            {
                archive.EntryExtractionBegin += archive_EntryExtractionBegin;
                archive.FilePartExtractionBegin += archive_FilePartExtractionBegin;
                archive.CompressedBytesRead += archive_CompressedBytesRead;
                foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
                {
                    entry.WriteToDirectory(SCRATCH_FILES_PATH,
                                           ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
                }
            }
            VerifyFiles();
        }
    }

    // Logs per-part and per-entry compressed-read progress during ArchiveFileRead.
    void archive_CompressedBytesRead(object sender, CompressedBytesReadEventArgs e)
    {
        Console.WriteLine("Read Compressed File Part Bytes: {0} Percentage: {1}%",
                          e.CurrentFilePartCompressedBytesRead, CreatePercentage(e.CurrentFilePartCompressedBytesRead, partTotal));
        string percentage = entryTotal.HasValue ? CreatePercentage(e.CompressedBytesRead,
                                                                   entryTotal.Value).ToString() : "Unknown";
        Console.WriteLine("Read Compressed File Entry Bytes: {0} Percentage: {1}%",
                          e.CompressedBytesRead, percentage);
    }

    void archive_FilePartExtractionBegin(object sender, FilePartExtractionBeginEventArgs e)
    {
        this.partTotal = e.Size;
        Console.WriteLine("Initializing File Part Extraction: " + e.Name);
    }

    void archive_EntryExtractionBegin(object sender, ArchiveExtractionEventArgs<IArchiveEntry> e)
    {
        this.entryTotal = e.Item.Size;
        Console.WriteLine("Initializing File Entry Extraction: " + e.Item.Key);
    }

    protected void ArchiveFileReadEx(string testArchive)
    {
        testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
        ArchiveFileReadEx(testArchive.AsEnumerable());
    }

    /// <summary>
    /// Demonstrate the TotalUncompressSize property, and the ExtractOptions.PreserveFileTime and ExtractOptions.PreserveAttributes extract options
    /// </summary>
    protected void ArchiveFileReadEx(IEnumerable<string> testArchives)
    {
        foreach (var path in testArchives)
        {
            ResetScratch();
            using (var archive = ArchiveFactory.Open(path))
            {
                this.totalSize = archive.TotalUncompressSize;
                archive.EntryExtractionBegin += Archive_EntryExtractionBeginEx;
                archive.EntryExtractionEnd += Archive_EntryExtractionEndEx;
                archive.CompressedBytesRead += Archive_CompressedBytesReadEx;
                foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
                {
                    entry.WriteToDirectory(SCRATCH_FILES_PATH,
                                           ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite | ExtractOptions.PreserveFileTime | ExtractOptions.PreserveAttributes);
                }
            }
            VerifyFilesEx();
        }
    }

    private void Archive_EntryExtractionEndEx(object sender, ArchiveExtractionEventArgs<IArchiveEntry> e)
    {
        // Accumulate completed-entry sizes so Archive_CompressedBytesReadEx can
        // report overall progress against totalSize.
        this.partTotal += e.Item.Size;
    }

    private void Archive_CompressedBytesReadEx(object sender, CompressedBytesReadEventArgs e)
    {
        string percentage = this.entryTotal.HasValue ? this.CreatePercentage(e.CompressedBytesRead, this.entryTotal.Value).ToString() : "-";
        string totalPercentage = this.CreatePercentage(this.partTotal + e.CompressedBytesRead, this.totalSize).ToString();
        Console.WriteLine(@"Read Compressed File Progress: {0}% Total Progress {1}%", percentage, totalPercentage);
    }

    private void Archive_EntryExtractionBeginEx(object sender, ArchiveExtractionEventArgs<IArchiveEntry> e)
    {
        this.entryTotal = e.Item.Size;
    }

    /// <summary>
    /// Integer percentage of <paramref name="n"/> out of <paramref name="d"/>.
    /// Returns 0 when the denominator is 0 — the previous unguarded division
    /// produced double.Infinity, which casts to a garbage int value.
    /// </summary>
    private int CreatePercentage(long n, long d)
    {
        if (d == 0)
        {
            return 0;
        }
        return (int)(((double)n / (double)d) * 100);
    }
}
}

View File

@@ -1,61 +0,0 @@
using System;
using System.IO;
using System.Linq;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using SharpCompress.Archive;
using SharpCompress.Archive.GZip;
namespace SharpCompress.Test
{
[TestClass]
public class GZipArchiveTests : ArchiveTests
{
    public GZipArchiveTests()
    {
        // GZip streams carry no inner file listing to compare names against,
        // so verification is done by file extension instead.
        UseExtensionInsteadOfNameToVerify = true;
    }

    /// <summary>
    /// Decompresses Tar.tar.gz through the generic ArchiveFactory API and
    /// compares the produced tar with the reference Tar.tar.
    /// </summary>
    [TestMethod]
    public void GZip_Archive_Generic()
    {
        ResetScratch();
        string archivePath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz");
        using (Stream stream = File.Open(archivePath, FileMode.Open))
        using (var archive = ArchiveFactory.Open(stream))
        {
            var firstEntry = archive.Entries.First();
            string outputPath = Path.Combine(SCRATCH_FILES_PATH, firstEntry.Key);
            firstEntry.WriteToFile(outputPath);
        }
        CompareArchivesByPath(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar"),
                              Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar"));
    }

    /// <summary>
    /// Same as GZip_Archive_Generic, but through the type-specific GZipArchive API.
    /// </summary>
    [TestMethod]
    public void GZip_Archive()
    {
        ResetScratch();
        string archivePath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz");
        using (Stream stream = File.Open(archivePath, FileMode.Open))
        using (var archive = GZipArchive.Open(stream))
        {
            var firstEntry = archive.Entries.First();
            string outputPath = Path.Combine(SCRATCH_FILES_PATH, firstEntry.Key);
            firstEntry.WriteToFile(outputPath);
        }
        CompareArchivesByPath(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar"),
                              Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar"));
    }

    /// <summary>
    /// A gzip archive holds exactly one entry, so adding a second one must throw.
    /// </summary>
    [TestMethod]
    [ExpectedException(typeof(InvalidOperationException))]
    public void GZip_Archive_NoAdd()
    {
        string jpgPath = Path.Combine(ORIGINAL_FILES_PATH, "jpg\\test.jpg");
        ResetScratch();
        string archivePath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz");
        using (Stream stream = File.Open(archivePath, FileMode.Open))
        using (var archive = GZipArchive.Open(stream))
        {
            archive.AddEntry("jpg\\test.jpg", jpgPath);
            archive.SaveTo(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar.gz"));
        }
    }
}
}

View File

@@ -1,36 +0,0 @@
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
// NOTE(review): AssemblyCompany/AssemblyCopyright below look like Visual Studio
// template defaults ("Microsoft") — confirm the intended values for this project.
[assembly: AssemblyTitle("SharpCompress.Test")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("Microsoft")]
[assembly: AssemblyProduct("SharpCompress.Test")]
[assembly: AssemblyCopyright("Copyright © Microsoft 2011")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("f01fddfb-445f-4548-9f69-88b69a8b71b0")]
// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]

View File

@@ -1,63 +0,0 @@
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.269
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace SharpCompress.Test.Properties {
using System;
/// <summary>
/// A strongly-typed resource class, for looking up localized strings, etc.
/// </summary>
// This class was auto-generated by the StronglyTypedResourceBuilder
// class via a tool like ResGen or Visual Studio.
// To add or remove a member, edit your .ResX file then rerun ResGen
// with the /str option, or rebuild your VS project.
// NOTE(review): this class is tool-generated (StronglyTypedResourceBuilder, per the
// file header) — edit the .resx and regenerate instead of hand-editing this code.
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
internal class Resources {

    // Cached ResourceManager, created lazily on first access to the ResourceManager property.
    private static global::System.Resources.ResourceManager resourceMan;

    // Culture override used for all lookups; null means the current thread's UI culture.
    private static global::System.Globalization.CultureInfo resourceCulture;

    [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
    internal Resources() {
    }

    /// <summary>
    ///   Returns the cached ResourceManager instance used by this class.
    /// </summary>
    [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
    internal static global::System.Resources.ResourceManager ResourceManager {
        get {
            // Lazy init: create the manager on first use and cache it for later calls.
            if (object.ReferenceEquals(resourceMan, null)) {
                global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("SharpCompress.Test.Properties.Resources", typeof(Resources).Assembly);
                resourceMan = temp;
            }
            return resourceMan;
        }
    }

    /// <summary>
    ///   Overrides the current thread's CurrentUICulture property for all
    ///   resource lookups using this strongly typed resource class.
    /// </summary>
    [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
    internal static global::System.Globalization.CultureInfo Culture {
        get {
            return resourceCulture;
        }
        set {
            resourceCulture = value;
        }
    }
}
}

View File

@@ -1,117 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<root>
<!--
Microsoft ResX Schema
Version 2.0
The primary goals of this format is to allow a simple XML format
that is mostly human readable. The generation and parsing of the
various data types are done through the TypeConverter classes
associated with the data types.
Example:
... ado.net/XML headers & schema ...
<resheader name="resmimetype">text/microsoft-resx</resheader>
<resheader name="version">2.0</resheader>
<resheader name="reader">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader>
<resheader name="writer">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader>
<data name="Name1"><value>this is my long string</value><comment>this is a comment</comment></data>
<data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data>
<data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64">
<value>[base64 mime encoded serialized .NET Framework object]</value>
</data>
<data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64">
<value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value>
<comment>This is a comment</comment>
</data>
There are any number of "resheader" rows that contain simple
name/value pairs.
Each data row contains a name, and value. The row also contains a
type or mimetype. Type corresponds to a .NET class that support
text/value conversion through the TypeConverter architecture.
Classes that don't support this are serialized and stored with the
mimetype set.
The mimetype is used for serialized objects, and tells the
ResXResourceReader how to depersist the object. This is currently not
extensible. For a given mimetype the value must be set accordingly:
Note - application/x-microsoft.net.object.binary.base64 is the format
that the ResXResourceWriter will generate, however the reader can
read any of the formats listed below.
mimetype: application/x-microsoft.net.object.binary.base64
value : The object must be serialized with
: System.Serialization.Formatters.Binary.BinaryFormatter
: and then encoded with base64 encoding.
mimetype: application/x-microsoft.net.object.soap.base64
value : The object must be serialized with
: System.Runtime.Serialization.Formatters.Soap.SoapFormatter
: and then encoded with base64 encoding.
mimetype: application/x-microsoft.net.object.bytearray.base64
value : The object must be serialized into a byte array
: using a System.ComponentModel.TypeConverter
: and then encoded with base64 encoding.
-->
<xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
<xsd:element name="root" msdata:IsDataSet="true">
<xsd:complexType>
<xsd:choice maxOccurs="unbounded">
<xsd:element name="metadata">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" />
<xsd:attribute name="type" type="xsd:string" />
<xsd:attribute name="mimetype" type="xsd:string" />
</xsd:complexType>
</xsd:element>
<xsd:element name="assembly">
<xsd:complexType>
<xsd:attribute name="alias" type="xsd:string" />
<xsd:attribute name="name" type="xsd:string" />
</xsd:complexType>
</xsd:element>
<xsd:element name="data">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
<xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" msdata:Ordinal="1" />
<xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
<xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
</xsd:complexType>
</xsd:element>
<xsd:element name="resheader">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" />
</xsd:complexType>
</xsd:element>
</xsd:choice>
</xsd:complexType>
</xsd:element>
</xsd:schema>
<resheader name="resmimetype">
<value>text/microsoft-resx</value>
</resheader>
<resheader name="version">
<value>2.0</value>
</resheader>
<resheader name="reader">
<value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<resheader name="writer">
<value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
</root>

View File

@@ -1,26 +0,0 @@
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.269
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace SharpCompress.Test.Properties {
// NOTE(review): tool-generated settings singleton (SettingsSingleFileGenerator, per
// the file header) — regenerate from the .settings file instead of hand-editing.
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "10.0.0.0")]
internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {

    // Singleton instance; ApplicationSettingsBase.Synchronized wraps it for thread-safe access.
    private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));

    public static Settings Default {
        get {
            return defaultInstance;
        }
    }
}
}

View File

@@ -1,7 +0,0 @@
<?xml version='1.0' encoding='utf-8'?>
<SettingsFile xmlns="http://schemas.microsoft.com/VisualStudio/2004/01/settings" CurrentProfile="(Default)">
<Profiles>
<Profile Name="(Default)" />
</Profiles>
<Settings />
</SettingsFile>

View File

@@ -1,246 +0,0 @@
using System.IO;
using System.Linq;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using SharpCompress.Archive;
using SharpCompress.Archive.Rar;
using SharpCompress.Common;
namespace SharpCompress.Test
{
[TestClass]
public class RarArchiveTests : ArchiveTests
{
    [TestMethod]
    public void Rar_EncryptedFileAndHeader_Archive()
    {
        ReadRarPassword("Rar.encrypted_filesAndHeader.rar", "test");
    }

    [TestMethod]
    public void Rar_EncryptedFileOnly_Archive()
    {
        ReadRarPassword("Rar.encrypted_filesOnly.rar", "test");
    }

    [TestMethod]
    public void Rar_Encrypted_Archive()
    {
        ReadRarPassword("Encrypted.rar", "test");
    }

    // Opens a password-protected rar from a stream, extracts every file entry to the
    // scratch directory and verifies the output against the known-good originals.
    private void ReadRarPassword(string testArchive, string password)
    {
        ResetScratch();
        using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, testArchive)))
        using (var archive = RarArchive.Open(stream, Options.KeepStreamsOpen, password))
        {
            foreach (var entry in archive.Entries)
            {
                if (!entry.IsDirectory)
                {
                    Assert.AreEqual(entry.CompressionType, CompressionType.Rar);
                    entry.WriteToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
                }
            }
        }
        VerifyFiles();
    }

    // Opening a multi-part encrypted archive is expected to fail with InvalidFormatException.
    [TestMethod]
    [ExpectedException(typeof(InvalidFormatException))]
    public void Rar_Multi_Archive_Encrypted()
    {
        ArchiveFileReadPassword("EncryptedParts.part01.rar", "test");
    }

    // Path-based variant of ReadRarPassword (archive opened by file name, not stream).
    protected void ArchiveFileReadPassword(string archiveName, string password)
    {
        ResetScratch();
        using (var archive = RarArchive.Open(Path.Combine(TEST_ARCHIVES_PATH, archiveName), Options.None, password))
        {
            foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
            {
                entry.WriteToDirectory(SCRATCH_FILES_PATH,
                                       ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
            }
        }
        VerifyFiles();
    }

    [TestMethod]
    public void Rar_None_ArchiveStreamRead()
    {
        ArchiveStreamRead("Rar.none.rar");
    }

    [TestMethod]
    public void Rar_ArchiveStreamRead()
    {
        ArchiveStreamRead("Rar.rar");
    }

    // Regression test: archives with invalid extended-time headers must still extract.
    [TestMethod]
    public void Rar_test_invalid_exttime_ArchiveStreamRead()
    {
        ResetScratch();
        using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "test_invalid_exttime.rar")))
        {
            using (var archive = ArchiveFactory.Open(stream))
            {
                foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
                {
                    entry.WriteToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
                }
            }
        }
    }

    // Rar data embedded after a jpeg: Options.LookForHeader scans for the rar signature.
    [TestMethod]
    public void Rar_Jpg_ArchiveStreamRead()
    {
        ResetScratch();
        using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "RarJpeg.jpg")))
        {
            using (var archive = RarArchive.Open(stream, Options.LookForHeader))
            {
                foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
                {
                    entry.WriteToDirectory(SCRATCH_FILES_PATH,
                                           ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
                }
            }
            VerifyFiles();
        }
    }

    [TestMethod]
    public void Rar_IsSolidArchiveCheck()
    {
        ResetScratch();
        using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Rar.rar")))
        {
            using (var archive = RarArchive.Open(stream))
            {
                Assert.IsFalse(archive.IsSolid);
                foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
                {
                    entry.WriteToDirectory(SCRATCH_FILES_PATH,
                                           ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
                }
            }
        }
        VerifyFiles();
    }

    // Solid archives cannot be extracted entry-by-entry via the Archive API;
    // ExtractAllEntries (see Rar_Solid_StreamRead_Extract_All) must be used instead.
    [TestMethod]
    [ExpectedException(typeof(InvalidFormatException))]
    public void Rar_Solid_ArchiveStreamRead()
    {
        ArchiveStreamRead("Rar.solid.rar");
    }

    [TestMethod]
    public void Rar_Solid_StreamRead_Extract_All()
    {
        ArchiveStreamReadExtractAll("Rar.solid.rar", CompressionType.Rar);
    }

    // Multi-volume archive opened from all six part streams at once.
    [TestMethod]
    public void Rar_Multi_ArchiveStreamRead()
    {
        var testArchives = new string[] { "Rar.multi.part01.rar",
                                          "Rar.multi.part02.rar",
                                          "Rar.multi.part03.rar",
                                          "Rar.multi.part04.rar",
                                          "Rar.multi.part05.rar",
                                          "Rar.multi.part06.rar"};
        ResetScratch();
        using (var archive = RarArchive.Open(testArchives.Select(s => Path.Combine(TEST_ARCHIVES_PATH, s))
                                                         .Select(p => File.OpenRead(p))))
        {
            foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
            {
                entry.WriteToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
            }
        }
        VerifyFiles();
    }

    [TestMethod]
    public void RarNoneArchiveFileRead()
    {
        ArchiveFileRead("Rar.none.rar");
    }

    [TestMethod]
    public void Rar_ArchiveFileRead()
    {
        ArchiveFileRead("Rar.rar");
    }

    [TestMethod]
    public void Rar_ArchiveFileRead_HasDirectories()
    {
        ResetScratch();
        using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Rar.rar")))
        {
            using (var archive = RarArchive.Open(stream))
            {
                Assert.IsFalse(archive.IsSolid);
                Assert.IsTrue(archive.Entries.Any(entry => entry.IsDirectory));
            }
        }
    }

    [TestMethod]
    public void Rar_Jpg_ArchiveFileRead()
    {
        ResetScratch();
        using (var archive = RarArchive.Open(Path.Combine(TEST_ARCHIVES_PATH, "RarJpeg.jpg"), Options.LookForHeader))
        {
            foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
            {
                entry.WriteToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
            }
        }
        VerifyFiles();
    }

    [TestMethod]
    [ExpectedException(typeof(InvalidFormatException))]
    public void Rar_Solid_ArchiveFileRead()
    {
        ArchiveFileRead("Rar.solid.rar");
    }

    // When opened by path, the remaining parts are located automatically from part01.
    [TestMethod]
    public void Rar_Multi_ArchiveFileRead()
    {
        ArchiveFileRead("Rar.multi.part01.rar");
    }

    [TestMethod]
    public void Rar_IsFirstVolume_True()
    {
        using (var archive = RarArchive.Open(Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part01.rar")))
        {
            Assert.IsTrue(archive.IsMultipartVolume());
            Assert.IsTrue(archive.IsFirstVolume());
        }
    }

    [TestMethod]
    public void Rar_IsFirstVolume_False()
    {
        using (var archive = RarArchive.Open(Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part03.rar")))
        {
            Assert.IsTrue(archive.IsMultipartVolume());
            Assert.IsFalse(archive.IsFirstVolume());
        }
    }
}
}

View File

@@ -1,60 +0,0 @@
using System.IO;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using SharpCompress.Common;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
namespace SharpCompress.Test.Rar
{
/// <summary>
/// Summary description for RarFactoryReaderTest
/// </summary>
[TestClass]
public class RarHeaderFactoryTest : TestBase
{
    // Factory under test; recreated before every test by Initialize().
    private RarHeaderFactory headerFactory;

    [TestInitialize]
    public void Initialize()
    {
        ResetScratch();
        headerFactory = new RarHeaderFactory(StreamingMode.Seekable, Options.KeepStreamsOpen);
    }

    [TestMethod]
    public void ReadHeaders_RecognizeEncryptedFlag()
    {
        ReadEncryptedFlag("Rar.Encrypted_filesAndHeader.rar", true);
    }

    [TestMethod]
    public void ReadHeaders_RecognizeNoEncryptedFlag()
    {
        ReadEncryptedFlag("Rar.rar", false);
    }

    // Walks the archive's headers until the archive header is reached, then checks
    // whether the factory detected the encryption flag as expected.
    private void ReadEncryptedFlag(string testArchive, bool isEncrypted)
    {
        using (var stream = GetReaderStream(testArchive))
        {
            foreach (var header in headerFactory.ReadHeaders(stream))
            {
                if (header.HeaderType != HeaderType.ArchiveHeader)
                {
                    continue;
                }
                Assert.AreEqual(isEncrypted, headerFactory.IsEncrypted);
                break;
            }
        }
    }

    // Opens the named test archive for reading.
    private FileStream GetReaderStream(string testArchive)
    {
        var fullPath = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
        return new FileStream(fullPath, FileMode.Open);
    }
}
}

View File

@@ -1,255 +0,0 @@
using System.IO;
using System.Linq;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using SharpCompress.Common;
using SharpCompress.Reader;
using SharpCompress.Reader.Rar;
namespace SharpCompress.Test
{
[TestClass]
public class RarReaderTests : ReaderTests
{
    // Forward-only extraction of a six-part multi-volume archive via RarReader.
    [TestMethod]
    public void Rar_Multi_Reader()
    {
        var testArchives = new string[] { "Rar.multi.part01.rar",
                                          "Rar.multi.part02.rar",
                                          "Rar.multi.part03.rar",
                                          "Rar.multi.part04.rar",
                                          "Rar.multi.part05.rar",
                                          "Rar.multi.part06.rar"};
        ResetScratch();
        using (var reader = RarReader.Open(testArchives.Select(s => Path.Combine(TEST_ARCHIVES_PATH, s))
                                                       .Select(p => File.OpenRead(p))))
        {
            while (reader.MoveToNextEntry())
            {
                reader.WriteEntryToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
            }
        }
        VerifyFiles();
    }

    // NOTE(review): [TestMethod] is commented out, so this test is disabled —
    // presumably multi-part encrypted reading is unsupported; confirm before enabling.
    //[TestMethod]
    public void Rar_Multi_Reader_Encrypted()
    {
        var testArchives = new string[] { "EncryptedParts.part01.rar",
                                          "EncryptedParts.part02.rar",
                                          "EncryptedParts.part03.rar",
                                          "EncryptedParts.part04.rar",
                                          "EncryptedParts.part05.rar",
                                          "EncryptedParts.part06.rar"};
        ResetScratch();
        using (var reader = RarReader.Open(testArchives.Select(s => Path.Combine(TEST_ARCHIVES_PATH, s))
                                                       .Select(p => File.OpenRead(p))))
        {
            while (reader.MoveToNextEntry())
            {
                reader.WriteEntryToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
            }
        }
        VerifyFiles();
    }

    // Copies the volumes first, extracts from the copies, then deletes them —
    // proving the reader releases its file handles after disposal.
    [TestMethod]
    public void Rar_Multi_Reader_Delete_Files()
    {
        var testArchives = new string[] { "Rar.multi.part01.rar",
                                          "Rar.multi.part02.rar",
                                          "Rar.multi.part03.rar",
                                          "Rar.multi.part04.rar",
                                          "Rar.multi.part05.rar",
                                          "Rar.multi.part06.rar"};
        ResetScratch();
        foreach (var file in testArchives)
        {
            File.Copy(Path.Combine(TEST_ARCHIVES_PATH, file), Path.Combine(SCRATCH2_FILES_PATH, file));
        }
        // Materialized eagerly (ToList) so every stream can be disposed explicitly below.
        var streams = testArchives.Select(s => Path.Combine(SCRATCH2_FILES_PATH, s)).Select(File.OpenRead).ToList();
        using (var reader = RarReader.Open(streams))
        {
            while (reader.MoveToNextEntry())
            {
                reader.WriteEntryToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
            }
        }
        foreach (var stream in streams)
        {
            stream.Dispose();
        }
        VerifyFiles();
        foreach (var file in testArchives.Select(s => Path.Combine(SCRATCH2_FILES_PATH, s)))
        {
            File.Delete(file);
        }
    }

    [TestMethod]
    public void Rar_None_Reader()
    {
        Read("Rar.none.rar", CompressionType.Rar);
    }

    [TestMethod]
    public void Rar_Reader()
    {
        Read("Rar.rar", CompressionType.Rar);
    }

    [TestMethod]
    public void Rar_EncryptedFileAndHeader_Reader()
    {
        ReadRar("Rar.encrypted_filesAndHeader.rar", "test");
    }

    [TestMethod]
    public void Rar_EncryptedFileOnly_Reader()
    {
        ReadRar("Rar.encrypted_filesOnly.rar", "test");
    }

    [TestMethod]
    public void Rar_Encrypted_Reader()
    {
        ReadRar("Encrypted.rar", "test");
    }

    // Forward-only extraction of a password-protected archive, verified against originals.
    private void ReadRar(string testArchive, string password)
    {
        ResetScratch();
        using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, testArchive)))
        using (var reader = RarReader.Open(stream, password))
        {
            while (reader.MoveToNextEntry())
            {
                if (!reader.Entry.IsDirectory)
                {
                    Assert.AreEqual(reader.Entry.CompressionType, CompressionType.Rar);
                    reader.WriteEntryToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
                }
            }
        }
        VerifyFiles();
    }

    // Manual extraction through OpenEntryStream instead of WriteEntryToDirectory,
    // recreating the directory layout by hand.
    [TestMethod]
    public void Rar_Entry_Stream()
    {
        ResetScratch();
        using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Rar.rar")))
        using (var reader = RarReader.Open(stream))
        {
            while (reader.MoveToNextEntry())
            {
                if (!reader.Entry.IsDirectory)
                {
                    Assert.AreEqual(reader.Entry.CompressionType, CompressionType.Rar);
                    using (var entryStream = reader.OpenEntryStream())
                    {
                        string file = Path.GetFileName(reader.Entry.Key);
                        string folder = Path.GetDirectoryName(reader.Entry.Key);
                        string destdir = Path.Combine(SCRATCH_FILES_PATH, folder);
                        if (!Directory.Exists(destdir))
                        {
                            Directory.CreateDirectory(destdir);
                        }
                        string destinationFileName = Path.Combine(destdir, file);
                        using (FileStream fs = File.OpenWrite(destinationFileName))
                        {
                            entryStream.TransferTo(fs);
                        }
                    }
                }
            }
        }
        VerifyFiles();
    }

    // Rar data embedded inside another file; LookForHeader scans for the signature.
    [TestMethod]
    public void Rar_Reader_Audio_program()
    {
        ResetScratch();
        using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Audio_program.rar")))
        using (var reader = RarReader.Open(stream, Options.LookForHeader))
        {
            while (reader.MoveToNextEntry())
            {
                Assert.AreEqual(reader.Entry.CompressionType, CompressionType.Rar);
                reader.WriteEntryToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
            }
        }
        CompareFilesByPath(Path.Combine(SCRATCH_FILES_PATH, "test.dat"),
                           Path.Combine(MISC_TEST_FILES_PATH, "test.dat"));
    }

    [TestMethod]
    public void Rar_Jpg_Reader()
    {
        ResetScratch();
        using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "RarJpeg.jpg")))
        using (var reader = RarReader.Open(stream, Options.LookForHeader))
        {
            while (reader.MoveToNextEntry())
            {
                Assert.AreEqual(reader.Entry.CompressionType, CompressionType.Rar);
                reader.WriteEntryToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
            }
        }
        VerifyFiles();
    }

    // Unlike the Archive API, the forward-only Reader can handle solid archives.
    [TestMethod]
    public void Rar_Solid_Reader()
    {
        Read("Rar.solid.rar", CompressionType.Rar);
    }

    // Extracts only entries whose key contains "jpg", skipping the rest of the solid archive.
    [TestMethod]
    public void Rar_Solid_Skip_Reader()
    {
        ResetScratch();
        using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Rar.solid.rar")))
        using (var reader = RarReader.Open(stream, Options.LookForHeader))
        {
            while (reader.MoveToNextEntry())
            {
                if (reader.Entry.Key.Contains("jpg"))
                {
                    Assert.AreEqual(reader.Entry.CompressionType, CompressionType.Rar);
                    reader.WriteEntryToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
                }
            }
        }
    }

    // Same skip pattern as above but on a non-solid archive.
    [TestMethod]
    public void Rar_Reader_Skip()
    {
        ResetScratch();
        using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Rar.rar")))
        using (var reader = RarReader.Open(stream, Options.LookForHeader))
        {
            while (reader.MoveToNextEntry())
            {
                if (reader.Entry.Key.Contains("jpg"))
                {
                    Assert.AreEqual(reader.Entry.CompressionType, CompressionType.Rar);
                    reader.WriteEntryToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
                }
            }
        }
    }
}
}

View File

@@ -1,84 +0,0 @@
using System.IO;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using SharpCompress.IO;
namespace SharpCompress.Test
{
[TestClass]
public class RewindableStreamTest
{
    /// <summary>
    /// Records four ints, rewinds, re-records the full run of seven, rewinds again,
    /// and confirms the stream replays from the recording start each time.
    /// </summary>
    [TestMethod]
    public void TestRewind()
    {
        var source = new MemoryStream();
        var writer = new BinaryWriter(source);
        for (int value = 1; value <= 7; value++)
        {
            writer.Write(value);
        }
        writer.Flush();
        source.Position = 0;

        var stream = new RewindableStream(source);
        stream.StartRecording();
        var reader = new BinaryReader(stream);
        for (int expected = 1; expected <= 4; expected++)
        {
            Assert.AreEqual(reader.ReadInt32(), expected);
        }
        stream.Rewind(true);
        stream.StartRecording();
        for (int expected = 1; expected <= 7; expected++)
        {
            Assert.AreEqual(reader.ReadInt32(), expected);
        }
        stream.Rewind(true);
        stream.StartRecording();
        for (int expected = 1; expected <= 4; expected++)
        {
            Assert.AreEqual(reader.ReadInt32(), expected);
        }
    }

    /// <summary>
    /// Starts a new recording mid-replay and verifies a later rewind returns to
    /// that new recording point (3), not to the beginning of the stream.
    /// </summary>
    [TestMethod]
    public void TestIncompleteRewind()
    {
        var source = new MemoryStream();
        var writer = new BinaryWriter(source);
        for (int value = 1; value <= 7; value++)
        {
            writer.Write(value);
        }
        writer.Flush();
        source.Position = 0;

        var stream = new RewindableStream(source);
        stream.StartRecording();
        var reader = new BinaryReader(stream);
        for (int expected = 1; expected <= 4; expected++)
        {
            Assert.AreEqual(reader.ReadInt32(), expected);
        }
        stream.Rewind(true);
        Assert.AreEqual(reader.ReadInt32(), 1);
        Assert.AreEqual(reader.ReadInt32(), 2);
        stream.StartRecording();
        for (int expected = 3; expected <= 5; expected++)
        {
            Assert.AreEqual(reader.ReadInt32(), expected);
        }
        stream.Rewind(true);
        for (int expected = 3; expected <= 7; expected++)
        {
            Assert.AreEqual(reader.ReadInt32(), expected);
        }
    }
}
}

View File

@@ -1,79 +0,0 @@
using System;
using System.IO;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using SharpCompress.Common;
namespace SharpCompress.Test
{
// Round-trips 7z archives produced with each supported codec through both
// the stream-based and the file-path-based archive APIs of ArchiveTests.
[TestClass]
public class SevenZipArchiveTests : ArchiveTests
{
    [TestMethod]
    public void SevenZipArchive_LZMA_StreamRead() => ArchiveStreamRead("7Zip.LZMA.7z");

    [TestMethod]
    public void SevenZipArchive_LZMA_PathRead() => ArchiveFileRead("7Zip.LZMA.7z");

    [TestMethod]
    public void SevenZipArchive_PPMd_StreamRead() => ArchiveStreamRead("7Zip.PPMd.7z");

    [TestMethod]
    public void SevenZipArchive_PPMd_StreamRead_Extract_All()
        => ArchiveStreamReadExtractAll("7Zip.PPMd.7z", CompressionType.PPMd);

    [TestMethod]
    public void SevenZipArchive_PPMd_PathRead() => ArchiveFileRead("7Zip.PPMd.7z");

    [TestMethod]
    public void SevenZipArchive_LZMA2_StreamRead() => ArchiveStreamRead("7Zip.LZMA2.7z");

    [TestMethod]
    public void SevenZipArchive_LZMA2_PathRead() => ArchiveFileRead("7Zip.LZMA2.7z");

    [TestMethod]
    public void SevenZipArchive_BZip2_StreamRead() => ArchiveStreamRead("7Zip.BZip2.7z");

    [TestMethod]
    public void SevenZipArchive_BZip2_PathRead() => ArchiveFileRead("7Zip.BZip2.7z");

    [TestMethod]
    public void SevenZipArchive_LZMA_Time_Attributes_PathRead() => ArchiveFileReadEx("7Zip.LZMA.7z");

    // Reading a multi-volume (split) 7z is expected to fail with
    // IndexOutOfRangeException — split archives are not supported here.
    [TestMethod]
    [ExpectedException(typeof(IndexOutOfRangeException))]
    public void SevenZipArchive_BZip2_Split()
        => ArchiveStreamRead("Original.7z.001", "Original.7z.002",
                             "Original.7z.003", "Original.7z.004", "Original.7z.005",
                             "Original.7z.006", "Original.7z.007");
}
}

View File

@@ -1,113 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProductVersion>8.0.30703</ProductVersion>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>{E9C3C94B-FB27-4B4F-B225-57513C254D37}</ProjectGuid>
<OutputType>Library</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>SharpCompress.Test</RootNamespace>
<AssemblyName>SharpCompress.Test.Portable</AssemblyName>
<TargetFrameworkVersion>v4.0</TargetFrameworkVersion>
<TargetFrameworkProfile>
</TargetFrameworkProfile>
<FileAlignment>512</FileAlignment>
<SolutionDir Condition="$(SolutionDir) == '' Or $(SolutionDir) == '*Undefined*'">..\..\sharpcompress\</SolutionDir>
<RestorePackages>true</RestorePackages>
<ProjectTypeGuids>{3AC096D0-A1C2-E12C-1390-A8335801FDAB};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}</ProjectTypeGuids>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<PlatformTarget>AnyCPU</PlatformTarget>
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\DebugPortable\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<PlatformTarget>AnyCPU</PlatformTarget>
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup>
<StartupObject>
</StartupObject>
</PropertyGroup>
<PropertyGroup>
<SignAssembly>false</SignAssembly>
</PropertyGroup>
<PropertyGroup>
<AssemblyOriginatorKeyFile>..\SharpCompress\SharpCompress.pfx</AssemblyOriginatorKeyFile>
</PropertyGroup>
<ItemGroup>
<Reference Include="Microsoft.VisualStudio.QualityTools.UnitTestFramework, Version=10.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL" />
<Reference Include="System" />
<Reference Include="System.Core" />
</ItemGroup>
<ItemGroup>
<Compile Include="ArchiveTests.cs" />
<Compile Include="GZip\GZipWriterTests.cs" />
<Compile Include="GZip\GZipArchiveTests.cs" />
<Compile Include="Rar\RarHeaderFactoryTest.cs" />
<Compile Include="SevenZip\SevenZipArchiveTests.cs" />
<Compile Include="Streams\StreamTests.cs" />
<Compile Include="Tar\TarWriterTests.cs" />
<Compile Include="Tar\TarReaderTests.cs" />
<Compile Include="Zip\ZipWriterTests.cs" />
<Compile Include="WriterTests.cs" />
<Compile Include="Rar\RarReaderTests.cs" />
<Compile Include="ReaderTests.cs" />
<Compile Include="Tar\TarArchiveTests.cs" />
<Compile Include="Zip\ZipArchiveTests.cs" />
<Compile Include="Rar\RarArchiveTests.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<Compile Include="RewindableStreamTest.cs" />
<Compile Include="TestBase.cs" />
<Compile Include="TestStream.cs" />
<Compile Include="Zip\ZipReaderTests.cs" />
<EmbeddedResource Include="Properties\Resources.resx">
<Generator>ResXFileCodeGenerator</Generator>
<LastGenOutput>Resources.Designer.cs</LastGenOutput>
<SubType>Designer</SubType>
</EmbeddedResource>
<Compile Include="Properties\Resources.Designer.cs">
<AutoGen>True</AutoGen>
<DependentUpon>Resources.resx</DependentUpon>
<DesignTime>True</DesignTime>
</Compile>
<None Include="..\SharpCompress\SharpCompress.pfx">
<Link>SharpCompress.pfx</Link>
</None>
<None Include="Properties\Settings.settings">
<Generator>SettingsSingleFileGenerator</Generator>
<LastGenOutput>Settings.Designer.cs</LastGenOutput>
</None>
<Compile Include="Properties\Settings.Designer.cs">
<AutoGen>True</AutoGen>
<DependentUpon>Settings.settings</DependentUpon>
<DesignTimeSharedInput>True</DesignTimeSharedInput>
</Compile>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\SharpCompress\SharpCompress.PortableTest.csproj">
<Project>{efdcaf57-fd4d-4e5d-a3d5-f26b875817ed}</Project>
<Name>SharpCompress.PortableTest</Name>
</ProjectReference>
</ItemGroup>
<Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project>

View File

@@ -1,113 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProductVersion>8.0.30703</ProductVersion>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>{15679D7A-F22C-4943-87FF-BF5C76C4A6FD}</ProjectGuid>
<OutputType>Library</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>SharpCompress.Test</RootNamespace>
<AssemblyName>SharpCompress.Test</AssemblyName>
<TargetFrameworkVersion>v4.0</TargetFrameworkVersion>
<TargetFrameworkProfile>
</TargetFrameworkProfile>
<FileAlignment>512</FileAlignment>
<SolutionDir Condition="$(SolutionDir) == '' Or $(SolutionDir) == '*Undefined*'">..\..\sharpcompress\</SolutionDir>
<RestorePackages>true</RestorePackages>
<ProjectTypeGuids>{3AC096D0-A1C2-E12C-1390-A8335801FDAB};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}</ProjectTypeGuids>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<PlatformTarget>AnyCPU</PlatformTarget>
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<PlatformTarget>AnyCPU</PlatformTarget>
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup>
<StartupObject>
</StartupObject>
</PropertyGroup>
<PropertyGroup>
<SignAssembly>false</SignAssembly>
</PropertyGroup>
<PropertyGroup>
<AssemblyOriginatorKeyFile>..\SharpCompress\SharpCompress.pfx</AssemblyOriginatorKeyFile>
</PropertyGroup>
<ItemGroup>
<Reference Include="Microsoft.VisualStudio.QualityTools.UnitTestFramework, Version=10.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL" />
<Reference Include="System" />
<Reference Include="System.Core" />
</ItemGroup>
<ItemGroup>
<Compile Include="ArchiveTests.cs" />
<Compile Include="GZip\GZipWriterTests.cs" />
<Compile Include="GZip\GZipArchiveTests.cs" />
<Compile Include="Rar\RarHeaderFactoryTest.cs" />
<Compile Include="SevenZip\SevenZipArchiveTests.cs" />
<Compile Include="Streams\StreamTests.cs" />
<Compile Include="Tar\TarWriterTests.cs" />
<Compile Include="Tar\TarReaderTests.cs" />
<Compile Include="Zip\ZipWriterTests.cs" />
<Compile Include="WriterTests.cs" />
<Compile Include="Rar\RarReaderTests.cs" />
<Compile Include="ReaderTests.cs" />
<Compile Include="Tar\TarArchiveTests.cs" />
<Compile Include="Zip\ZipArchiveTests.cs" />
<Compile Include="Rar\RarArchiveTests.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<Compile Include="RewindableStreamTest.cs" />
<Compile Include="TestBase.cs" />
<Compile Include="TestStream.cs" />
<Compile Include="Zip\ZipReaderTests.cs" />
<EmbeddedResource Include="Properties\Resources.resx">
<Generator>ResXFileCodeGenerator</Generator>
<LastGenOutput>Resources.Designer.cs</LastGenOutput>
<SubType>Designer</SubType>
</EmbeddedResource>
<Compile Include="Properties\Resources.Designer.cs">
<AutoGen>True</AutoGen>
<DependentUpon>Resources.resx</DependentUpon>
<DesignTime>True</DesignTime>
</Compile>
<None Include="..\SharpCompress\SharpCompress.pfx">
<Link>SharpCompress.pfx</Link>
</None>
<None Include="Properties\Settings.settings">
<Generator>SettingsSingleFileGenerator</Generator>
<LastGenOutput>Settings.Designer.cs</LastGenOutput>
</None>
<Compile Include="Properties\Settings.Designer.cs">
<AutoGen>True</AutoGen>
<DependentUpon>Settings.settings</DependentUpon>
<DesignTimeSharedInput>True</DesignTimeSharedInput>
</Compile>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\SharpCompress\SharpCompress.Unsigned.csproj">
<Project>{27d535cb-2fd3-4621-8c9a-46161fc77a5d}</Project>
<Name>SharpCompress.Unsigned</Name>
</ProjectReference>
</ItemGroup>
<Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project>

View File

@@ -1,92 +0,0 @@
using System.Collections.Generic;
using System.IO;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using SharpCompress.Common;
using SharpCompress.Reader.Tar;
namespace SharpCompress.Test
{
// Forward-only reader tests for tar archives (plain, bzip2- and gzip-wrapped).
[TestClass]
public class TarReaderTests : ReaderTests
{
    public TarReaderTests()
    {
        // Match extracted files to originals by extension rather than exact name.
        UseExtensionInsteadOfNameToVerify = true;
    }

    [TestMethod]
    public void Tar_Reader()
    {
        Read("Tar.tar", CompressionType.None);
    }

    [TestMethod]
    public void Tar_BZip2_Reader()
    {
        Read("Tar.tar.bz2", CompressionType.BZip2);
    }

    [TestMethod]
    public void Tar_GZip_Reader()
    {
        Read("Tar.tar.gz", CompressionType.GZip);
    }

    // Extracts every file entry manually via OpenEntryStream and verifies the
    // results against the original files.
    [TestMethod]
    public void Tar_BZip2_Entry_Stream()
    {
        ResetScratch();
        using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2")))
        using (var reader = TarReader.Open(stream))
        {
            while (reader.MoveToNextEntry())
            {
                if (!reader.Entry.IsDirectory)
                {
                    Assert.AreEqual(CompressionType.BZip2, reader.Entry.CompressionType);
                    using (var entryStream = reader.OpenEntryStream())
                    {
                        string file = Path.GetFileName(reader.Entry.Key);
                        string folder = Path.GetDirectoryName(reader.Entry.Key);
                        string destdir = Path.Combine(SCRATCH_FILES_PATH, folder);
                        if (!Directory.Exists(destdir))
                        {
                            Directory.CreateDirectory(destdir);
                        }
                        string destinationFileName = Path.Combine(destdir, file);

                        // File.Create truncates any stale file left over from a
                        // previous run; File.OpenWrite would not, leaving trailing
                        // bytes whenever the old file was larger than the new one.
                        using (FileStream fs = File.Create(destinationFileName))
                        {
                            entryStream.TransferTo(fs);
                        }
                    }
                }
            }
        }
        VerifyFiles();
    }

    // SkipEntry must advance past an opened entry without extracting it while
    // still letting MoveToNextEntry enumerate every entry exactly once.
    [TestMethod]
    public void Tar_BZip2_Skip_Entry_Stream()
    {
        using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2")))
        using (var reader = TarReader.Open(stream))
        {
            List<string> names = new List<string>();
            while (reader.MoveToNextEntry())
            {
                if (!reader.Entry.IsDirectory)
                {
                    Assert.AreEqual(CompressionType.BZip2, reader.Entry.CompressionType);
                    using (var entryStream = reader.OpenEntryStream())
                    {
                        entryStream.SkipEntry();
                        names.Add(reader.Entry.Key);
                    }
                }
            }
            Assert.AreEqual(3, names.Count);
        }
    }
}
}

View File

@@ -1,34 +0,0 @@
using Microsoft.VisualStudio.TestTools.UnitTesting;
using SharpCompress.Common;
namespace SharpCompress.Test
{
// Writer round-trip tests for the tar format; the heavy lifting lives in the
// WriterTests.Write helper of the base class.
[TestClass]
public class TarWriterTests : WriterTests
{
    public TarWriterTests()
        : base(ArchiveType.Tar)
    {
        // Match written files to originals by extension rather than exact name.
        UseExtensionInsteadOfNameToVerify = true;
    }

    [TestMethod]
    public void Tar_Writer()
        => Write(CompressionType.None, "Tar.noEmptyDirs.tar", "Tar.noEmptyDirs.tar");

    [TestMethod]
    public void Tar_BZip2_Writer()
        => Write(CompressionType.BZip2, "Tar.noEmptyDirs.tar.bz2", "Tar.noEmptyDirs.tar.bz2");

    // Requesting Rar compression from the tar writer must throw
    // InvalidFormatException (Rar cannot be written).
    [TestMethod]
    [ExpectedException(typeof(InvalidFormatException))]
    public void Tar_Rar_Write()
        => Write(CompressionType.Rar, "Zip.ppmd.noEmptyDirs.zip", "Zip.ppmd.noEmptyDirs.zip");
}
}

View File

@@ -1,36 +0,0 @@
using System.IO;
using SharpCompress.Common;
using SharpCompress.Reader;
using SharpCompress.Writer;
namespace SharpCompress.Test
{
// Shared base for writer round-trip tests: write originals into a fresh
// archive, compare it to a known-good archive, then extract and verify.
public class WriterTests : TestBase
{
    // Archive format under test; fixed once per derived test class.
    private readonly ArchiveType type;

    protected WriterTests(ArchiveType type)
    {
        this.type = type;
    }

    /// <summary>
    /// Writes every file under ORIGINAL_FILES_PATH into a new archive of the
    /// configured type, compares it against <paramref name="archiveToVerifyAgainst"/>,
    /// then reads the new archive back and verifies the extracted files.
    /// </summary>
    /// <param name="compressionType">Compression to use when writing.</param>
    /// <param name="archive">File name of the archive to create in the scratch dir.</param>
    /// <param name="archiveToVerifyAgainst">Known-good archive to compare against.</param>
    protected void Write(CompressionType compressionType, string archive, string archiveToVerifyAgainst)
    {
        ResetScratch();

        // File.Create truncates any stale archive from a previous run;
        // File.OpenWrite would leave trailing bytes if the old file was larger,
        // corrupting the byte-wise comparison below.
        using (Stream stream = File.Create(Path.Combine(SCRATCH2_FILES_PATH, archive)))
        using (var writer = WriterFactory.Open(stream, type, compressionType))
        {
            writer.WriteAll(ORIGINAL_FILES_PATH, "*", SearchOption.AllDirectories);
        }
        CompareArchivesByPath(Path.Combine(SCRATCH2_FILES_PATH, archive),
                              Path.Combine(TEST_ARCHIVES_PATH, archiveToVerifyAgainst));

        using (Stream stream = File.OpenRead(Path.Combine(SCRATCH2_FILES_PATH, archive)))
        using (var reader = ReaderFactory.Open(stream))
        {
            reader.WriteAllToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath);
        }
        VerifyFiles();
    }
}
}

View File

@@ -1,224 +0,0 @@
using System;
using System.IO;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using SharpCompress.Common;
using SharpCompress.Reader;
using SharpCompress.Reader.Zip;
using SharpCompress.Writer;
namespace SharpCompress.Test
{
// Reader-API tests for ZIP archives: each test streams an archive forward-only
// through ZipReader/ReaderFactory and checks compression-type detection,
// extraction, encryption handling, or stream-disposal semantics.
[TestClass]
public class ZipReaderTests : ReaderTests
{
public ZipReaderTests()
{
// Match extracted files to originals by extension rather than exact name.
UseExtensionInsteadOfNameToVerify = true;
}
// The simple Read(...) tests below each stream one archive and assert the
// expected compression type; ".dd" archives presumably use streamed/data-
// descriptor entries — TODO confirm against the fixture set.
[TestMethod]
public void Zip_ZipX_Streamed_Read()
{
Read("Zip.Zipx", CompressionType.LZMA);
}
[TestMethod]
public void Zip_BZip2_Streamed_Read()
{
Read("Zip.bzip2.dd.zip", CompressionType.BZip2);
}
[TestMethod]
public void Zip_BZip2_Read()
{
Read("Zip.bzip2.zip", CompressionType.BZip2);
}
[TestMethod]
public void Zip_Deflate_Streamed2_Read()
{
Read("Zip.deflate.dd-.zip", CompressionType.Deflate);
}
[TestMethod]
public void Zip_Deflate_Streamed_Read()
{
Read("Zip.deflate.dd.zip", CompressionType.Deflate);
}
[TestMethod]
public void Zip_Deflate_Read()
{
Read("Zip.deflate.zip", CompressionType.Deflate);
}
[TestMethod]
public void Zip_LZMA_Streamed_Read()
{
Read("Zip.lzma.dd.zip", CompressionType.LZMA);
}
[TestMethod]
public void Zip_LZMA_Read()
{
Read("Zip.lzma.zip", CompressionType.LZMA);
}
[TestMethod]
public void Zip_PPMd_Streamed_Read()
{
Read("Zip.ppmd.dd.zip", CompressionType.PPMd);
}
[TestMethod]
public void Zip_PPMd_Read()
{
Read("Zip.ppmd.zip", CompressionType.PPMd);
}
[TestMethod]
public void Zip_None_Read()
{
Read("Zip.none.zip", CompressionType.None);
}
[TestMethod]
public void Zip_Deflate_NoEmptyDirs_Read()
{
Read("Zip.deflate.noEmptyDirs.zip", CompressionType.Deflate);
}
// Pkware-encrypted archive opened with the password "test": every file entry
// should decrypt, report BZip2 compression, and extract correctly.
[TestMethod]
public void Zip_BZip2_PkwareEncryption_Read()
{
ResetScratch();
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.bzip2.pkware.zip")))
using (var reader = ZipReader.Open(stream, "test"))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.AreEqual(reader.Entry.CompressionType, CompressionType.BZip2);
reader.WriteEntryToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
}
}
}
VerifyFiles();
}
// Disposing the reader (inner using) must also dispose the wrapped input
// stream; TestStream records disposal via IsDisposed.
[TestMethod]
public void Zip_Reader_Disposal_Test()
{
ResetScratch();
using (TestStream stream = new TestStream(File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip"))))
{
using (var reader = ReaderFactory.Open(stream, Options.None))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
reader.WriteEntryToDirectory(SCRATCH_FILES_PATH,
ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
}
}
}
Assert.IsTrue(stream.IsDisposed);
}
}
// Counterpart to the test above: while the reader itself is NOT disposed,
// the underlying input stream must stay open.
[TestMethod]
public void Zip_Reader_Disposal_Test2()
{
ResetScratch();
using (TestStream stream = new TestStream(File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip"))))
{
var reader = ReaderFactory.Open(stream, Options.None);
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
reader.WriteEntryToDirectory(SCRATCH_FILES_PATH,
ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
}
}
Assert.IsFalse(stream.IsDisposed);
}
}
// LZMA inside WinZip-AES is expected to be unsupported: extraction should
// throw NotSupportedException (see the ExpectedException attribute).
[TestMethod]
[ExpectedException(typeof(NotSupportedException))]
public void Zip_LZMA_WinzipAES_Read()
{
ResetScratch();
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.lzma.winzipaes.zip")))
using (var reader = ZipReader.Open(stream, "test"))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
// WinZip-AES entries surface as CompressionType.Unknown to the reader.
Assert.AreEqual(reader.Entry.CompressionType, CompressionType.Unknown);
reader.WriteEntryToDirectory(SCRATCH_FILES_PATH, ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
}
}
}
VerifyFiles();
}
// Deflate inside WinZip-AES IS supported: entries still report Unknown
// compression but must decrypt and extract successfully.
[TestMethod]
public void Zip_Deflate_WinzipAES_Read()
{
ResetScratch();
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.WinzipAES.zip")))
using (var reader = ZipReader.Open(stream, "test"))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.AreEqual(reader.Entry.CompressionType, CompressionType.Unknown);
reader.WriteEntryToDirectory(SCRATCH_FILES_PATH,
ExtractOptions.ExtractFullPath | ExtractOptions.Overwrite);
}
}
}
VerifyFiles();
}
// MemoryStream that reports CanSeek == false, forcing writer/reader code
// down their non-seekable (streamed) paths.
class NonSeekableMemoryStream : MemoryStream
{
public override bool CanSeek
{
get
{
return false;
}
}
}
// Writes a zero-byte entry and a 10-byte entry to a non-seekable stream,
// then reads both entries back and drains each entry stream to the end.
[TestMethod]
public void TestSharpCompressWithEmptyStream()
{
MemoryStream stream = new NonSeekableMemoryStream();
using (IWriter zipWriter = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
{
zipWriter.Write("foo.txt", new MemoryStream(new byte[0]));
zipWriter.Write("foo2.txt", new MemoryStream(new byte[10]));
}
// Re-wrap the written bytes in a fresh (seekable) MemoryStream for reading.
stream = new MemoryStream(stream.ToArray());
// NOTE(review): writes foo.zip into the current working directory — looks
// like a leftover debugging artifact; confirm it is still wanted.
File.WriteAllBytes("foo.zip", stream.ToArray());
using (IReader zipReader = ZipReader.Open(stream))
{
while (zipReader.MoveToNextEntry())
{
using (EntryStream entry = zipReader.OpenEntryStream())
{
// Drain the entry in 4 KiB chunks; the data itself is discarded —
// the test only verifies that reading completes without error.
MemoryStream tempStream = new MemoryStream();
const int bufSize = 0x1000;
byte[] buf = new byte[bufSize];
int bytesRead = 0;
while ((bytesRead = entry.Read(buf, 0, bufSize)) > 0)
tempStream.Write(buf, 0, bytesRead);
}
}
}
}
}
}

View File

@@ -1,53 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="4.0">
<PropertyGroup>
<!-- The configuration and platform will be used to determine which
assemblies to include from solution and project documentation
sources -->
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>{a226cac9-f2c4-4cc2-94f9-163b3e221817}</ProjectGuid>
<SHFBSchemaVersion>1.9.3.0</SHFBSchemaVersion>
<!-- AssemblyName, Name, and RootNamespace are not used by SHFB but Visual
Studio adds them anyway -->
<AssemblyName>Documentation</AssemblyName>
<RootNamespace>Documentation</RootNamespace>
<Name>Documentation</Name>
<!-- SHFB properties -->
<OutputPath>.\Help\</OutputPath>
<HtmlHelpName>sharpcompress</HtmlHelpName>
<Language>en-US</Language>
<RootNamespaceTitle>SharpCompress</RootNamespaceTitle>
<SandcastlePath>..\..\Program Files (x86)\Sandcastle\</SandcastlePath>
<DocumentationSources>
<DocumentationSource sourceFile="bin\SharpCompress.dll" />
<DocumentationSource sourceFile="bin\SharpCompress.xml" />
</DocumentationSources>
<HelpTitle>SharpCompress</HelpTitle>
<PresentationStyle>Prototype</PresentationStyle>
<HelpFileFormat>HtmlHelp1, Website</HelpFileFormat>
<MissingTags>AutoDocumentCtors, AutoDocumentDispose</MissingTags>
</PropertyGroup>
<!-- There are no properties for these groups. AnyCPU needs to appear in
order for Visual Studio to perform the build. The others are optional
common platform types that may appear. -->
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|x86' ">
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|x86' ">
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|x64' ">
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|x64' ">
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|Win32' ">
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|Win32' ">
</PropertyGroup>
<!-- Import the SHFB build targets -->
<Import Project="$(SHFBROOT)\SandcastleHelpFileBuilder.targets" />
</Project>

Binary file not shown.

View File

@@ -1,71 +1,38 @@
Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 2013
VisualStudioVersion = 12.0.31101.0
MinimumVisualStudioVersion = 10.0.40219.1
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Testing", "Testing", "{932BBFCC-76E3-45FF-90CA-6BE4FBF4A097}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{F18F1765-4A02-42FD-9BEF-F0E2FCBD9D17}"
ProjectSection(SolutionItems) = preProject
NuGet\sharpcompress.nuspec = NuGet\sharpcompress.nuspec
EndProjectSection
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SharpCompress", "SharpCompress\SharpCompress.csproj", "{10A689CF-76A2-4A4F-96E4-553C33398438}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SharpCompress.Test", "SharpCompress.Test\SharpCompress.Test.csproj", "{15679D7A-F22C-4943-87FF-BF5C76C4A6FD}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SharpCompress.Portable", "SharpCompress\SharpCompress.Portable.csproj", "{7FA7D133-1417-4F85-9998-4C618AC8FEDA}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SharpCompress.WindowsStore", "SharpCompress\SharpCompress.WindowsStore.csproj", "{1DF6D83C-31FF-47B6-82FE-C4603BE916B5}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SharpCompress.PortableTest", "SharpCompress\SharpCompress.PortableTest.csproj", "{EFDCAF57-FD4D-4E5D-A3D5-F26B875817ED}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SharpCompress.Test.Portable", "SharpCompress.Test\SharpCompress.Test.Portable.csproj", "{E9C3C94B-FB27-4B4F-B225-57513C254D37}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SharpCompress.Unsigned", "SharpCompress\SharpCompress.Unsigned.csproj", "{27D535CB-2FD3-4621-8C9A-46161FC77A5D}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{10A689CF-76A2-4A4F-96E4-553C33398438}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{10A689CF-76A2-4A4F-96E4-553C33398438}.Debug|Any CPU.Build.0 = Debug|Any CPU
{10A689CF-76A2-4A4F-96E4-553C33398438}.Release|Any CPU.ActiveCfg = Release|Any CPU
{10A689CF-76A2-4A4F-96E4-553C33398438}.Release|Any CPU.Build.0 = Release|Any CPU
{15679D7A-F22C-4943-87FF-BF5C76C4A6FD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{15679D7A-F22C-4943-87FF-BF5C76C4A6FD}.Debug|Any CPU.Build.0 = Debug|Any CPU
{15679D7A-F22C-4943-87FF-BF5C76C4A6FD}.Release|Any CPU.ActiveCfg = Release|Any CPU
{15679D7A-F22C-4943-87FF-BF5C76C4A6FD}.Release|Any CPU.Build.0 = Release|Any CPU
{7FA7D133-1417-4F85-9998-4C618AC8FEDA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{7FA7D133-1417-4F85-9998-4C618AC8FEDA}.Debug|Any CPU.Build.0 = Debug|Any CPU
{7FA7D133-1417-4F85-9998-4C618AC8FEDA}.Release|Any CPU.ActiveCfg = Release|Any CPU
{7FA7D133-1417-4F85-9998-4C618AC8FEDA}.Release|Any CPU.Build.0 = Release|Any CPU
{1DF6D83C-31FF-47B6-82FE-C4603BE916B5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{1DF6D83C-31FF-47B6-82FE-C4603BE916B5}.Debug|Any CPU.Build.0 = Debug|Any CPU
{1DF6D83C-31FF-47B6-82FE-C4603BE916B5}.Release|Any CPU.ActiveCfg = Release|Any CPU
{1DF6D83C-31FF-47B6-82FE-C4603BE916B5}.Release|Any CPU.Build.0 = Release|Any CPU
{EFDCAF57-FD4D-4E5D-A3D5-F26B875817ED}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{EFDCAF57-FD4D-4E5D-A3D5-F26B875817ED}.Debug|Any CPU.Build.0 = Debug|Any CPU
{EFDCAF57-FD4D-4E5D-A3D5-F26B875817ED}.Release|Any CPU.ActiveCfg = Release|Any CPU
{EFDCAF57-FD4D-4E5D-A3D5-F26B875817ED}.Release|Any CPU.Build.0 = Release|Any CPU
{E9C3C94B-FB27-4B4F-B225-57513C254D37}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{E9C3C94B-FB27-4B4F-B225-57513C254D37}.Debug|Any CPU.Build.0 = Debug|Any CPU
{E9C3C94B-FB27-4B4F-B225-57513C254D37}.Release|Any CPU.ActiveCfg = Release|Any CPU
{E9C3C94B-FB27-4B4F-B225-57513C254D37}.Release|Any CPU.Build.0 = Release|Any CPU
{27D535CB-2FD3-4621-8C9A-46161FC77A5D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{27D535CB-2FD3-4621-8C9A-46161FC77A5D}.Debug|Any CPU.Build.0 = Debug|Any CPU
{27D535CB-2FD3-4621-8C9A-46161FC77A5D}.Release|Any CPU.ActiveCfg = Release|Any CPU
{27D535CB-2FD3-4621-8C9A-46161FC77A5D}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(NestedProjects) = preSolution
{15679D7A-F22C-4943-87FF-BF5C76C4A6FD} = {932BBFCC-76E3-45FF-90CA-6BE4FBF4A097}
{EFDCAF57-FD4D-4E5D-A3D5-F26B875817ED} = {932BBFCC-76E3-45FF-90CA-6BE4FBF4A097}
{E9C3C94B-FB27-4B4F-B225-57513C254D37} = {932BBFCC-76E3-45FF-90CA-6BE4FBF4A097}
{27D535CB-2FD3-4621-8C9A-46161FC77A5D} = {932BBFCC-76E3-45FF-90CA-6BE4FBF4A097}
EndGlobalSection
EndGlobal
Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 15
VisualStudioVersion = 15.0.26430.6
MinimumVisualStudioVersion = 10.0.40219.1
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{F18F1765-4A02-42FD-9BEF-F0E2FCBD9D17}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{3C5BE746-03E5-4895-9988-0B57F162F86C}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "tests", "tests", "{0F0901FF-E8D9-426A-B5A2-17C7F47C1529}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SharpCompress", "src\SharpCompress\SharpCompress.csproj", "{FD19DDD8-72B2-4024-8665-0D1F7A2AA998}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SharpCompress.Test", "tests\SharpCompress.Test\SharpCompress.Test.csproj", "{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998}.Debug|Any CPU.Build.0 = Debug|Any CPU
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998}.Release|Any CPU.ActiveCfg = Release|Any CPU
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998}.Release|Any CPU.Build.0 = Release|Any CPU
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Debug|Any CPU.Build.0 = Debug|Any CPU
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Release|Any CPU.ActiveCfg = Release|Any CPU
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(NestedProjects) = preSolution
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998} = {3C5BE746-03E5-4895-9988-0B57F162F86C}
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F} = {0F0901FF-E8D9-426A-B5A2-17C7F47C1529}
EndGlobalSection
EndGlobal

View File

@@ -1,6 +1,128 @@
<wpf:ResourceDictionary xml:space="preserve" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml" xmlns:s="clr-namespace:System;assembly=mscorlib" xmlns:ss="urn:shemas-jetbrains-com:settings-storage-xaml" xmlns:wpf="http://schemas.microsoft.com/winfx/2006/xaml/presentation">
<s:String x:Key="/Default/Environment/InjectedLayers/FileInjectedLayer/=181069325DAB1C4287CD564D6CDDEDB3/AbsolutePath/@EntryValue">D:\Git\sharpcompress\SharpCompress\sharpcompress.DotSettings</s:String>
<s:String x:Key="/Default/Environment/InjectedLayers/FileInjectedLayer/=181069325DAB1C4287CD564D6CDDEDB3/RelativePath/@EntryValue">..\SharpCompress\sharpcompress.DotSettings</s:String>
<s:Boolean x:Key="/Default/Environment/InjectedLayers/FileInjectedLayer/=181069325DAB1C4287CD564D6CDDEDB3/@KeyIndexDefined">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/InjectedLayers/InjectedLayerCustomization/=File181069325DAB1C4287CD564D6CDDEDB3/@KeyIndexDefined">True</s:Boolean>
<s:Double x:Key="/Default/Environment/InjectedLayers/InjectedLayerCustomization/=File181069325DAB1C4287CD564D6CDDEDB3/RelativePriority/@EntryValue">1</s:Double></wpf:ResourceDictionary>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArgumentsStyleNamedExpression/@EntryIndexedValue">DO_NOT_SHOW</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArrangeBraces_005Fdowhile/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArrangeBraces_005Ffixed/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArrangeBraces_005Ffor/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArrangeBraces_005Fforeach/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArrangeBraces_005Fifelse/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArrangeBraces_005Flock/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArrangeBraces_005Fusing/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=ArrangeBraces_005Fwhile/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=MethodSupportsCancellation/@EntryIndexedValue">ERROR</s:String>
<s:String x:Key="/Default/CodeInspection/Highlighting/InspectionSeverities/=RedundantExplicitParamsArrayCreation/@EntryIndexedValue">DO_NOT_SHOW</s:String>
<s:String x:Key="/Default/CodeStyle/CodeCleanup/Profiles/=Basic_0020Clean/@EntryIndexedValue">&lt;?xml version="1.0" encoding="utf-16"?&gt;&lt;Profile name="Basic Clean"&gt;&lt;CSOptimizeUsings&gt;&lt;OptimizeUsings&gt;True&lt;/OptimizeUsings&gt;&lt;EmbraceInRegion&gt;False&lt;/EmbraceInRegion&gt;&lt;RegionName&gt;&lt;/RegionName&gt;&lt;/CSOptimizeUsings&gt;&lt;CSShortenReferences&gt;True&lt;/CSShortenReferences&gt;&lt;CSRemoveCodeRedundancies&gt;True&lt;/CSRemoveCodeRedundancies&gt;&lt;CSMakeFieldReadonly&gt;True&lt;/CSMakeFieldReadonly&gt;&lt;CSCodeStyleAttributes ArrangeTypeAccessModifier="False" ArrangeTypeMemberAccessModifier="False" SortModifiers="False" RemoveRedundantParentheses="False" AddMissingParentheses="False" ArrangeBraces="True" ArrangeAttributes="False" ArrangeArgumentsStyle="False" /&gt;&lt;RemoveCodeRedundancies&gt;True&lt;/RemoveCodeRedundancies&gt;&lt;CSUseAutoProperty&gt;True&lt;/CSUseAutoProperty&gt;&lt;CSMakeAutoPropertyGetOnly&gt;True&lt;/CSMakeAutoPropertyGetOnly&gt;&lt;CSReformatCode&gt;True&lt;/CSReformatCode&gt;&lt;/Profile&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/CodeCleanup/SilentCleanupProfile/@EntryValue">Basic Clean</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/APPLY_ON_COMPLETION/@EntryValue">True</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/ARGUMENTS_NAMED/@EntryValue">Named</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_FOR/@EntryValue">Required</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_FOREACH/@EntryValue">Required</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_IFELSE/@EntryValue">Required</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_WHILE/@EntryValue">Required</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_FIRST_ARG_BY_PAREN/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_LINQ_QUERY/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_ARGUMENT/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_ARRAY_AND_OBJECT_INITIALIZER/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_CALLS_CHAIN/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_EXPRESSION/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_EXTENDS_LIST/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_FOR_STMT/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_PARAMETER/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTIPLE_DECLARATION/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTLINE_TYPE_PARAMETER_CONSTRAINS/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTLINE_TYPE_PARAMETER_LIST/@EntryValue">True</s:Boolean>
<s:Int64 x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/BLANK_LINES_AFTER_START_COMMENT/@EntryValue">0</s:Int64>
<s:Int64 x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/BLANK_LINES_BEFORE_SINGLE_LINE_COMMENT/@EntryValue">1</s:Int64>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_ATTRIBUTE_STYLE/@EntryValue">SEPARATE</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_FIXED_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_FOR_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_FOREACH_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_IFELSE_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_USING_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_WHILE_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/INDENT_ANONYMOUS_METHOD_BLOCK/@EntryValue">True</s:Boolean>
<s:Int64 x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/KEEP_BLANK_LINES_IN_CODE/@EntryValue">1</s:Int64>
<s:Int64 x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/KEEP_BLANK_LINES_IN_DECLARATIONS/@EntryValue">1</s:Int64>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_ACCESSOR_ATTRIBUTE_ON_SAME_LINE_EX/@EntryValue">NEVER</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_ACCESSORHOLDER_ATTRIBUTE_ON_SAME_LINE_EX/@EntryValue">NEVER</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_CONSTRUCTOR_INITIALIZER_ON_SAME_LINE/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_FIELD_ATTRIBUTE_ON_SAME_LINE/@EntryValue">False</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_FIELD_ATTRIBUTE_ON_SAME_LINE_EX/@EntryValue">NEVER</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_ACCESSORHOLDER_ON_SINGLE_LINE/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_ACCESSOR_ATTRIBUTE_ON_SAME_LINE/@EntryValue">False</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_EMBEDDED_STATEMENT_ON_SAME_LINE/@EntryValue">NEVER</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_INITIALIZER_ON_SINGLE_LINE/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_WHILE_ON_NEW_LINE/@EntryValue">True</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SIMPLE_EMBEDDED_STATEMENT_STYLE/@EntryValue">LINE_BREAK</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SPACE_AFTER_TYPECAST_PARENTHESES/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SPACE_AROUND_ARROW_OP/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SPACE_AROUND_MULTIPLICATIVE_OP/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SPACE_BEFORE_SIZEOF_PARENTHESES/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SPACE_BEFORE_TYPEOF_PARENTHESES/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/STICK_COMMENT/@EntryValue">False</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_ARGUMENTS_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_ARRAY_INITIALIZER_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_EXTENDS_LIST_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_LINES/@EntryValue">False</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_PARAMETERS_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForBuiltInTypes/@EntryValue">UseVarWhenEvident</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForOtherTypes/@EntryValue">UseVarWhenEvident</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForSimpleTypes/@EntryValue">UseVarWhenEvident</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateInstanceFields/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="_" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateStaticFields/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateStaticReadonly/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=StaticReadonly/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FCONSTANT/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FFUNCTION/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FVARIABLE/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FCLASS/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FCONSTRUCTOR/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FFUNCTION/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FGLOBAL_005FVARIABLE/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FLABEL/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FLOCAL_005FCONSTRUCTOR/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FLOCAL_005FVARIABLE/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FOBJECT_005FPROPERTY_005FOF_005FFUNCTION/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FPARAMETER/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FCLASS/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FENUM/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FENUM_005FMEMBER/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FINTERFACE/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="I" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FMODULE/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FMODULE_005FEXPORTED/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FMODULE_005FLOCAL/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPRIVATE_005FMEMBER_005FACCESSOR/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPRIVATE_005FSTATIC_005FTYPE_005FFIELD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPRIVATE_005FTYPE_005FFIELD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPRIVATE_005FTYPE_005FMETHOD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPROTECTED_005FMEMBER_005FACCESSOR/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPROTECTED_005FSTATIC_005FTYPE_005FFIELD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPROTECTED_005FTYPE_005FFIELD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPROTECTED_005FTYPE_005FMETHOD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPUBLIC_005FMEMBER_005FACCESSOR/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPUBLIC_005FSTATIC_005FTYPE_005FFIELD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPUBLIC_005FTYPE_005FFIELD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FPUBLIC_005FTYPE_005FMETHOD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=TS_005FTYPE_005FPARAMETER/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="T" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/WebNaming/UserRules/=ASP_005FFIELD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/WebNaming/UserRules/=ASP_005FHTML_005FCONTROL/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/WebNaming/UserRules/=ASP_005FTAG_005FNAME/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/WebNaming/UserRules/=ASP_005FTAG_005FPREFIX/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/XamlNaming/UserRules/=NAMESPACE_005FALIAS/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/XamlNaming/UserRules/=XAML_005FFIELD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/XamlNaming/UserRules/=XAML_005FRESOURCE/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpAttributeForSingleLineMethodUpgrade/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpKeepExistingMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpPlaceEmbeddedOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpRenamePlacementToArrangementMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EAddAccessorOwnerDeclarationBracesMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002ECSharpPlaceAttributeOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateBlankLinesAroundFieldToBlankLinesAroundProperty/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateThisQualifierSettings/@EntryIndexedValue">True</s:Boolean></wpf:ResourceDictionary>

BIN
SharpCompress.snk Normal file

Binary file not shown.

View File

@@ -1,111 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Archive.GZip
{
internal class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArchiveEntry
{
    private readonly string path;
    private readonly long size;
    private readonly DateTime? lastModified;
    private readonly bool closeStream;
    private readonly Stream stream;

    // Guards Close() against disposing the source stream more than once.
    // Mirrors the equivalent guard in ZipWritableArchiveEntry for consistency.
    private bool isDisposed;

    /// <summary>
    /// A pending (not-yet-written) GZip archive entry backed by a caller-supplied stream.
    /// </summary>
    /// <param name="archive">Owning archive.</param>
    /// <param name="stream">Source data stream; rewound on every <see cref="OpenEntryStream"/> call.</param>
    /// <param name="path">Entry key/name within the archive.</param>
    /// <param name="size">Uncompressed size to report via <see cref="Size"/>.</param>
    /// <param name="lastModified">Optional modification timestamp.</param>
    /// <param name="closeStream">When true, <paramref name="stream"/> is disposed when the entry is closed.</param>
    internal GZipWritableArchiveEntry(GZipArchive archive, Stream stream,
                                      string path, long size, DateTime? lastModified, bool closeStream)
        : base(archive, null)
    {
        this.stream = stream;
        this.path = path;
        this.size = size;
        this.lastModified = lastModified;
        this.closeStream = closeStream;
    }

    // Not computed for a pending entry.
    public override long Crc
    {
        get { return 0; }
    }

    public override string Key
    {
        get { return path; }
    }

    // Unknown until the entry is actually compressed.
    public override long CompressedSize
    {
        get { return 0; }
    }

    public override long Size
    {
        get { return size; }
    }

    public override DateTime? LastModifiedTime
    {
        get { return lastModified; }
    }

    public override DateTime? CreatedTime
    {
        get { return null; }
    }

    public override DateTime? LastAccessedTime
    {
        get { return null; }
    }

    public override DateTime? ArchivedTime
    {
        get { return null; }
    }

    public override bool IsEncrypted
    {
        get { return false; }
    }

    public override bool IsDirectory
    {
        get { return false; }
    }

    public override bool IsSplit
    {
        get { return false; }
    }

    // A pending entry has no on-disk parts to enumerate.
    internal override IEnumerable<FilePart> Parts
    {
        get { throw new NotImplementedException(); }
    }

    Stream IWritableArchiveEntry.Stream
    {
        get
        {
            return stream;
        }
    }

    /// <summary>
    /// Returns the backing stream (rewound to its start) wrapped so that
    /// disposing the returned stream does not dispose the backing stream.
    /// </summary>
    public override Stream OpenEntryStream()
    {
        //ensure new stream is at the start, this could be reset
        stream.Seek(0, SeekOrigin.Begin);
        return new NonDisposingStream(stream);
    }

    internal override void Close()
    {
        // Dispose at most once, and only when we own the stream.
        if (closeStream && !isDisposed)
        {
            stream.Dispose();
            isDisposed = true;
        }
    }
}
}

View File

@@ -1,123 +0,0 @@
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Archive
{
public static class IArchiveEntryExtensions
{
    /// <summary>
    /// Extracts <paramref name="archiveEntry"/> into <paramref name="streamToWriteTo"/>,
    /// raising the archive's extraction events along the way.
    /// </summary>
    /// <exception cref="InvalidFormatException">Random access is attempted on a solid RAR archive.</exception>
    /// <exception cref="ExtractionException">The entry is a directory.</exception>
    public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWriteTo)
    {
        if (archiveEntry.Archive.Type == ArchiveType.Rar && archiveEntry.Archive.IsSolid)
        {
            throw new InvalidFormatException("Cannot use Archive random access on SOLID Rar files.");
        }
        if (archiveEntry.IsDirectory)
        {
            throw new ExtractionException("Entry is a file directory and cannot be extracted.");
        }

        // NOTE(review): assumes every IArchive implementation also implements
        // IArchiveExtractionListener (true for the in-tree archives); a foreign
        // implementation would produce a NullReferenceException here — confirm.
        var streamListener = archiveEntry.Archive as IArchiveExtractionListener;
        streamListener.EnsureEntriesLoaded();
        streamListener.FireEntryExtractionBegin(archiveEntry);
        streamListener.FireFilePartExtractionBegin(archiveEntry.Key, archiveEntry.Size, archiveEntry.CompressedSize);
        var entryStream = archiveEntry.OpenEntryStream();
        if (entryStream == null)
        {
            // Nothing to extract; note that the matching ExtractionEnd event is not fired.
            return;
        }
        using (entryStream)
        using (Stream s = new ListeningStream(streamListener, entryStream))
        {
            s.TransferTo(streamToWriteTo);
        }
        streamListener.FireEntryExtractionEnd(archiveEntry);
    }

#if !PORTABLE && !NETFX_CORE
    /// <summary>
    /// Extract to specific directory, retaining filename
    /// </summary>
    public static void WriteToDirectory(this IArchiveEntry entry, string destinationDirectory,
                                        ExtractOptions options = ExtractOptions.Overwrite)
    {
        string destinationFileName;
        string file = Path.GetFileName(entry.Key);
        if (options.HasFlag(ExtractOptions.ExtractFullPath))
        {
            // Recreate the entry's directory structure under the destination.
            string folder = Path.GetDirectoryName(entry.Key);
            string destdir = Path.Combine(destinationDirectory, folder);
            if (!Directory.Exists(destdir))
            {
                Directory.CreateDirectory(destdir);
            }
            destinationFileName = Path.Combine(destdir, file);
        }
        else
        {
            destinationFileName = Path.Combine(destinationDirectory, file);
        }
        entry.WriteToFile(destinationFileName, options);
    }

    /// <summary>
    /// Extract to specific file
    /// </summary>
    public static void WriteToFile(this IArchiveEntry entry, string destinationFileName,
                                   ExtractOptions options = ExtractOptions.Overwrite)
    {
        if (entry.IsDirectory)
        {
            return;
        }
        // CreateNew (rather than Create) makes extraction fail instead of overwriting.
        FileMode fm = FileMode.Create;
        if (!options.HasFlag(ExtractOptions.Overwrite))
        {
            fm = FileMode.CreateNew;
        }
        using (FileStream fs = File.Open(destinationFileName, fm))
        {
            entry.WriteTo(fs);
        }
        if (options.HasFlag(ExtractOptions.PreserveFileTime) || options.HasFlag(ExtractOptions.PreserveAttributes))
        {
            // update file time to original packed time
            FileInfo nf = new FileInfo(destinationFileName);
            if (nf.Exists)
            {
                if (options.HasFlag(ExtractOptions.PreserveFileTime))
                {
                    if (entry.CreatedTime.HasValue)
                    {
                        nf.CreationTime = entry.CreatedTime.Value;
                    }
                    if (entry.LastModifiedTime.HasValue)
                    {
                        nf.LastWriteTime = entry.LastModifiedTime.Value;
                    }
                    if (entry.LastAccessedTime.HasValue)
                    {
                        // BUG FIX: previously assigned entry.CreatedTime.Value here,
                        // silently stamping the wrong timestamp (and throwing if
                        // CreatedTime was null while LastAccessedTime was set).
                        nf.LastAccessTime = entry.LastAccessedTime.Value;
                    }
                }
                if (options.HasFlag(ExtractOptions.PreserveAttributes))
                {
                    if (entry.Attrib.HasValue)
                    {
                        nf.Attributes = (FileAttributes)System.Enum.ToObject(typeof(FileAttributes), entry.Attrib.Value);
                    }
                }
            }
        }
    }
#endif
}
}

View File

@@ -1,46 +0,0 @@
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
namespace SharpCompress.Archive.Rar
{
/// <summary>
/// A seekable RAR volume whose backing storage is a file on disk
/// (wraps a <see cref="FileInfo"/> and opens it for reading).
/// </summary>
internal class FileInfoRarArchiveVolume : RarVolume
{
    internal FileInfoRarArchiveVolume(FileInfo fileInfo, string password, Options options)
        : base(StreamingMode.Seekable, fileInfo.OpenRead(), password, FixOptions(options))
    {
        FileInfo = fileInfo;
        // Eagerly read and cache the file parts for this volume.
        FileParts = base.GetVolumeFileParts().ToReadOnly();
    }

    /// <summary>Cached file parts read from this volume.</summary>
    internal ReadOnlyCollection<RarFilePart> FileParts { get; private set; }

    /// <summary>The file backing this volume.</summary>
    internal FileInfo FileInfo { get; private set; }

    //make sure we're closing streams with fileinfo
    private static Options FixOptions(Options options)
    {
        if (options.HasFlag(Options.KeepStreamsOpen))
        {
            options = (Options)FlagUtility.SetFlag(options, Options.KeepStreamsOpen, false);
        }
        return options;
    }

    internal override RarFilePart CreateFilePart(FileHeader fileHeader, MarkHeader markHeader)
    {
        return new FileInfoRarFilePart(this, markHeader, fileHeader, FileInfo);
    }

    internal override IEnumerable<RarFilePart> ReadFileParts()
    {
        // Already materialized in the constructor.
        return FileParts;
    }
}
}

View File

@@ -1,110 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Archive.Tar
{
internal class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiveEntry
{
    private readonly string path;
    private readonly long size;
    private readonly DateTime? lastModified;
    private readonly bool closeStream;
    private readonly Stream stream;

    // Guards Close() against disposing the source stream more than once.
    // Mirrors the equivalent guard in ZipWritableArchiveEntry for consistency.
    private bool isDisposed;

    /// <summary>
    /// A pending (not-yet-written) TAR archive entry backed by a caller-supplied stream.
    /// </summary>
    /// <param name="archive">Owning archive.</param>
    /// <param name="stream">Source data stream; rewound on every <see cref="OpenEntryStream"/> call.</param>
    /// <param name="compressionType">Compression to apply when the archive is saved.</param>
    /// <param name="path">Entry key/name within the archive.</param>
    /// <param name="size">Uncompressed size to report via <see cref="Size"/>.</param>
    /// <param name="lastModified">Optional modification timestamp.</param>
    /// <param name="closeStream">When true, <paramref name="stream"/> is disposed when the entry is closed.</param>
    internal TarWritableArchiveEntry(TarArchive archive, Stream stream, CompressionType compressionType,
                                     string path, long size, DateTime? lastModified, bool closeStream)
        : base(archive, null, compressionType)
    {
        this.stream = stream;
        this.path = path;
        this.size = size;
        this.lastModified = lastModified;
        this.closeStream = closeStream;
    }

    // Not computed for a pending entry.
    public override long Crc
    {
        get { return 0; }
    }

    public override string Key
    {
        get { return path; }
    }

    // Unknown until the entry is actually compressed.
    public override long CompressedSize
    {
        get { return 0; }
    }

    public override long Size
    {
        get { return size; }
    }

    public override DateTime? LastModifiedTime
    {
        get { return lastModified; }
    }

    public override DateTime? CreatedTime
    {
        get { return null; }
    }

    public override DateTime? LastAccessedTime
    {
        get { return null; }
    }

    public override DateTime? ArchivedTime
    {
        get { return null; }
    }

    public override bool IsEncrypted
    {
        get { return false; }
    }

    public override bool IsDirectory
    {
        get { return false; }
    }

    public override bool IsSplit
    {
        get { return false; }
    }

    // A pending entry has no on-disk parts to enumerate.
    internal override IEnumerable<FilePart> Parts
    {
        get { throw new NotImplementedException(); }
    }

    Stream IWritableArchiveEntry.Stream
    {
        get
        {
            return stream;
        }
    }

    /// <summary>
    /// Returns the backing stream (rewound to its start) wrapped so that
    /// disposing the returned stream does not dispose the backing stream.
    /// </summary>
    public override Stream OpenEntryStream()
    {
        //ensure new stream is at the start, this could be reset
        stream.Seek(0, SeekOrigin.Begin);
        return new NonDisposingStream(stream);
    }

    internal override void Close()
    {
        // Dispose at most once, and only when we own the stream.
        if (closeStream && !isDisposed)
        {
            stream.Dispose();
            isDisposed = true;
        }
    }
}
}

View File

@@ -1,113 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Archive.Zip
{
/// <summary>
/// A pending (not-yet-written) ZIP archive entry backed by a caller-supplied stream.
/// Sizes/CRC report zero until the archive is actually saved.
/// </summary>
internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
{
    private readonly string path;
    private readonly long size;
    private readonly DateTime? lastModified;
    private readonly bool closeStream;
    private readonly Stream stream;
    private bool isDisposed;

    internal ZipWritableArchiveEntry(ZipArchive archive, Stream stream, string path, long size,
                                     DateTime? lastModified, bool closeStream)
        : base(archive, null)
    {
        this.stream = stream;
        this.path = path;
        this.size = size;
        this.lastModified = lastModified;
        this.closeStream = closeStream;
    }

    // Not available before the entry has been compressed and written.
    public override long Crc
    {
        get { return 0; }
    }

    public override string Key
    {
        get { return path; }
    }

    // Not available before the entry has been compressed and written.
    public override long CompressedSize
    {
        get { return 0; }
    }

    public override long Size
    {
        get { return size; }
    }

    public override DateTime? LastModifiedTime
    {
        get { return lastModified; }
    }

    public override DateTime? CreatedTime
    {
        get { return null; }
    }

    public override DateTime? LastAccessedTime
    {
        get { return null; }
    }

    public override DateTime? ArchivedTime
    {
        get { return null; }
    }

    public override bool IsEncrypted
    {
        get { return false; }
    }

    public override bool IsDirectory
    {
        get { return false; }
    }

    public override bool IsSplit
    {
        get { return false; }
    }

    // Pending entries have no on-disk parts.
    internal override IEnumerable<FilePart> Parts
    {
        get { throw new NotImplementedException(); }
    }

    Stream IWritableArchiveEntry.Stream
    {
        get
        {
            return stream;
        }
    }

    /// <summary>
    /// Rewinds the backing stream and hands it out wrapped so callers
    /// cannot dispose it out from under the archive.
    /// </summary>
    public override Stream OpenEntryStream()
    {
        //ensure new stream is at the start, this could be reset
        stream.Seek(0, SeekOrigin.Begin);
        return new NonDisposingStream(stream);
    }

    internal override void Close()
    {
        // Dispose the owned stream exactly once.
        if (closeStream && !isDisposed)
        {
            stream.Dispose();
            isDisposed = true;
        }
    }
}
}

View File

@@ -1,20 +0,0 @@
using System;
using System.Reflection;
using System.Runtime.CompilerServices;
// Assembly identity differs between the portable and full-framework builds.
#if PORTABLE
[assembly: AssemblyTitle("SharpCompress.Portable")]
[assembly: AssemblyProduct("SharpCompress.Portable")]
#else
[assembly: AssemblyTitle("SharpCompress")]
[assembly: AssemblyProduct("SharpCompress")]
#endif
// Test assemblies get access to internals only in unsigned builds
// (InternalsVisibleTo on a signed assembly would require the tests' public key).
#if UNSIGNED
[assembly: InternalsVisibleTo("SharpCompress.Test")]
[assembly: InternalsVisibleTo("SharpCompress.Test.Portable")]
#endif
[assembly: CLSCompliant(true)]

View File

@@ -1,23 +0,0 @@
using System.Text;
namespace SharpCompress.Common
{
/// <summary>
/// Global encoding settings used across the library when reading/writing archives.
/// Both settings start out as UTF-8.
/// </summary>
public static class ArchiveEncoding
{
    static ArchiveEncoding()
    {
        // UTF-8 is the sensible modern default for both purposes.
        Default = Encoding.UTF8;
        Password = Encoding.UTF8;
    }

    /// <summary>
    /// Default encoding to use when archive format doesn't specify one.
    /// </summary>
    public static Encoding Default { get; set; }

    /// <summary>
    /// Encoding used by encryption schemes which don't comply with RFC 2898.
    /// </summary>
    public static Encoding Password { get; set; }
}
}

View File

@@ -1,30 +0,0 @@
using SharpCompress.Compressor.Deflate;
namespace SharpCompress.Common
{
/// <summary>
/// Detailed compression properties when saving.
/// </summary>
public class CompressionInfo
{
    public CompressionInfo()
    {
        // Deflate level only matters when Type == CompressionType.Deflate.
        DeflateCompressionLevel = CompressionLevel.Default;
    }

    /// <summary>
    /// The algorithm to use. Must be valid for the format type.
    /// </summary>
    public CompressionType Type { get; set; }

    /// <summary>
    /// When CompressionType.Deflate is used, this property is referenced. Defaults to CompressionLevel.Default.
    /// </summary>
    public CompressionLevel DeflateCompressionLevel { get; set; }

    /// <summary>
    /// Allows a bare <see cref="CompressionType"/> to be used wherever a
    /// <see cref="CompressionInfo"/> is expected.
    /// </summary>
    public static implicit operator CompressionInfo(CompressionType compressionType)
    {
        var info = new CompressionInfo();
        info.Type = compressionType;
        return info;
    }
}
}

View File

@@ -1,86 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Common.GZip
{
    /// <summary>
    /// Entry metadata for a GZip stream; GetEntries always produces exactly one entry.
    /// </summary>
    public class GZipEntry : Entry
    {
        private readonly GZipFilePart part;

        internal GZipEntry(GZipFilePart filePart)
        {
            part = filePart;
        }

        public override CompressionType CompressionType
        {
            get
            {
                return CompressionType.GZip;
            }
        }

        // The GZip member's CRC is not surfaced here.
        public override long Crc
        {
            get
            {
                return 0;
            }
        }

        public override string Key
        {
            get
            {
                return part.FilePartName;
            }
        }

        // Sizes are not tracked for GZip entries.
        public override long CompressedSize
        {
            get
            {
                return 0;
            }
        }

        public override long Size
        {
            get
            {
                return 0;
            }
        }

        public override DateTime? LastModifiedTime
        {
            get
            {
                return part.DateModified;
            }
        }

        public override DateTime? CreatedTime
        {
            get
            {
                return null;
            }
        }

        public override DateTime? LastAccessedTime
        {
            get
            {
                return null;
            }
        }

        public override DateTime? ArchivedTime
        {
            get
            {
                return null;
            }
        }

        public override bool IsEncrypted
        {
            get
            {
                return false;
            }
        }

        public override bool IsDirectory
        {
            get
            {
                return false;
            }
        }

        public override bool IsSplit
        {
            get
            {
                return false;
            }
        }

        internal override IEnumerable<FilePart> Parts
        {
            get
            {
                return part.AsEnumerable<FilePart>();
            }
        }

        internal static IEnumerable<GZipEntry> GetEntries(Stream stream)
        {
            // A single entry wrapping the whole stream.
            yield return new GZipEntry(new GZipFilePart(stream));
        }
    }
}

View File

@@ -1,34 +0,0 @@
using System.IO;
namespace SharpCompress.Common.GZip
{
    /// <summary>
    /// A single GZip volume, created from either a stream or a file.
    /// </summary>
    public class GZipVolume : Volume
    {
#if !PORTABLE && !NETFX_CORE
        // Retained so file-based volumes keep a reference to their source file.
        private readonly FileInfo fileInfo;
#endif

        public GZipVolume(Stream stream, Options options)
            : base(stream, options)
        {
        }

#if !PORTABLE && !NETFX_CORE
        public GZipVolume(FileInfo fileInfo, Options options)
            : base(fileInfo.OpenRead(), options)
        {
            this.fileInfo = fileInfo;
        }
#endif

        public override bool IsFirstVolume
        {
            get
            {
                return true;
            }
        }

        public override bool IsMultiVolume
        {
            get
            {
                return true;
            }
        }
    }
}

View File

@@ -1,23 +0,0 @@
using System;
namespace SharpCompress.Common
{
    // Behavioral flags passed when opening archives and volumes.
    [Flags]
    public enum Options
    {
        /// <summary>
        /// No options specified
        /// </summary>
        None = 0,
        /// <summary>
        /// SharpCompress will keep the supplied streams open
        /// </summary>
        KeepStreamsOpen = 1,
        /// <summary>
        /// Look for RarArchive (Check for self-extracting archives or cases where RarArchive isn't at the start of the file)
        /// </summary>
        LookForHeader = 2,
    }
}

View File

@@ -1,36 +0,0 @@
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
    // RAR main (archive) header, read after the mark header. Carries archive-wide
    // flags such as whether file headers are password protected.
    internal class ArchiveHeader : RarHeader
    {
        // Main header size constants; the larger value presumably accounts for the
        // extra ENCRYPTVER byte — used by callers outside this file.
        internal const short mainHeaderSizeWithEnc = 7;
        internal const short mainHeaderSize = 6;

        protected override void ReadFromReader(MarkingBinaryReader reader)
        {
            HighPosAv = reader.ReadInt16();
            PosAv = reader.ReadInt32();
            // The encryption version byte is only present when ENCRYPTVER is flagged.
            if (ArchiveHeaderFlags.HasFlag(ArchiveFlags.ENCRYPTVER))
            {
                EncryptionVersion = reader.ReadByte();
            }
        }

        // The base header's untyped flags, viewed as archive flags.
        internal ArchiveFlags ArchiveHeaderFlags
        {
            get { return (ArchiveFlags) base.Flags; }
        }

        internal short HighPosAv { get; private set; }
        internal int PosAv { get; private set; }
        internal byte EncryptionVersion { get; private set; }

        // True when the archive's PASSWORD flag is set.
        public bool HasPassword
        {
            get { return ArchiveHeaderFlags.HasFlag(ArchiveFlags.PASSWORD); }
        }
    }
}

View File

@@ -1,28 +0,0 @@
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
    // RAR end-of-archive header; may carry an optional data CRC and volume number.
    internal class EndArchiveHeader : RarHeader
    {
        protected override void ReadFromReader(MarkingBinaryReader reader)
        {
            // Both trailing fields are optional and gated by their flags.
            if (EndArchiveFlags.HasFlag(EndArchiveFlags.EARC_DATACRC))
            {
                ArchiveCRC = reader.ReadInt32();
            }
            if (EndArchiveFlags.HasFlag(EndArchiveFlags.EARC_VOLNUMBER))
            {
                VolumeNumber = reader.ReadInt16();
            }
        }

        // The base header's untyped flags, viewed as end-of-archive flags.
        internal EndArchiveFlags EndArchiveFlags
        {
            get { return (EndArchiveFlags) base.Flags; }
        }

        // Null when the corresponding flag was absent from the header.
        internal int? ArchiveCRC { get; private set; }
        internal short? VolumeNumber { get; private set; }
    }
}

View File

@@ -1,233 +0,0 @@
using System;
using System.IO;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
    // RAR 2.x/3.x file header (HeaderType.FileHeader) or service record
    // (HeaderType.NewSubHeader). Parses name, sizes, timestamps, optional salt and
    // sub-data from the stream after RarHeader has consumed the base fields.
    internal class FileHeader : RarHeader
    {
        // Size in bytes of the AES salt stored when FileFlags.SALT is set.
        private const byte SALT_SIZE = 8;

        // Fixed portion of a NewSub header; used to compute the trailing sub-data size.
        private const byte NEWLHD_SIZE = 32;

        protected override void ReadFromReader(MarkingBinaryReader reader)
        {
            uint lowUncompressedSize = reader.ReadUInt32();
            HostOS = (HostOS)reader.ReadByte();
            FileCRC = reader.ReadUInt32();
            FileLastModifiedTime = Utility.DosDateToDateTime(reader.ReadInt32());
            RarVersion = reader.ReadByte();
            PackingMethod = reader.ReadByte();
            short nameSize = reader.ReadInt16();
            FileAttributes = reader.ReadInt32();
            uint highCompressedSize = 0;
            uint highUncompressedkSize = 0;
            if (FileFlags.HasFlag(FileFlags.LARGE))
            {
                // 64-bit sizes: the high 32 bits follow the fixed fields.
                highCompressedSize = reader.ReadUInt32();
                highUncompressedkSize = reader.ReadUInt32();
            }
            else
            {
                if (lowUncompressedSize == 0xffffffff)
                {
                    // 0xffffffff marks "unknown size": treat as effectively unbounded.
                    lowUncompressedSize = 0xffffffff;
                    highUncompressedkSize = int.MaxValue;
                }
            }
            // AdditionalSize (from the base header) holds the low 32 bits of the packed size.
            CompressedSize = UInt32To64(highCompressedSize, AdditionalSize);
            UncompressedSize = UInt32To64(highUncompressedkSize, lowUncompressedSize);
            // Clamp the stored name length to 4 KiB as a sanity limit.
            nameSize = nameSize > 4 * 1024 ? (short)(4 * 1024) : nameSize;
            byte[] fileNameBytes = reader.ReadBytes(nameSize);
            switch (HeaderType)
            {
                case HeaderType.FileHeader:
                    {
                        if (FileFlags.HasFlag(FileFlags.UNICODE))
                        {
                            // A NUL byte separates the 8-bit name from the encoded Unicode name.
                            int length = 0;
                            while (length < fileNameBytes.Length
                                   && fileNameBytes[length] != 0)
                            {
                                length++;
                            }
                            if (length != nameSize)
                            {
                                length++;
                                FileName = FileNameDecoder.Decode(fileNameBytes, length);
                            }
                            else
                            {
                                // No NUL found: the whole buffer is a plain name.
                                FileName = DecodeDefault(fileNameBytes);
                            }
                        }
                        else
                        {
                            FileName = DecodeDefault(fileNameBytes);
                        }
                        FileName = ConvertPath(FileName, HostOS);
                    }
                    break;
                case HeaderType.NewSubHeader:
                    {
                        // Bytes remaining after the fixed fields and the name are sub-data.
                        int datasize = HeaderSize - NEWLHD_SIZE - nameSize;
                        if (FileFlags.HasFlag(FileFlags.SALT))
                        {
                            datasize -= SALT_SIZE;
                        }
                        if (datasize > 0)
                        {
                            SubData = reader.ReadBytes(datasize);
                        }
                        // NewSubHeaderType is expected to compare against the raw name
                        // bytes — see its Equals override (not visible in this file).
                        if (NewSubHeaderType.SUBHEAD_TYPE_RR.Equals(fileNameBytes))
                        {
                            // Recovery record: sector count is a little-endian int at offset 8.
                            RecoverySectors = SubData[8] + (SubData[9] << 8)
                                              + (SubData[10] << 16) + (SubData[11] << 24);
                        }
                    }
                    break;
            }
            if (FileFlags.HasFlag(FileFlags.SALT))
            {
                Salt = reader.ReadBytes(SALT_SIZE);
            }
            if (FileFlags.HasFlag(FileFlags.EXTTIME))
            {
                // verify that the end of the header hasn't been reached before reading the Extended Time.
                // some tools incorrectly omit Extended Time despite specifying FileFlags.EXTTIME, which most parsers tolerate.
                if (ReadBytes + reader.CurrentReadByteCount <= HeaderSize - 2)
                {
                    ushort extendedFlags = reader.ReadUInt16();
                    FileLastModifiedTime = ProcessExtendedTime(extendedFlags, FileLastModifiedTime, reader, 0);
                    FileCreatedTime = ProcessExtendedTime(extendedFlags, null, reader, 1);
                    FileLastAccessedTime = ProcessExtendedTime(extendedFlags, null, reader, 2);
                    FileArchivedTime = ProcessExtendedTime(extendedFlags, null, reader, 3);
                }
            }
        }

        //only the full .net framework will do other code pages than unicode/utf8
        private string DecodeDefault(byte[] bytes)
        {
            return ArchiveEncoding.Default.GetString(bytes, 0, bytes.Length);
        }

        // Combines high/low 32-bit halves into one 64-bit value.
        private long UInt32To64(uint x, uint y)
        {
            long l = x;
            l <<= 32;
            return l + y;
        }

        // Reads one extended timestamp (slot i: 0=mtime, 1=ctime, 2=atime, 3=arctime).
        // Each slot has a 4-bit field in extendedFlags: bit 3 = present, bit 2 = do
        // not add a second, low 2 bits = number of extra sub-second bytes stored.
        private static DateTime? ProcessExtendedTime(ushort extendedFlags, DateTime? time, MarkingBinaryReader reader,
                                                     int i)
        {
            uint rmode = (uint)extendedFlags >> (3 - i) * 4;
            if ((rmode & 8) == 0)
            {
                return null;
            }
            if (i != 0)
            {
                // Slots other than mtime carry their own DOS timestamp.
                uint DosTime = reader.ReadUInt32();
                time = Utility.DosDateToDateTime(DosTime);
            }
            if ((rmode & 4) == 0)
            {
                time = time.Value.AddSeconds(1);
            }
            uint nanosecondHundreds = 0;
            int count = (int)rmode & 3;
            for (int j = 0; j < count; j++)
            {
                byte b = reader.ReadByte();
                nanosecondHundreds |= (((uint)b) << ((j + 3 - count) * 8));
            }
            //10^-7 to 10^-3
            return time.Value.AddMilliseconds(nanosecondHundreds * Math.Pow(10, -4));
        }

        // Normalizes path separators for the current platform (portable builds
        // always use forward slashes).
        private static string ConvertPath(string path, HostOS os)
        {
#if PORTABLE || NETFX_CORE
            return path.Replace('\\', '/');
#else
            switch (os)
            {
                case HostOS.MacOS:
                case HostOS.Unix:
                    {
                        if (Path.DirectorySeparatorChar == '\\')
                        {
                            return path.Replace('/', '\\');
                        }
                    }
                    break;
                default:
                    {
                        if (Path.DirectorySeparatorChar == '/')
                        {
                            return path.Replace('\\', '/');
                        }
                    }
                    break;
            }
            return path;
#endif
        }

        // Set by the header factory once the position of the packed data is known.
        internal long DataStartPosition { get; set; }
        internal HostOS HostOS { get; private set; }
        internal uint FileCRC { get; private set; }
        internal DateTime? FileLastModifiedTime { get; private set; }
        internal DateTime? FileCreatedTime { get; private set; }
        internal DateTime? FileLastAccessedTime { get; private set; }
        internal DateTime? FileArchivedTime { get; private set; }
        internal byte RarVersion { get; private set; }
        internal byte PackingMethod { get; private set; }
        internal int FileAttributes { get; private set; }

        // The base header's untyped flags, viewed as file flags.
        internal FileFlags FileFlags
        {
            get { return (FileFlags)base.Flags; }
        }

        internal long CompressedSize { get; private set; }
        internal long UncompressedSize { get; private set; }
        internal string FileName { get; private set; }
        // Raw sub-data of a NewSub header (null for plain file headers).
        internal byte[] SubData { get; private set; }
        internal int RecoverySectors { get; private set; }
        // AES salt, or null when the entry is not encrypted.
        internal byte[] Salt { get; private set; }

        public override string ToString()
        {
            return FileName;
        }

        // Bounded stream over the packed data (streaming mode only).
        public Stream PackedStream { get; set; }
    }
}

View File

@@ -1,85 +0,0 @@
using System;
namespace SharpCompress.Common.Rar.Headers
{
    // RAR 2.x/3.x block type byte (third byte of every header; see RarHeader).
    internal enum HeaderType
    {
        MarkHeader = 0x72,
        ArchiveHeader = 0x73,
        FileHeader = 0x74,
        CommHeader = 0x75,
        AvHeader = 0x76,
        SubHeader = 0x77,
        ProtectHeader = 0x78,
        SignHeader = 0x79,
        NewSubHeader = 0x7a,
        EndArchiveHeader = 0x7b,
    }

    // Flag shared by all header types.
    internal enum HeaderFlags : short
    {
        // The header is followed by additional data whose size is stored in the header.
        LONG_BLOCK = -0x8000,
    }

    // Flags of the main (archive) header.
    [Flags]
    internal enum ArchiveFlags
    {
        VOLUME = 0x0001,
        COMMENT = 0x0002,
        LOCK = 0x0004,
        SOLID = 0x0008,
        NEWNUMBERING = 0x0010,
        AV = 0x0020,
        PROTECT = 0x0040,
        PASSWORD = 0x0080,
        FIRSTVOLUME = 0x0100,
        ENCRYPTVER = 0x0200,
    }

    // Operating system recorded in each file header.
    internal enum HostOS
    {
        MSDOS = 0,
        OS2 = 1,
        Win32 = 2,
        Unix = 3,
        MacOS = 4,
        BeOS = 5
    }

    // Flags of a file (or service) header.
    [Flags]
    internal enum FileFlags : ushort
    {
        SPLIT_BEFORE = 0x0001,
        SPLIT_AFTER = 0x0002,
        PASSWORD = 0x0004,
        COMMENT = 0x0008,
        SOLID = 0x0010,
        // The WINDOW* values below all fall within the bits covered by WINDOWMASK.
        WINDOWMASK = 0x00e0,
        WINDOW64 = 0x0000,
        WINDOW128 = 0x0020,
        WINDOW256 = 0x0040,
        WINDOW512 = 0x0060,
        WINDOW1024 = 0x0080,
        WINDOW2048 = 0x00a0,
        WINDOW4096 = 0x00c0,
        // DIRECTORY shares the same bit pattern as WINDOWMASK (0x00e0).
        DIRECTORY = 0x00e0,
        LARGE = 0x0100,
        UNICODE = 0x0200,
        SALT = 0x0400,
        VERSION = 0x0800,
        EXTTIME = 0x1000,
        EXTFLAGS = 0x2000,
    }

    // Flags of the end-of-archive header.
    [Flags]
    internal enum EndArchiveFlags
    {
        EARC_NEXT_VOLUME = 0x0001,
        EARC_DATACRC = 0x0002,
        EARC_REVSPACE = 0x0004,
        EARC_VOLNUMBER = 0x0008,
    }
}

View File

@@ -1,59 +0,0 @@
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
    // The RAR mark (signature) header: the 7 base bytes spell the "Rar!" magic,
    // so validation only checks the base fields parsed by RarHeader.
    internal class MarkHeader : RarHeader
    {
        protected override void ReadFromReader(MarkingBinaryReader reader)
        {
            // The mark header carries no payload beyond the base fields.
        }

        // True when the base fields match the RAR 2.x/3.x signature
        // ("Ra" -> 0x6152, type 0x72, flags 0x1a21, size 7).
        internal bool IsValid()
        {
            if (HeadCRC != 0x6152)
            {
                return false;
            }
            if (HeaderType != HeaderType.MarkHeader)
            {
                return false;
            }
            if (Flags != 0x1a21)
            {
                return false;
            }
            if (HeaderSize != BaseBlockSize)
            {
                return false;
            }
            return true;
        }

        // Old-format (RAR 1.5 "RE~^") signature detection was never implemented;
        // this stub always reports false, matching the previous behavior.
        internal bool IsSignature()
        {
            return false;
        }

        // Never assigned here; would indicate a RAR 1.5 old-format archive.
        internal bool OldFormat { get; private set; }
    }
}

View File

@@ -1,95 +0,0 @@
using System.IO;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
    // Base type for all RAR 2.x/3.x headers. Reads the 7-byte common prefix
    // (CRC, type, flags, size) and supports "promoting" an untyped header to a
    // concrete header class once the type byte is known.
    internal class RarHeader
    {
        internal const short BaseBlockSize = 7;
        // Flag bit indicating additional data follows the header (see HeaderFlags.LONG_BLOCK).
        internal const short LONG_BLOCK = -0x8000;

        // Copies the already-parsed base fields into a freshly created typed header.
        private void FillBase(RarHeader baseHeader)
        {
            HeadCRC = baseHeader.HeadCRC;
            HeaderType = baseHeader.HeaderType;
            Flags = baseHeader.Flags;
            HeaderSize = baseHeader.HeaderSize;
            AdditionalSize = baseHeader.AdditionalSize;
            ReadBytes = baseHeader.ReadBytes;
        }

        // Reads the next base header from the stream; returns null at end of stream.
        internal static RarHeader Create(MarkingBinaryReader reader)
        {
            try
            {
                RarHeader header = new RarHeader();
                reader.Mark();
                header.ReadFromReader(reader);
                header.ReadBytes += reader.CurrentReadByteCount;
                return header;
            }
            catch (EndOfStreamException)
            {
                return null;
            }
        }

        protected virtual void ReadFromReader(MarkingBinaryReader reader)
        {
            HeadCRC = reader.ReadInt16();
            HeaderType = (HeaderType)(reader.ReadByte() & 0xff);
            Flags = reader.ReadInt16();
            HeaderSize = reader.ReadInt16();
            // LONG_BLOCK headers carry a 32-bit additional (data) size.
            if (FlagUtility.HasFlag(Flags, LONG_BLOCK))
            {
                AdditionalSize = reader.ReadUInt32();
            }
        }

        // Re-parses this header as concrete type T, then skips any header bytes the
        // typed parser did not consume so the stream lands on the next block.
        internal T PromoteHeader<T>(MarkingBinaryReader reader)
            where T : RarHeader, new()
        {
            T header = new T();
            header.FillBase(this);
            reader.Mark();
            header.ReadFromReader(reader);
            header.ReadBytes += reader.CurrentReadByteCount;
            int headerSizeDiff = header.HeaderSize - (int)header.ReadBytes;
            if (headerSizeDiff > 0)
            {
                reader.ReadBytes(headerSizeDiff);
            }
            return header;
        }

        // Extension point for subclasses; no callers are visible in this file.
        protected virtual void PostReadingBytes(MarkingBinaryReader reader)
        {
        }

        /// <summary>
        /// This is the number of bytes read when reading the header
        /// </summary>
        protected long ReadBytes { get; private set; }

        protected short HeadCRC { get; private set; }
        internal HeaderType HeaderType { get; private set; }

        /// <summary>
        /// Untyped flags. These should be typed when Promoting to another header
        /// </summary>
        protected short Flags { get; private set; }

        protected short HeaderSize { get; private set; }

        /// <summary>
        /// This additional size of the header could be file data
        /// </summary>
        protected uint AdditionalSize { get; private set; }
    }
}

View File

@@ -1,251 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
    /// <summary>
    /// Produces the sequence of typed RAR headers from a raw archive stream,
    /// handling SFX stubs, header decryption, and skipping of attached data.
    /// </summary>
    internal class RarHeaderFactory
    {
        private const int MAX_SFX_SIZE = 0x80000 - 16; //archive.cpp line 136

        internal RarHeaderFactory(StreamingMode mode, Options options, string password = null)
        {
            StreamingMode = mode;
            Options = options;
            Password = password;
        }

        private Options Options { get; set; }
        public string Password { get; private set; }
        internal StreamingMode StreamingMode { get; private set; }

        // Becomes true once the archive header reports a password; subsequent
        // headers are then read through the decrypting reader.
        internal bool IsEncrypted { get; private set; }

        /// <summary>
        /// Lazily yields headers until the end-of-archive header or end of stream.
        /// </summary>
        internal IEnumerable<RarHeader> ReadHeaders(Stream stream)
        {
            if (Options.HasFlag(Options.LookForHeader))
            {
                stream = CheckSFX(stream);
            }
            RarHeader header;
            while ((header = ReadNextHeader(stream)) != null)
            {
                yield return header;
                if (header.HeaderType == HeaderType.EndArchiveHeader)
                {
                    yield break; // the end?
                }
            }
        }

        /// <summary>
        /// Scans up to MAX_SFX_SIZE bytes for the RAR signature (old "RE~^" or new
        /// "Rar!\x1a\x07\x00" format), skipping any self-extracting stub. Matched
        /// signature bytes are rewound so header parsing can re-read them.
        /// </summary>
        private Stream CheckSFX(Stream stream)
        {
            RewindableStream rewindableStream = GetRewindableStream(stream);
            stream = rewindableStream;
            BinaryReader reader = new BinaryReader(rewindableStream);
            try
            {
                int count = 0;
                while (true)
                {
                    byte firstByte = reader.ReadByte();
                    // BUGFIX: count was never incremented, so the MAX_SFX_SIZE guard
                    // below was dead code and a stream without a signature was
                    // scanned all the way to an EndOfStreamException.
                    count++;
                    if (firstByte == 0x52)
                    {
                        MemoryStream buffer = new MemoryStream();
                        byte[] nextThreeBytes = reader.ReadBytes(3);
                        if ((nextThreeBytes[0] == 0x45)
                            && (nextThreeBytes[1] == 0x7E)
                            && (nextThreeBytes[2] == 0x5E))
                        {
                            //old format and isvalid
                            buffer.WriteByte(0x52);
                            buffer.Write(nextThreeBytes, 0, 3);
                            rewindableStream.Rewind(buffer);
                            break;
                        }
                        byte[] secondThreeBytes = reader.ReadBytes(3);
                        if ((nextThreeBytes[0] == 0x61)
                            && (nextThreeBytes[1] == 0x72)
                            && (nextThreeBytes[2] == 0x21)
                            && (secondThreeBytes[0] == 0x1A)
                            && (secondThreeBytes[1] == 0x07)
                            && (secondThreeBytes[2] == 0x00))
                        {
                            //new format and isvalid
                            buffer.WriteByte(0x52);
                            buffer.Write(nextThreeBytes, 0, 3);
                            buffer.Write(secondThreeBytes, 0, 3);
                            rewindableStream.Rewind(buffer);
                            break;
                        }
                        // Not a signature: push the lookahead bytes back so they are
                        // re-scanned one at a time (and re-counted above).
                        buffer.Write(nextThreeBytes, 0, 3);
                        buffer.Write(secondThreeBytes, 0, 3);
                        rewindableStream.Rewind(buffer);
                    }
                    if (count > MAX_SFX_SIZE)
                    {
                        break;
                    }
                }
            }
            catch (Exception e)
            {
                if (!Options.HasFlag(Options.KeepStreamsOpen))
                {
#if NET2
                    reader.Close();
#else
                    reader.Dispose();
#endif
                }
                throw new InvalidFormatException("Error trying to read rar signature.", e);
            }
            return stream;
        }

        // Wraps the stream in a RewindableStream unless it already is one.
        private RewindableStream GetRewindableStream(Stream stream)
        {
            RewindableStream rewindableStream = stream as RewindableStream;
            if (rewindableStream == null)
            {
                rewindableStream = new RewindableStream(stream);
            }
            return rewindableStream;
        }

        // Reads and promotes one header; depending on streaming mode, either seeks
        // past or wraps the data that follows data-bearing headers.
        private RarHeader ReadNextHeader(Stream stream)
        {
#if PORTABLE
            var reader = new MarkingBinaryReader(stream);
#else
            var reader = new RarCryptoBinaryReader(stream, Password);
            if (IsEncrypted)
            {
                if (Password == null)
                {
                    throw new CryptographicException("Encrypted Rar archive has no password specified.");
                }
                // Drop any decrypted bytes buffered past the previous header before
                // reading the salt for this header.
                reader.SkipQueue();
                byte[] salt = reader.ReadBytes(8);
                reader.InitializeAes(salt);
            }
#endif
            RarHeader header = RarHeader.Create(reader);
            if (header == null)
            {
                return null;
            }
            switch (header.HeaderType)
            {
                case HeaderType.ArchiveHeader:
                    {
                        var ah = header.PromoteHeader<ArchiveHeader>(reader);
                        IsEncrypted = ah.HasPassword;
                        return ah;
                    }
                case HeaderType.MarkHeader:
                    {
                        return header.PromoteHeader<MarkHeader>(reader);
                    }
                case HeaderType.ProtectHeader:
                    {
                        ProtectHeader ph = header.PromoteHeader<ProtectHeader>(reader);
                        // skip the recovery record data, we do not use it.
                        switch (StreamingMode)
                        {
                            case StreamingMode.Seekable:
                                {
                                    reader.BaseStream.Position += ph.DataSize;
                                }
                                break;
                            case StreamingMode.Streaming:
                                {
                                    reader.BaseStream.Skip(ph.DataSize);
                                }
                                break;
                            default:
                                {
                                    throw new InvalidFormatException("Invalid StreamingMode");
                                }
                        }
                        return ph;
                    }
                case HeaderType.NewSubHeader:
                    {
                        FileHeader fh = header.PromoteHeader<FileHeader>(reader);
                        switch (StreamingMode)
                        {
                            case StreamingMode.Seekable:
                                {
                                    fh.DataStartPosition = reader.BaseStream.Position;
                                    reader.BaseStream.Position += fh.CompressedSize;
                                }
                                break;
                            case StreamingMode.Streaming:
                                {
                                    //skip the data because it's useless?
                                    reader.BaseStream.Skip(fh.CompressedSize);
                                }
                                break;
                            default:
                                {
                                    throw new InvalidFormatException("Invalid StreamingMode");
                                }
                        }
                        return fh;
                    }
                case HeaderType.FileHeader:
                    {
                        FileHeader fh = header.PromoteHeader<FileHeader>(reader);
                        switch (StreamingMode)
                        {
                            case StreamingMode.Seekable:
                                {
                                    fh.DataStartPosition = reader.BaseStream.Position;
                                    reader.BaseStream.Position += fh.CompressedSize;
                                }
                                break;
                            case StreamingMode.Streaming:
                                {
                                    // Expose the (possibly encrypted) packed data as a
                                    // bounded stream on the file header.
                                    var ms = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
                                    if (fh.Salt == null)
                                    {
                                        fh.PackedStream = ms;
                                    }
                                    else
                                    {
#if PORTABLE
                                        throw new NotSupportedException("Encrypted Rar files aren't supported in portable distro.");
#else
                                        fh.PackedStream = new RarCryptoWrapper(ms, Password, fh.Salt);
#endif
                                    }
                                }
                                break;
                            default:
                                {
                                    throw new InvalidFormatException("Invalid StreamingMode");
                                }
                        }
                        return fh;
                    }
                case HeaderType.EndArchiveHeader:
                    {
                        return header.PromoteHeader<EndArchiveHeader>(reader);
                    }
                default:
                    {
                        throw new InvalidFormatException("Invalid Rar Header: " + header.HeaderType.ToString());
                    }
            }
        }
    }
}

View File

@@ -1,23 +0,0 @@
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
    // RAR authenticity-verification (AV) header; fields are read in stream order.
    internal class AVHeader : RarHeader
    {
        protected override void ReadFromReader(MarkingBinaryReader reader)
        {
            UnpackVersion = reader.ReadByte();
            Method = reader.ReadByte();
            AVVersion = reader.ReadByte();
            AVInfoCRC = reader.ReadInt32();
        }

        internal int AVInfoCRC { get; private set; }
        internal byte UnpackVersion { get; private set; }
        internal byte Method { get; private set; }
        internal byte AVVersion { get; private set; }
    }
}

View File

@@ -1,81 +0,0 @@
using System.Collections.Generic;
using System.IO;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar
{
    // BinaryReader that transparently decrypts AES-encrypted RAR header data.
    // Decryption happens in whole 16-byte blocks, so surplus decrypted bytes are
    // queued for subsequent reads.
    internal class RarCryptoBinaryReader : MarkingBinaryReader
    {
        private RarRijndael rijndael;
        private byte[] salt;
        private readonly string password;
        // Decrypted-but-unconsumed bytes left over from whole-block decryption.
        private readonly Queue<byte> data = new Queue<byte>();

        public RarCryptoBinaryReader(Stream stream, string password )
            : base(stream)
        {
            this.password = password;
        }

        // Encryption is active once InitializeAes has stored a salt.
        protected bool UseEncryption
        {
            get { return salt != null; }
        }

        // Derives the AES state from the password and the given 8-byte salt.
        internal void InitializeAes(byte[] salt)
        {
            this.salt = salt;
            rijndael = RarRijndael.InitializeFrom(password, salt);
        }

        public override byte[] ReadBytes(int count)
        {
            if (UseEncryption)
            {
                return ReadAndDecryptBytes(count);
            }
            return base.ReadBytes(count);
        }

        private byte[] ReadAndDecryptBytes(int count)
        {
            int queueSize = data.Count;
            int sizeToRead = count - queueSize;
            if (sizeToRead > 0)
            {
                // Round the shortfall up to the next multiple of 16 (AES block size).
                int alignedSize = sizeToRead + ((~sizeToRead + 1) & 0xf);
                for (int i = 0; i < alignedSize / 16; i++)
                {
                    //long ax = System.currentTimeMillis();
                    byte[] cipherText = base.ReadBytes(16);
                    var readBytes = rijndael.ProcessBlock(cipherText);
                    foreach (var readByte in readBytes)
                        data.Enqueue(readByte);
                }
            }
            var decryptedBytes = new byte[count];
            for (int i = 0; i < count; i++)
            {
                decryptedBytes[i] = data.Dequeue();
            }
            return decryptedBytes;
        }

        public void ClearQueue()
        {
            data.Clear();
        }

        // Advances the underlying stream past the bytes still buffered in the
        // queue, then drops them. RarHeaderFactory calls this before reading the
        // next header's salt.
        public void SkipQueue()
        {
            var position = BaseStream.Position;
            BaseStream.Position = position + data.Count;
            ClearQueue();
        }
    }
}

View File

@@ -1,113 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Org.BouncyCastle.Crypto.Digests;
using Org.BouncyCastle.Crypto.Engines;
using Org.BouncyCastle.Crypto.Parameters;
namespace SharpCompress.Common.Rar
{
    // RAR 3.x AES-128 decryption built on BouncyCastle's raw Rijndael engine.
    // Key and IV are derived from the password and salt with the RAR key
    // derivation: 2^18 rounds of SHA-1 over (password bytes + salt + counter).
    internal class RarRijndael : IDisposable
    {
        internal const int CRYPTO_BLOCK_SIZE = 16;

        private readonly string password;
        private readonly byte[] salt;
        private byte[] aesInitializationVector;
        private RijndaelEngine rijndael;

        private RarRijndael(string password, byte[] salt)
        {
            this.password = password;
            this.salt = salt;
        }

        // One-shot SHA-1 over the whole input.
        private byte[] ComputeHash(byte[] input)
        {
            var sha = new Sha1Digest();
            sha.BlockUpdate(input, 0, input.Length);
            byte[] result = new byte[sha.GetDigestSize()];
            sha.DoFinal(result, 0);
            return result;
        }

        private void Initialize()
        {
            rijndael = new RijndaelEngine();
            aesInitializationVector = new byte[CRYPTO_BLOCK_SIZE];
            // Password laid out as two bytes per char (low byte + 0), then the salt.
            int rawLength = 2 * password.Length;
            byte[] rawPassword = new byte[rawLength + 8];
            byte[] passwordBytes = Encoding.UTF8.GetBytes(password);
            // NOTE(review): bytes come from a UTF-8 encoder but are indexed one per
            // char; non-ASCII passwords may derive a different key than the
            // System.Security.Cryptography variant (which uses Encoding.Unicode) —
            // confirm intended behavior.
            for (int i = 0; i < password.Length; i++)
            {
                rawPassword[i * 2] = passwordBytes[i];
                rawPassword[i * 2 + 1] = 0;
            }
            for (int i = 0; i < salt.Length; i++)
            {
                rawPassword[i + rawLength] = salt[i];
            }
            const int noOfRounds = (1 << 18);
            IList<byte> bytes = new List<byte>();
            byte[] digest;
            //TODO slow code below, find ways to optimize
            for (int i = 0; i < noOfRounds; i++)
            {
                bytes.AddRange(rawPassword);
                bytes.AddRange(new[] { (byte)i, (byte)(i >> 8), (byte)(i >> CRYPTO_BLOCK_SIZE) });
                // Every (2^18 / 16) rounds, one IV byte is taken from the running hash.
                if (i % (noOfRounds / CRYPTO_BLOCK_SIZE) == 0)
                {
                    digest = ComputeHash(bytes.ToArray());
                    aesInitializationVector[i / (noOfRounds / CRYPTO_BLOCK_SIZE)] = digest[19];
                }
            }
            digest = ComputeHash(bytes.ToArray());
            //slow code ends
            // Fold the first 16 digest bytes into the AES key (byte-order shuffle).
            byte[] aesKey = new byte[CRYPTO_BLOCK_SIZE];
            for (int i = 0; i < 4; i++)
                for (int j = 0; j < 4; j++)
                    aesKey[i * 4 + j] = (byte)
                                        (((digest[i * 4] * 0x1000000) & 0xff000000 |
                                          (uint)((digest[i * 4 + 1] * 0x10000) & 0xff0000) |
                                          (uint)((digest[i * 4 + 2] * 0x100) & 0xff00) |
                                          (uint)(digest[i * 4 + 3] & 0xff)) >> (j * 8));
            // false = decryption mode.
            rijndael.Init(false, new KeyParameter(aesKey));
        }

        public static RarRijndael InitializeFrom(string password, byte[] salt)
        {
            var rijndael = new RarRijndael(password, salt);
            rijndael.Initialize();
            return rijndael;
        }

        // Decrypts one 16-byte block; CBC chaining is done manually by XORing with
        // the previous ciphertext block kept in aesInitializationVector.
        public byte[] ProcessBlock(byte[] cipherText)
        {
            var plainText = new byte[CRYPTO_BLOCK_SIZE];
            var decryptedBytes = new List<byte>();
            rijndael.ProcessBlock(cipherText, 0, plainText, 0);
            for (int j = 0; j < plainText.Length; j++)
                decryptedBytes.Add((byte)(plainText[j] ^ aesInitializationVector[j % 16])); //32:114, 33:101
            for (int j = 0; j < aesInitializationVector.Length; j++)
                aesInitializationVector[j] = cipherText[j];
            return decryptedBytes.ToArray();
        }

        public void Dispose()
        {
            // Nothing to release: the managed engine holds no unmanaged state.
        }
    }
}

View File

@@ -1,111 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
namespace SharpCompress.Common.Rar
{
    // RAR 3.x AES-128 decryption built on System.Security.Cryptography.
    // Key and IV are derived from the password and salt with the RAR key
    // derivation: 2^18 rounds of SHA-1 over (UTF-16LE password + salt + counter).
    internal class RarRijndael : IDisposable
    {
        internal const int CRYPTO_BLOCK_SIZE = 16;

        private readonly string password;
        private readonly byte[] salt;
        private byte[] aesInitializationVector;
        private Rijndael rijndael;

        private RarRijndael(string password, byte[] salt)
        {
            this.password = password;
            this.salt = salt;
        }

        private void Initialize()
        {
            // CBC chaining/XOR is done manually in ProcessBlock, so padding is off.
            rijndael = new RijndaelManaged() { Padding = PaddingMode.None };
            aesInitializationVector = new byte[CRYPTO_BLOCK_SIZE];
            // UTF-16LE password bytes followed by the 8-byte salt.
            int rawLength = 2 * password.Length;
            byte[] rawPassword = new byte[rawLength + 8];
            byte[] passwordBytes = Encoding.Unicode.GetBytes(password);
            for (int i = 0; i < rawLength; i++)
            {
                rawPassword[i] = passwordBytes[i];
            }
            for (int i = 0; i < salt.Length; i++)
            {
                rawPassword[i + rawLength] = salt[i];
            }
            // NOTE(review): sha is never disposed; consider a using block.
            SHA1 sha = new SHA1CryptoServiceProvider();
            const int noOfRounds = (1 << 18);
            IList<byte> bytes = new List<byte>();
            byte[] digest;
            //TODO slow code below, find ways to optimize
            for (int i = 0; i < noOfRounds; i++)
            {
                bytes.AddRange(rawPassword);
                bytes.AddRange(new[] { (byte)i, (byte)(i >> 8), (byte)(i >> CRYPTO_BLOCK_SIZE) });
                // Every (2^18 / 16) rounds, one IV byte is taken from the running hash.
                if (i % (noOfRounds / CRYPTO_BLOCK_SIZE) == 0)
                {
                    digest = sha.ComputeHash(bytes.ToArray());
                    aesInitializationVector[i / (noOfRounds / CRYPTO_BLOCK_SIZE)] = digest[19];
                }
            }
            digest = sha.ComputeHash(bytes.ToArray());
            //slow code ends
            // Fold the first 16 digest bytes into the AES key (byte-order shuffle).
            byte[] aesKey = new byte[CRYPTO_BLOCK_SIZE];
            for (int i = 0; i < 4; i++)
                for (int j = 0; j < 4; j++)
                    aesKey[i * 4 + j] = (byte)
                                        (((digest[i * 4] * 0x1000000) & 0xff000000 |
                                          (uint)((digest[i * 4 + 1] * 0x10000) & 0xff0000) |
                                          (uint)((digest[i * 4 + 2] * 0x100) & 0xff00) |
                                          (uint)(digest[i * 4 + 3] & 0xff)) >> (j * 8));
            // The real IV is applied manually in ProcessBlock; the provider's IV stays zeroed.
            rijndael.IV = new byte[CRYPTO_BLOCK_SIZE];
            rijndael.Key = aesKey;
            // NOTE(review): BlockSize is assigned after Key/IV — some providers
            // regenerate key material when BlockSize changes; confirm the ordering
            // is intentional.
            rijndael.BlockSize = CRYPTO_BLOCK_SIZE * 8;
        }

        public static RarRijndael InitializeFrom(string password, byte[] salt)
        {
            var rijndael = new RarRijndael(password, salt);
            rijndael.Initialize();
            return rijndael;
        }

        // Decrypts one 16-byte block; CBC chaining is done manually by XORing with
        // the previous ciphertext block kept in aesInitializationVector.
        public byte[] ProcessBlock(byte[] cipherText)
        {
            var plainText = new byte[CRYPTO_BLOCK_SIZE];
            var decryptedBytes = new List<byte>();
            var decryptor = rijndael.CreateDecryptor();
            using (var msDecrypt = new MemoryStream(cipherText))
            {
                using (var csDecrypt = new CryptoStream(msDecrypt, decryptor, CryptoStreamMode.Read))
                {
                    csDecrypt.ReadFully(plainText);
                }
            }
            for (int j = 0; j < plainText.Length; j++)
                decryptedBytes.Add((byte)(plainText[j] ^ aesInitializationVector[j % 16])); //32:114, 33:101
            for (int j = 0; j < aesInitializationVector.Length; j++)
                aesInitializationVector[j] = cipherText[j];
            return decryptedBytes.ToArray();
        }

        public void Dispose()
        {
            ((IDisposable)rijndael).Dispose();
        }
    }
}
View File

@@ -1,13 +0,0 @@
using System;
namespace SharpCompress.Common
{
    /// <summary>
    /// Event payload wrapping a single item (typically the entry being processed).
    /// </summary>
    public class ReaderExtractionEventArgs<T> : EventArgs
    {
        internal ReaderExtractionEventArgs(T entry)
        {
            Item = entry;
        }

        /// <summary>
        /// The item this event refers to.
        /// </summary>
        public T Item { get; private set; }
    }
}

View File

@@ -1,161 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Compressor.LZMA;
using SharpCompress.Compressor.LZMA.Utilites;
namespace SharpCompress.Common.SevenZip
{
    // In-memory model of a parsed 7z archive header: pack stream sizes/CRCs,
    // folders (coder graphs), file entries, and the derived index maps between
    // them built by Fill().
    internal class ArchiveDatabase
    {
        internal byte MajorVersion;
        internal byte MinorVersion;
        internal long StartPositionAfterHeader;
        internal long DataStartPosition;

        internal List<long> PackSizes = new List<long>();
        internal List<uint?> PackCRCs = new List<uint?>();
        internal List<CFolder> Folders = new List<CFolder>();
        internal List<int> NumUnpackStreamsVector;
        internal List<CFileItem> Files = new List<CFileItem>();

        // Derived lookup tables, populated by Fill().
        internal List<long> PackStreamStartPositions = new List<long>();
        internal List<int> FolderStartFileIndex = new List<int>();
        internal List<int> FileIndexToFolderIndexMap = new List<int>();

        internal void Clear()
        {
            PackSizes.Clear();
            PackCRCs.Clear();
            Folders.Clear();
            NumUnpackStreamsVector = null;
            Files.Clear();
            PackStreamStartPositions.Clear();
            FolderStartFileIndex.Clear();
            FileIndexToFolderIndexMap.Clear();
        }

        // NOTE(review): Clear() nulls NumUnpackStreamsVector, so calling IsEmpty()
        // right after Clear() (or before parsing) throws NullReferenceException —
        // confirm intended call order.
        internal bool IsEmpty()
        {
            return PackSizes.Count == 0
                   && PackCRCs.Count == 0
                   && Folders.Count == 0
                   && NumUnpackStreamsVector.Count == 0
                   && Files.Count == 0;
        }

        // Computes each pack stream's relative offset as a running sum of PackSizes.
        private void FillStartPos()
        {
            PackStreamStartPositions.Clear();
            long startPos = 0;
            for (int i = 0; i < PackSizes.Count; i++)
            {
                PackStreamStartPositions.Add(startPos);
                startPos += PackSizes[i];
            }
        }

        // Maps each file to its folder (-1 for stream-less files preceding any
        // folder content) and records the first file index of every folder.
        private void FillFolderStartFileIndex()
        {
            FolderStartFileIndex.Clear();
            FileIndexToFolderIndexMap.Clear();
            int folderIndex = 0;
            int indexInFolder = 0;
            for (int i = 0; i < Files.Count; i++)
            {
                CFileItem file = Files[i];
                bool emptyStream = !file.HasStream;
                if (emptyStream && indexInFolder == 0)
                {
                    FileIndexToFolderIndexMap.Add(-1);
                    continue;
                }
                if (indexInFolder == 0)
                {
                    // v3.13 incorrectly worked with empty folders
                    // v4.07: Loop for skipping empty folders
                    for (; ; )
                    {
                        if (folderIndex >= Folders.Count)
                            throw new InvalidOperationException();
                        FolderStartFileIndex.Add(i); // check it
                        if (NumUnpackStreamsVector[folderIndex] != 0)
                            break;
                        folderIndex++;
                    }
                }
                FileIndexToFolderIndexMap.Add(folderIndex);
                if (emptyStream)
                    continue;
                indexInFolder++;
                if (indexInFolder >= NumUnpackStreamsVector[folderIndex])
                {
                    folderIndex++;
                    indexInFolder = 0;
                }
            }
        }

        // Builds all derived lookup tables; call after the header has been parsed.
        public void Fill()
        {
            FillStartPos();
            FillFolderStartFileIndex();
        }

        // Absolute stream position of the given pack stream within a folder.
        internal long GetFolderStreamPos(CFolder folder, int indexInFolder)
        {
            int index = folder.FirstPackStreamId + indexInFolder;
            return DataStartPosition + PackStreamStartPositions[index];
        }

        // Total packed size of all pack streams belonging to a folder.
        internal long GetFolderFullPackSize(int folderIndex)
        {
            int packStreamIndex = Folders[folderIndex].FirstPackStreamId;
            CFolder folder = Folders[folderIndex];
            long size = 0;
            for (int i = 0; i < folder.PackStreams.Count; i++)
                size += PackSizes[packStreamIndex + i];
            return size;
        }

        // Builds a decoding stream over the folder's packed data.
        internal Stream GetFolderStream(Stream stream, CFolder folder, IPasswordProvider pw)
        {
            int packStreamIndex = folder.FirstPackStreamId;
            long folderStartPackPos = GetFolderStreamPos(folder, 0);
            List<long> packSizes = new List<long>();
            for (int j = 0; j < folder.PackStreams.Count; j++)
                packSizes.Add(PackSizes[packStreamIndex + j]);
            return DecoderStreamHelper.CreateDecoderStream(stream, folderStartPackPos, packSizes.ToArray(), folder, pw);
        }

        private long GetFolderPackStreamSize(int folderIndex, int streamIndex)
        {
            return PackSizes[Folders[folderIndex].FirstPackStreamId + streamIndex];
        }

        // Pack size attributed to a file: the whole folder's packed size for the
        // folder's first file, 0 for every other file.
        private long GetFilePackSize(int fileIndex)
        {
            int folderIndex = FileIndexToFolderIndexMap[fileIndex];
            if (folderIndex != -1)
                if (FolderStartFileIndex[folderIndex] == fileIndex)
                    return GetFolderFullPackSize(folderIndex);
            return 0;
        }
    }
}

View File

@@ -1,10 +0,0 @@
namespace SharpCompress.Common.SevenZip
{
    // Describes one coder (codec instance) inside a 7z folder.
    internal class CCoderInfo
    {
        internal CMethodId MethodId;   // codec identifier (e.g. LZMA, AES)
        internal byte[] Props;         // raw codec properties from the header
        internal int NumInStreams;     // number of input streams the coder consumes
        internal int NumOutStreams;    // number of output streams the coder produces
    }
}

View File

@@ -1,139 +0,0 @@
using System;
using System.Collections.Generic;
using SharpCompress.Compressor.LZMA;
namespace SharpCompress.Common.SevenZip
{
    // A 7z "folder": a graph of coders whose streams are connected via bind
    // pairs, consuming pack streams and producing unpacked output.
    internal class CFolder
    {
        internal List<CCoderInfo> Coders = new List<CCoderInfo>();
        internal List<CBindPair> BindPairs = new List<CBindPair>();
        internal List<int> PackStreams = new List<int>();
        internal int FirstPackStreamId;
        internal List<long> UnpackSizes = new List<long>();
        internal uint? UnpackCRC;

        internal bool UnpackCRCDefined
        {
            get { return UnpackCRC != null; }
        }

        // The size of the folder's final output: the last out-stream that is not
        // consumed by a bind pair.
        public long GetUnpackSize()
        {
            if (UnpackSizes.Count == 0)
                return 0;
            for (int i = UnpackSizes.Count - 1; i >= 0; i--)
                if (FindBindPairForOutStream(i) < 0)
                    return UnpackSizes[i];
            throw new Exception();
        }

        public int GetNumOutStreams()
        {
            int count = 0;
            for (int i = 0; i < Coders.Count; i++)
                count += Coders[i].NumOutStreams;
            return count;
        }

        // Index of the bind pair consuming the given in-stream, or -1.
        public int FindBindPairForInStream(int inStreamIndex)
        {
            for (int i = 0; i < BindPairs.Count; i++)
                if (BindPairs[i].InIndex == inStreamIndex)
                    return i;
            return -1;
        }

        // Index of the bind pair consuming the given out-stream, or -1.
        public int FindBindPairForOutStream(int outStreamIndex)
        {
            for (int i = 0; i < BindPairs.Count; i++)
                if (BindPairs[i].OutIndex == outStreamIndex)
                    return i;
            return -1;
        }

        // Position of the given in-stream within PackStreams, or -1.
        public int FindPackStreamArrayIndex(int inStreamIndex)
        {
            for (int i = 0; i < PackStreams.Count; i++)
                if (PackStreams[i] == inStreamIndex)
                    return i;
            return -1;
        }

        // True when any coder in the chain is the AES codec.
        public bool IsEncrypted()
        {
            for (int i = Coders.Count - 1; i >= 0; i--)
                if (Coders[i].MethodId == CMethodId.kAES)
                    return true;
            return false;
        }

        // Validates the coder graph: every in-stream is consumed at most once,
        // every out-stream is bound at most once, and the bind graph is acyclic.
        public bool CheckStructure()
        {
            const int kNumCodersMax = 32; // don't change it
            const int kMaskSize = 32; // it must be >= kNumCodersMax
            const int kNumBindsMax = 32;
            if (Coders.Count > kNumCodersMax || BindPairs.Count > kNumBindsMax)
                return false;
            {
                // Each in-stream may be used by at most one bind pair or pack stream.
                var v = new BitVector(BindPairs.Count + PackStreams.Count);
                for (int i = 0; i < BindPairs.Count; i++)
                    if (v.GetAndSet(BindPairs[i].InIndex))
                        return false;
                for (int i = 0; i < PackStreams.Count; i++)
                    if (v.GetAndSet(PackStreams[i]))
                        return false;
            }
            {
                // Each out-stream may feed at most one bind pair.
                var v = new BitVector(UnpackSizes.Count);
                for (int i = 0; i < BindPairs.Count; i++)
                    if (v.GetAndSet(BindPairs[i].OutIndex))
                        return false;
            }
            uint[] mask = new uint[kMaskSize];
            {
                // Build a coder-to-coder adjacency mask from the bind pairs.
                List<int> inStreamToCoder = new List<int>();
                List<int> outStreamToCoder = new List<int>();
                for (int i = 0; i < Coders.Count; i++)
                {
                    CCoderInfo coder = Coders[i];
                    for (int j = 0; j < coder.NumInStreams; j++)
                        inStreamToCoder.Add(i);
                    for (int j = 0; j < coder.NumOutStreams; j++)
                        outStreamToCoder.Add(i);
                }
                for (int i = 0; i < BindPairs.Count; i++)
                {
                    CBindPair bp = BindPairs[i];
                    mask[inStreamToCoder[bp.InIndex]] |= (1u << outStreamToCoder[bp.OutIndex]);
                }
            }
            // Transitive closure of the adjacency mask ...
            for (int i = 0; i < kMaskSize; i++)
                for (int j = 0; j < kMaskSize; j++)
                    if (((1u << j) & mask[i]) != 0)
                        mask[i] |= mask[j];
            // ... then reject any coder reachable from itself (a cycle).
            for (int i = 0; i < kMaskSize; i++)
                if (((1u << i) & mask[i]) != 0)
                    return false;
            return true;
        }
    }
}
View File

@@ -1,55 +0,0 @@
namespace SharpCompress.Common.SevenZip
{
/// <summary>
/// Identifies a 7z codec by its numeric method id. A value type with pure
/// value equality: two ids compare equal exactly when their numbers match.
/// </summary>
internal struct CMethodId
{
    public const ulong kCopyId = 0;
    public const ulong kLzmaId = 0x030101;
    public const ulong kLzma2Id = 0x21;
    public const ulong kAESId = 0x06F10701;

    public static readonly CMethodId kCopy = new CMethodId(kCopyId);
    public static readonly CMethodId kLzma = new CMethodId(kLzmaId);
    public static readonly CMethodId kLzma2 = new CMethodId(kLzma2Id);
    public static readonly CMethodId kAES = new CMethodId(kAESId);

    public readonly ulong Id;

    public CMethodId(ulong id)
    {
        Id = id;
    }

    /// <summary>Hash is simply the hash of the underlying numeric id.</summary>
    public override int GetHashCode()
    {
        return Id.GetHashCode();
    }

    /// <summary>A boxed value is equal only when it is a CMethodId with the same id.</summary>
    public override bool Equals(object obj)
    {
        if (!(obj is CMethodId))
        {
            return false;
        }
        return ((CMethodId)obj).Id == Id;
    }

    public bool Equals(CMethodId other)
    {
        return other.Id == Id;
    }

    public static bool operator ==(CMethodId left, CMethodId right)
    {
        return left.Equals(right);
    }

    public static bool operator !=(CMethodId left, CMethodId right)
    {
        return !left.Equals(right);
    }

    /// <summary>Number of bytes needed to encode the id (0 for the copy codec).</summary>
    public int GetLength()
    {
        int byteCount = 0;
        ulong remaining = Id;
        while (remaining != 0)
        {
            remaining >>= 8;
            byteCount++;
        }
        return byteCount;
    }
}
}

View File

@@ -1,85 +0,0 @@
using System;
using System.Collections.Generic;
namespace SharpCompress.Common.SevenZip
{
/// <summary>
/// Entry describing a single item inside a 7Zip archive.
/// </summary>
public class SevenZipEntry : Entry
{
    internal SevenZipEntry(SevenZipFilePart filePart)
    {
        FilePart = filePart;
    }

    internal SevenZipFilePart FilePart { get; private set; }

    public override CompressionType CompressionType => FilePart.CompressionType;

    /// <summary>CRC recorded in the archive header, or 0 when absent.</summary>
    public override long Crc => FilePart.Header.Crc ?? 0;

    public override string Key => FilePart.Header.Name;

    /// <summary>Per-entry compressed size is not tracked here; always 0.</summary>
    public override long CompressedSize => 0;

    public override long Size => (long)FilePart.Header.Size;

    public override DateTime? LastModifiedTime => FilePart.Header.MTime;

    public override DateTime? CreatedTime => null;

    public override DateTime? LastAccessedTime => null;

    public override DateTime? ArchivedTime => null;

    public override bool IsEncrypted => false;

    public override bool IsDirectory => FilePart.Header.IsDir;

    public override bool IsSplit => false;

    public override int? Attrib => (int)FilePart.Header.Attrib;

    internal override IEnumerable<FilePart> Parts => FilePart.AsEnumerable<FilePart>();
}
}

View File

@@ -1,109 +0,0 @@
using System;
using System.IO;
using System.Linq;
using SharpCompress.IO;
namespace SharpCompress.Common.SevenZip
{
/// <summary>
/// A single file's slice of a 7Zip archive: locates the file inside its
/// folder's decoded stream and exposes it as a bounded read-only stream.
/// </summary>
internal class SevenZipFilePart : FilePart
{
    private CompressionType? knownType;
    private Stream archiveStream;
    private ArchiveDatabase db;

    internal SevenZipFilePart(Stream stream, ArchiveDatabase database, int index, CFileItem fileEntry)
    {
        archiveStream = stream;
        db = database;
        Index = index;
        Header = fileEntry;
        if (Header.HasStream)
        {
            Folder = database.Folders[database.FileIndexToFolderIndexMap[index]];
        }
    }

    // NOTE(review): never assigned in this class — always null as far as this file shows.
    internal Stream BaseStream { get; private set; }
    internal CFileItem Header { get; private set; }
    internal CFolder Folder { get; private set; }
    internal int Index { get; private set; }

    internal override string FilePartName => Header.Name;

    internal override Stream GetRawStream()
    {
        return null;
    }

    /// <summary>
    /// Opens the folder's decoded stream, skips the files stored before this
    /// one, and returns a stream bounded to this file's size. Returns null
    /// for entries without a stream (e.g. directories).
    /// </summary>
    internal override Stream GetCompressedStream()
    {
        if (!Header.HasStream)
        {
            return null;
        }
        Stream folderStream = db.GetFolderStream(archiveStream, Folder, null);

        // Files sharing a folder are stored back to back inside the folder's
        // decoded stream; sum the sizes of everything that precedes this file.
        int firstFileIndex = db.FolderStartFileIndex[db.Folders.IndexOf(Folder)];
        long precedingBytes = 0;
        for (int fileIndex = firstFileIndex; fileIndex < Index; fileIndex++)
        {
            precedingBytes += db.Files[fileIndex].Size;
        }
        if (precedingBytes > 0)
        {
            folderStream.Skip(precedingBytes);
        }
        return new ReadOnlySubStream(folderStream, Header.Size);
    }

    /// <summary>Lazily resolved compression type of this file's folder.</summary>
    public CompressionType CompressionType
    {
        get
        {
            if (knownType == null)
            {
                knownType = GetCompression();
            }
            return knownType.Value;
        }
    }

    //copied from DecoderRegistry
    private const uint k_Copy = 0x0;
    private const uint k_Delta = 3;
    private const uint k_LZMA2 = 0x21;
    private const uint k_LZMA = 0x030101;
    private const uint k_PPMD = 0x030401;
    private const uint k_BCJ = 0x03030103;
    private const uint k_BCJ2 = 0x0303011B;
    private const uint k_Deflate = 0x040108;
    private const uint k_BZip2 = 0x040202;

    /// <summary>
    /// Maps the folder's first coder onto a CompressionType; throws
    /// NotImplementedException for codecs with no mapping.
    /// </summary>
    internal CompressionType GetCompression()
    {
        var firstCoder = Folder.Coders.First();
        ulong methodId = firstCoder.MethodId.Id;
        if (methodId == k_LZMA || methodId == k_LZMA2)
        {
            return CompressionType.LZMA;
        }
        if (methodId == k_PPMD)
        {
            return CompressionType.PPMd;
        }
        if (methodId == k_BZip2)
        {
            return CompressionType.BZip2;
        }
        throw new NotImplementedException();
    }
}
}

View File

@@ -1,12 +0,0 @@
using System.IO;
namespace SharpCompress.Common.SevenZip
{
/// <summary>
/// The single stream/volume backing a 7Zip archive; all behavior comes from
/// the <see cref="Volume"/> base class.
/// </summary>
public class SevenZipVolume : Volume
{
    public SevenZipVolume(Stream stream, Options options)
        : base(stream, options)
    {
    }
}
}

View File

@@ -1,104 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
namespace SharpCompress.Common.Tar
{
/// <summary>
/// Entry describing a single item inside a tar archive.
/// </summary>
public class TarEntry : Entry
{
    private readonly TarFilePart filePart;
    private readonly CompressionType type;

    internal TarEntry(TarFilePart filePart, CompressionType type)
    {
        this.filePart = filePart;
        this.type = type;
    }

    public override CompressionType CompressionType => type;

    /// <summary>Not tracked for tar; always 0.</summary>
    public override long Crc => 0;

    public override string Key => filePart.Header.Name;

    /// <summary>Same as <see cref="Size"/>; tar itself does not compress entries.</summary>
    public override long CompressedSize => filePart.Header.Size;

    public override long Size => filePart.Header.Size;

    public override DateTime? LastModifiedTime => filePart.Header.LastModifiedTime;

    public override DateTime? CreatedTime => null;

    public override DateTime? LastAccessedTime => null;

    public override DateTime? ArchivedTime => null;

    public override bool IsEncrypted => false;

    public override bool IsDirectory => filePart.Header.EntryType == EntryType.Directory;

    public override bool IsSplit => false;

    internal override IEnumerable<FilePart> Parts => filePart.AsEnumerable<FilePart>();

    /// <summary>
    /// Walks the tar headers in <paramref name="stream"/> and yields one entry
    /// per header. In seekable mode the part keeps a reference to the stream;
    /// in streaming mode it does not.
    /// </summary>
    internal static IEnumerable<TarEntry> GetEntries(StreamingMode mode, Stream stream,
                                                    CompressionType compressionType)
    {
        foreach (TarHeader header in TarHeaderFactory.ReadHeader(mode, stream))
        {
            if (header == null)
            {
                continue;
            }
            Stream partStream = mode == StreamingMode.Seekable ? stream : null;
            yield return new TarEntry(new TarFilePart(header, partStream), compressionType);
        }
    }
}
}

View File

@@ -1,12 +0,0 @@
using System.IO;
namespace SharpCompress.Common.Tar
{
/// <summary>
/// The single stream/volume backing a tar archive; all behavior comes from
/// the <see cref="Volume"/> base class.
/// </summary>
public class TarVolume : Volume
{
    public TarVolume(Stream stream, Options options)
        : base(stream, options)
    {
    }
}
}

View File

@@ -1,51 +0,0 @@
using System.IO;
using SharpCompress.IO;
namespace SharpCompress.Common
{
/// <summary>
/// Base class for a single archive volume: wraps the underlying stream and
/// owns its disposal (unless the caller asked to keep streams open).
/// </summary>
public abstract class Volume : IVolume
{
    private readonly Stream underlyingStream;
    private bool disposed;

    internal Volume(Stream stream, Options options)
    {
        underlyingStream = stream;
        Options = options;
    }

    /// <summary>
    /// A non-disposing wrapper is handed out so consumers cannot close the
    /// underlying stream; a fresh wrapper is created on every access.
    /// </summary>
    internal Stream Stream
    {
        get { return new NonDisposingStream(underlyingStream); }
    }

    internal Options Options { get; private set; }

    /// <summary>
    /// RarArchive is the first volume of a multi-part archive.
    /// Only Rar 3.0 format and higher
    /// </summary>
    public virtual bool IsFirstVolume
    {
        get { return true; }
    }

    /// <summary>
    /// RarArchive is part of a multi-part archive.
    /// </summary>
    public virtual bool IsMultiVolume
    {
        get { return true; }
    }

    /// <summary>
    /// Disposes the wrapped stream once, unless KeepStreamsOpen was requested.
    /// </summary>
    public void Dispose()
    {
        if (disposed || Options.HasFlag(Options.KeepStreamsOpen))
        {
            return;
        }
        underlyingStream.Dispose();
        disposed = true;
    }
}
}

View File

@@ -1,61 +0,0 @@
using System.IO;
using System.Linq;
namespace SharpCompress.Common.Zip.Headers
{
/// <summary>
/// Zip local file header: the per-entry record that precedes the entry's
/// compressed data in the archive stream.
/// </summary>
internal class LocalEntryHeader : ZipFileEntry
{
    public LocalEntryHeader()
        : base(ZipHeaderType.LocalEntry)
    {
    }

    /// <summary>
    /// Reads the fixed-size fields, then the variable-length name and extra
    /// data. The field order is exactly the on-disk record layout — do not
    /// reorder these reads.
    /// </summary>
    internal override void Read(BinaryReader reader)
    {
        Version = reader.ReadUInt16();
        Flags = (HeaderFlags) reader.ReadUInt16();
        CompressionMethod = (ZipCompressionMethod) reader.ReadUInt16();
        LastModifiedTime = reader.ReadUInt16();
        LastModifiedDate = reader.ReadUInt16();
        Crc = reader.ReadUInt32();
        CompressedSize = reader.ReadUInt32();
        UncompressedSize = reader.ReadUInt32();
        ushort nameLength = reader.ReadUInt16();
        ushort extraLength = reader.ReadUInt16();
        byte[] name = reader.ReadBytes(nameLength);
        byte[] extra = reader.ReadBytes(extraLength);
        Name = DecodeString(name);
        LoadExtra(extra);
        // An Info-ZIP Unicode Path extra field, when present, overrides the
        // name decoded from the standard header bytes.
        var unicodePathExtra = Extra.FirstOrDefault(u => u.Type == ExtraDataType.UnicodePathExtraField);
        if (unicodePathExtra!=null)
        {
            Name = ((ExtraUnicodePathExtraField) unicodePathExtra).UnicodeName;
        }
    }

    /// <summary>
    /// Writes the header back out. Note: the extra-field length is always
    /// written as 0 and no extra data is emitted.
    /// </summary>
    internal override void Write(BinaryWriter writer)
    {
        writer.Write(Version);
        writer.Write((ushort) Flags);
        writer.Write((ushort) CompressionMethod);
        writer.Write(LastModifiedTime);
        writer.Write(LastModifiedDate);
        writer.Write(Crc);
        writer.Write(CompressedSize);
        writer.Write(UncompressedSize);
        byte[] nameBytes = EncodeString(Name);
        writer.Write((ushort) nameBytes.Length);
        writer.Write((ushort) 0); // extra-field length: none is written
        //if (Extra != null)
        //{
        //    writer.Write(Extra);
        //}
        writer.Write(nameBytes);
    }

    // "version needed to extract" as read from the header.
    internal ushort Version { get; private set; }
}
}

View File

@@ -1,75 +0,0 @@
using System;
using System.Text;
namespace SharpCompress.Common.Zip.Headers
{
/// <summary>
/// Known zip extra-field record ids; anything unrecognized is mapped to
/// NotImplementedExtraData by the parser.
/// </summary>
internal enum ExtraDataType : ushort
{
    WinZipAes = 0x9901,
    NotImplementedExtraData = 0xFFFF,

    // Third Party Mappings
    // -Info-ZIP Unicode Path Extra Field
    UnicodePathExtraField = 0x7075
}
/// <summary>
/// One raw zip extra-field record: a type id, the declared data length, and
/// the record's data bytes.
/// </summary>
internal class ExtraData
{
    internal ExtraDataType Type { get; set; }
    // Declared length of DataBytes as read from the record header.
    internal ushort Length { get; set; }
    internal byte[] DataBytes { get; set; }
}
/// <summary>
/// Info-ZIP Unicode Path extra field (0x7075). Data layout: byte 0 is a
/// version, bytes 1..4 are a CRC32 of the standard name, the rest is the
/// entry name encoded as UTF-8.
/// </summary>
internal class ExtraUnicodePathExtraField : ExtraData
{
    internal byte Version => DataBytes[0];

    internal byte[] NameCRC32
    {
        get
        {
            var checksum = new byte[4];
            Buffer.BlockCopy(DataBytes, 1, checksum, 0, 4);
            return checksum;
        }
    }

    internal string UnicodeName
    {
        get
        {
            // Name occupies everything after the version byte (1) and CRC32 (4).
            int nameByteCount = Length - 5;
            return Encoding.UTF8.GetString(DataBytes, 5, nameByteCount);
        }
    }
}
/// <summary>
/// Builds the strongly-typed wrapper for a parsed extra-field record,
/// falling back to the plain <see cref="ExtraData"/> container for
/// unrecognized or unhandled record types.
/// </summary>
internal static class LocalEntryHeaderExtraFactory
{
    internal static ExtraData Create(ExtraDataType type, ushort length, byte[] extraData)
    {
        if (type == ExtraDataType.UnicodePathExtraField)
        {
            return new ExtraUnicodePathExtraField
            {
                Type = type,
                Length = length,
                DataBytes = extraData
            };
        }
        return new ExtraData
        {
            Type = type,
            Length = length,
            DataBytes = extraData
        };
    }
}
}

View File

@@ -1,88 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace SharpCompress.Common.Zip.Headers
{
/// <summary>
/// Common state shared by zip entry headers: name, flags, sizes, encryption
/// data and the parsed extra-field records.
/// </summary>
internal abstract class ZipFileEntry : ZipHeader
{
    protected ZipFileEntry(ZipHeaderType type)
        : base(type)
    {
        Extra = new List<ExtraData>();
    }

    /// <summary>Zip convention: directory entries carry a trailing slash.</summary>
    internal bool IsDirectory
    {
        get { return Name.EndsWith("/"); }
    }

    /// <summary>
    /// Decodes name bytes as UTF-8 when the UTF8 header flag is set, otherwise
    /// with the archive's default encoding.
    /// </summary>
    protected string DecodeString(byte[] str)
    {
        if (FlagUtility.HasFlag(Flags, HeaderFlags.UTF8))
        {
            return Encoding.UTF8.GetString(str, 0, str.Length);
        }
        return ArchiveEncoding.Default.GetString(str, 0, str.Length);
    }

    /// <summary>Inverse of <see cref="DecodeString"/>, honoring the same UTF8 flag.</summary>
    protected byte[] EncodeString(string str)
    {
        if (FlagUtility.HasFlag(Flags, HeaderFlags.UTF8))
        {
            return Encoding.UTF8.GetBytes(str);
        }
        return ArchiveEncoding.Default.GetBytes(str);
    }

    internal Stream PackedStream { get; set; }
    internal string Name { get; set; }
    internal HeaderFlags Flags { get; set; }
    internal ZipCompressionMethod CompressionMethod { get; set; }
    internal uint CompressedSize { get; set; }
    internal long? DataStartPosition { get; set; }
    internal uint UncompressedSize { get; set; }
    internal List<ExtraData> Extra { get; set; }
    internal PkwareTraditionalEncryptionData PkwareTraditionalEncryptionData { get; set; }
#if !PORTABLE && !NETFX_CORE
    internal WinzipAesEncryptionData WinzipAesEncryptionData { get; set; }
#endif
    internal ushort LastModifiedDate { get; set; }
    internal ushort LastModifiedTime { get; set; }
    internal uint Crc { get; set; }

    /// <summary>
    /// Parses the extra-field blob into typed records. Each record is a 2-byte
    /// id, a 2-byte length, then that many data bytes. A record whose declared
    /// length runs past the end of the blob (malformed or truncated input) is
    /// ignored instead of throwing.
    /// </summary>
    protected void LoadExtra(byte[] extra)
    {
        for (int i = 0; i < extra.Length - 4;)
        {
            ExtraDataType type = (ExtraDataType) BitConverter.ToUInt16(extra, i);
            if (!Enum.IsDefined(typeof (ExtraDataType), type))
            {
                type = ExtraDataType.NotImplementedExtraData;
            }
            ushort length = BitConverter.ToUInt16(extra, i + 2);
            // Bounds guard: a bad declared length would otherwise make
            // Buffer.BlockCopy throw on a truncated record.
            if (i + 4 + length > extra.Length)
            {
                break;
            }
            byte[] data = new byte[length];
            Buffer.BlockCopy(extra, i + 4, data, 0, length);
            Extra.Add(LocalEntryHeaderExtraFactory.Create(type, length, data));
            i += length + 4;
        }
    }

    internal ZipFilePart Part { get; set; }
}
}

View File

@@ -1,64 +0,0 @@
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip
{
/// <summary>
/// Produces zip headers from a forward-only stream (streaming mode). The input
/// is wrapped in a <see cref="RewindableStream"/> so that header probing can
/// back up after peeking ahead.
/// </summary>
internal class StreamingZipHeaderFactory : ZipHeaderFactory
{
    internal StreamingZipHeaderFactory(string password)
        : base(StreamingMode.Streaming, password)
    {
    }

    /// <summary>
    /// Lazily reads consecutive headers from <paramref name="stream"/>.
    /// NOTE(review): the loop has no explicit exit; enumeration presumably
    /// stops via the consumer or an end-of-stream read failure — confirm with
    /// callers.
    /// </summary>
    internal IEnumerable<ZipHeader> ReadStreamHeader(Stream stream)
    {
        RewindableStream rewindableStream;
        if (stream is RewindableStream)
        {
            rewindableStream = stream as RewindableStream;
        }
        else
        {
            rewindableStream = new RewindableStream(stream);
        }
        while (true)
        {
            ZipHeader header = null;
            BinaryReader reader = new BinaryReader(rewindableStream);
            // The previous entry used a post-data descriptor: its CRC and
            // sizes live AFTER the data, so read them now and backfill.
            if (lastEntryHeader != null &&
                FlagUtility.HasFlag(lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor))
            {
                reader = (lastEntryHeader.Part as StreamingZipFilePart).FixStreamedFileLocation(ref rewindableStream);
                long pos = rewindableStream.Position;
                uint crc = reader.ReadUInt32();
                // The descriptor may optionally begin with a signature word;
                // when present, the CRC is the next value.
                if (crc == POST_DATA_DESCRIPTOR)
                {
                    crc = reader.ReadUInt32();
                }
                lastEntryHeader.Crc = crc;
                lastEntryHeader.CompressedSize = reader.ReadUInt32();
                lastEntryHeader.UncompressedSize = reader.ReadUInt32();
                lastEntryHeader.DataStartPosition = pos - lastEntryHeader.CompressedSize;
            }
            lastEntryHeader = null;
            uint headerBytes = reader.ReadUInt32();
            header = ReadHeader(headerBytes, reader);
            if (header != null) {
                // entry could be zero bytes so we need to know that.
                if(header.ZipHeaderType == ZipHeaderType.LocalEntry) {
                    // Peek at the next 4 bytes: if they are already another
                    // header signature, this entry has no data. Record/rewind
                    // so the peeked bytes are re-read on the next iteration.
                    bool isRecording = rewindableStream.IsRecording;
                    if (!isRecording) {
                        rewindableStream.StartRecording();
                    }
                    uint nextHeaderBytes = reader.ReadUInt32();
                    header.HasData = !IsHeader(nextHeaderBytes);
                    rewindableStream.Rewind(!isRecording);
                }
            }
            yield return header;
        }
    }
}
}

View File

@@ -1,170 +0,0 @@
using System;
using System.IO;
using Org.BouncyCastle.Crypto;
using Org.BouncyCastle.Crypto.Engines;
using Org.BouncyCastle.Crypto.Parameters;
namespace SharpCompress.Common.Zip
{
/// <summary>
/// Read-only decrypting stream for WinZip AES entries. Works counter-mode
/// style: a 16-byte counter block is encrypted with AES (BouncyCastle
/// Rijndael) and XORed against the ciphertext, 16 bytes at a time.
/// </summary>
internal class WinzipAesCryptoStream : Stream
{
    private const int BLOCK_SIZE_IN_BYTES = 16;
    private readonly IBufferedCipher rijndael;
    // Counter block; its first 4 bytes hold the running block counter.
    private readonly byte[] counter = new byte[BLOCK_SIZE_IN_BYTES];
    private readonly Stream stream;
    // Block counter starts at 1 per the WinZip AES scheme as implemented here.
    private int nonce = 1;
    // Encrypted counter block, XORed against the data to decrypt it.
    private byte[] counterOut = new byte[BLOCK_SIZE_IN_BYTES];
    private bool isFinalBlock;
    // Bytes of ciphertext remaining (excludes the trailing 10 auth bytes).
    private long totalBytesLeftToRead;
    private bool isDisposed;

    internal WinzipAesCryptoStream(Stream stream, WinzipAesEncryptionData winzipAesEncryptionData, long length)
    {
        this.stream = stream;
        totalBytesLeftToRead = length;
        rijndael = CreateRijndael(winzipAesEncryptionData);
    }

    // The cipher is initialized for ENCRYPTION even when decrypting: CTR-style
    // decryption encrypts the counter and XORs, so both directions are the same.
    private IBufferedCipher CreateRijndael(WinzipAesEncryptionData winzipAesEncryptionData)
    {
        var blockCipher = new BufferedBlockCipher(new RijndaelEngine());
        var param = new KeyParameter(winzipAesEncryptionData.KeyBytes);
        blockCipher.Init(true, param);
        return blockCipher;
    }

    public override bool CanRead
    {
        get { return true; }
    }

    public override bool CanSeek
    {
        get { return false; }
    }

    public override bool CanWrite
    {
        get { return false; }
    }

    public override long Length
    {
        get { throw new NotImplementedException(); }
    }

    public override long Position
    {
        get { throw new NotImplementedException(); }
        set { throw new NotImplementedException(); }
    }

    protected override void Dispose(bool disposing)
    {
        if (isDisposed)
        {
            return;
        }
        isDisposed = true;
        if (disposing)
        {
            //read out last 10 auth bytes
            // NOTE(review): the authentication bytes are consumed but never
            // verified here — tampering would not be detected by this stream.
            var ten = new byte[10];
            stream.Read(ten, 0, 10);
            stream.Dispose();
        }
    }

    public override void Flush()
    {
        throw new NotImplementedException();
    }

    /// <summary>
    /// Reads up to <paramref name="count"/> bytes of ciphertext (capped at the
    /// remaining data length) and decrypts them in place in the buffer.
    /// </summary>
    public override int Read(byte[] buffer, int offset, int count)
    {
        if (totalBytesLeftToRead == 0)
        {
            return 0;
        }
        int bytesToRead = count;
        if (count > totalBytesLeftToRead)
        {
            bytesToRead = (int)totalBytesLeftToRead;
        }
        int read = stream.Read(buffer, offset, bytesToRead);
        totalBytesLeftToRead -= read;
        ReadTransformBlocks(buffer, offset, read);
        return read;
    }

    // Decrypts one 16-byte (or shorter, final) block in place; returns the
    // number of bytes processed.
    private int ReadTransformOneBlock(byte[] buffer, int offset, int last)
    {
        if (isFinalBlock)
        {
            throw new InvalidOperationException();
        }
        int bytesRemaining = last - offset;
        int bytesToRead = (bytesRemaining > BLOCK_SIZE_IN_BYTES)
            ? BLOCK_SIZE_IN_BYTES
            : bytesRemaining;

        // update the counter: the incrementing nonce occupies the counter
        // block's first 4 bytes (BitConverter byte order).
        Array.Copy(BitConverter.GetBytes(nonce++), 0, counter, 0, 4);

        // Determine if this is the final block
        if ((bytesToRead == bytesRemaining) && (totalBytesLeftToRead == 0))
        {
            counterOut = rijndael.DoFinal(counter, 0, BLOCK_SIZE_IN_BYTES);
            isFinalBlock = true;
        }
        else
        {
            rijndael.ProcessBytes(counter, 0, BLOCK_SIZE_IN_BYTES, counterOut, 0);
        }
        XorInPlace(buffer, offset, bytesToRead);
        return bytesToRead;
    }

    // XORs the encrypted counter block against the ciphertext to recover plaintext.
    private void XorInPlace(byte[] buffer, int offset, int count)
    {
        for (int i = 0; i < count; i++)
        {
            buffer[offset + i] = (byte)(counterOut[i] ^ buffer[offset + i]);
        }
    }

    // Walks the freshly-read region block by block, decrypting in place.
    private void ReadTransformBlocks(byte[] buffer, int offset, int count)
    {
        int posn = offset;
        int last = count + offset;
        while (posn < buffer.Length && posn < last)
        {
            int n = ReadTransformOneBlock(buffer, posn, last);
            posn += n;
        }
    }

    public override long Seek(long offset, SeekOrigin origin)
    {
        throw new NotImplementedException();
    }

    public override void SetLength(long value)
    {
        throw new NotImplementedException();
    }

    public override void Write(byte[] buffer, int offset, int count)
    {
        throw new NotImplementedException();
    }
}
}

View File

@@ -1,68 +0,0 @@
using System;
using System.Text;
using SharpCompress.Crypto;
namespace SharpCompress.Common.Zip
{
/// <summary>
/// Derives the AES key material for a WinZip AES entry from the password and
/// salt via PBKDF2, and checks the 2-byte password verification value.
/// </summary>
internal class WinzipAesEncryptionData
{
    // PBKDF2 iteration count used by this scheme.
    private const int RFC2898_ITERATIONS = 1000;

    private byte[] salt;
    private WinzipAesKeySize keySize;
    private byte[] passwordVerifyValue;
    private string password;
    private byte[] generatedVerifyValue;

    internal WinzipAesEncryptionData(WinzipAesKeySize keySize, byte[] salt, byte[] passwordVerifyValue,
                                     string password)
    {
        this.keySize = keySize;
        this.salt = salt;
        this.passwordVerifyValue = passwordVerifyValue;
        this.password = password;
        Initialize();
    }

    internal byte[] IvBytes { get; set; }
    internal byte[] KeyBytes { get; set; }

    private int KeySizeInBytes
    {
        get { return KeyLengthInBytes(keySize); }
    }

    /// <summary>Maps the declared key-size enum to its byte length.</summary>
    internal static int KeyLengthInBytes(WinzipAesKeySize keySize)
    {
        switch (keySize)
        {
            case WinzipAesKeySize.KeySize128:
                return 16;
            case WinzipAesKeySize.KeySize192:
                return 24;
            case WinzipAesKeySize.KeySize256:
                return 32;
        }
        throw new InvalidOperationException();
    }

    // Derives, in order: key bytes, IV bytes, then the 2-byte verify value.
    // The derivation order matters — PBKDF2 output is consumed sequentially.
    private void Initialize()
    {
        var utf8 = new UTF8Encoding(false);
        // NOTE(review): a null password would throw here (GetBytes), yet the
        // verification below is guarded by password != null — confirm whether
        // null passwords are ever expected to reach this type.
        var paramz = new PBKDF2(utf8.GetBytes(password), salt, RFC2898_ITERATIONS);
        KeyBytes = paramz.GetBytes(KeySizeInBytes);
        IvBytes = paramz.GetBytes(KeySizeInBytes);
        generatedVerifyValue = paramz.GetBytes(2);
        short verify = BitConverter.ToInt16(passwordVerifyValue, 0);
        if (password != null)
        {
            short generated = BitConverter.ToInt16(generatedVerifyValue, 0);
            if (verify != generated)
                throw new InvalidFormatException("bad password");
        }
    }
}
}

View File

@@ -1,116 +0,0 @@
using System;
using System.Collections.Generic;
using SharpCompress.Common.Zip.Headers;
namespace SharpCompress.Common.Zip
{
/// <summary>
/// Entry describing a single item inside a zip archive.
/// </summary>
public class ZipEntry : Entry
{
    private readonly ZipFilePart filePart;
    private readonly DateTime? lastModifiedTime;

    internal ZipEntry(ZipFilePart filePart)
    {
        if (filePart == null)
        {
            return;
        }
        this.filePart = filePart;
        lastModifiedTime = Utility.DosDateToDateTime(filePart.Header.LastModifiedDate,
                                                     filePart.Header.LastModifiedTime);
    }

    /// <summary>
    /// Maps the header's zip compression method onto the library-wide
    /// CompressionType; unmapped methods report Unknown.
    /// </summary>
    public override CompressionType CompressionType
    {
        get
        {
            switch (filePart.Header.CompressionMethod)
            {
                case ZipCompressionMethod.BZip2:
                    return CompressionType.BZip2;
                case ZipCompressionMethod.Deflate:
                    return CompressionType.Deflate;
                case ZipCompressionMethod.LZMA:
                    return CompressionType.LZMA;
                case ZipCompressionMethod.PPMd:
                    return CompressionType.PPMd;
                case ZipCompressionMethod.None:
                    return CompressionType.None;
                default:
                    return CompressionType.Unknown;
            }
        }
    }

    public override long Crc => filePart.Header.Crc;

    public override string Key => filePart.Header.Name;

    public override long CompressedSize => filePart.Header.CompressedSize;

    public override long Size => filePart.Header.UncompressedSize;

    /// <summary>Converted once from the header's DOS date/time in the constructor.</summary>
    public override DateTime? LastModifiedTime => lastModifiedTime;

    public override DateTime? CreatedTime => null;

    public override DateTime? LastAccessedTime => null;

    public override DateTime? ArchivedTime => null;

    public override bool IsEncrypted => FlagUtility.HasFlag(filePart.Header.Flags, HeaderFlags.Encrypted);

    public override bool IsDirectory => filePart.Header.IsDirectory;

    public override bool IsSplit => false;

    internal override IEnumerable<FilePart> Parts => filePart.AsEnumerable<FilePart>();
}
}

View File

@@ -1,169 +0,0 @@
using System;
using System.IO;
using System.Linq;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressor;
using SharpCompress.Compressor.BZip2;
using SharpCompress.Compressor.Deflate;
using SharpCompress.Compressor.LZMA;
using SharpCompress.Compressor.PPMd;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip
{
/// <summary>
/// Base for zip entry parts: owns the entry header and builds the chain of
/// crypto + decompression streams over the raw entry data.
/// </summary>
internal abstract class ZipFilePart : FilePart
{
    internal ZipFilePart(ZipFileEntry header, Stream stream)
    {
        Header = header;
        header.Part = this;
        this.BaseStream = stream;
    }

    internal Stream BaseStream { get; private set; }
    internal ZipFileEntry Header { get; set; }

    internal override string FilePartName
    {
        get { return Header.Name; }
    }

    // Decrypted + decompressed view of the entry's data.
    internal override Stream GetCompressedStream()
    {
        if (!Header.HasData)
        {
            return Stream.Null;
        }
        Stream decompressionStream = CreateDecompressionStream(GetCryptoStream(CreateBaseStream()));
        if (LeaveStreamOpen)
        {
            return new NonDisposingStream(decompressionStream);
        }
        return decompressionStream;
    }

    // Raw (still compressed/encrypted) entry bytes.
    internal override Stream GetRawStream()
    {
        if (!Header.HasData)
        {
            return Stream.Null;
        }
        return CreateBaseStream();
    }

    // Subclasses position the underlying stream at the entry's data.
    protected abstract Stream CreateBaseStream();

    // Post-data-descriptor entries don't know their length up front, so the
    // stream must stay open for the descriptor to be read afterwards.
    protected bool LeaveStreamOpen
    {
        get { return FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor); }
    }

    /// <summary>
    /// Wraps <paramref name="stream"/> with the decompressor selected by the
    /// header's compression method.
    /// </summary>
    protected Stream CreateDecompressionStream(Stream stream)
    {
        switch (Header.CompressionMethod)
        {
            case ZipCompressionMethod.None:
            {
                return stream;
            }
            case ZipCompressionMethod.Deflate:
            {
                return new DeflateStream(stream, CompressionMode.Decompress);
            }
            case ZipCompressionMethod.BZip2:
            {
                return new BZip2Stream(stream, CompressionMode.Decompress);
            }
            case ZipCompressionMethod.LZMA:
            {
                if (FlagUtility.HasFlag(Header.Flags, HeaderFlags.Encrypted))
                {
                    throw new NotSupportedException("LZMA with pkware encryption.");
                }
                // Zip-LZMA data begins with a version word and a
                // length-prefixed LZMA properties blob; consume both before
                // handing the remainder to the decoder.
                var reader = new BinaryReader(stream);
                reader.ReadUInt16(); //LZMA version
                var props = new byte[reader.ReadUInt16()];
                reader.Read(props, 0, props.Length);
                return new LzmaStream(props, stream,
                                      Header.CompressedSize > 0 ? Header.CompressedSize - 4 - props.Length : -1,
                                      FlagUtility.HasFlag(Header.Flags, HeaderFlags.Bit1)
                                          ? -1
                                          : (long)Header.UncompressedSize);
            }
            case ZipCompressionMethod.PPMd:
            {
                // PPMd properties are the first 2 bytes of the data.
                var props = new byte[2];
                stream.Read(props, 0, props.Length);
                return new PpmdStream(new PpmdProperties(props), stream, false);
            }
            case ZipCompressionMethod.WinzipAes:
            {
                // The real compression method is hidden inside the WinZip AES
                // extra record; validate the record, swap the method in, and
                // re-dispatch through this same switch.
                ExtraData data = Header.Extra.Where(x => x.Type == ExtraDataType.WinZipAes).SingleOrDefault();
                if (data == null)
                {
                    throw new InvalidFormatException("No Winzip AES extra data found.");
                }
                if (data.Length != 7)
                {
                    throw new InvalidFormatException("Winzip data length is not 7.");
                }
                ushort method = BitConverter.ToUInt16(data.DataBytes, 0);
                if (method != 0x01 && method != 0x02)
                {
                    throw new InvalidFormatException("Unexpected vendor version number for WinZip AES metadata");
                }
                ushort vendorId = BitConverter.ToUInt16(data.DataBytes, 2);
                if (vendorId != 0x4541)
                {
                    throw new InvalidFormatException("Unexpected vendor ID for WinZip AES metadata");
                }
                Header.CompressionMethod = (ZipCompressionMethod)BitConverter.ToUInt16(data.DataBytes, 5);
                return CreateDecompressionStream(stream);
            }
            default:
            {
                throw new NotSupportedException("CompressionMethod: " + Header.CompressionMethod);
            }
        }
    }

    /// <summary>
    /// Wraps <paramref name="plainStream"/> with the decryption layer the
    /// header calls for (pkware traditional or WinZip AES), bounding it to the
    /// compressed size when that size is known.
    /// </summary>
    protected Stream GetCryptoStream(Stream plainStream)
    {
        if ((Header.CompressedSize == 0)
#if !PORTABLE && !NETFX_CORE
            && ((Header.PkwareTraditionalEncryptionData != null)
                || (Header.WinzipAesEncryptionData != null)))
#else
            && (Header.PkwareTraditionalEncryptionData != null))
#endif
        {
            throw new NotSupportedException("Cannot encrypt file with unknown size at start.");
        }
        if ((Header.CompressedSize == 0)
            && FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor))
        {
            plainStream = new NonDisposingStream(plainStream); //make sure AES doesn't close
        }
        else
        {
            plainStream = new ReadOnlySubStream(plainStream, Header.CompressedSize); //make sure AES doesn't close
        }
        if (Header.PkwareTraditionalEncryptionData != null)
        {
            return new PkwareTraditionalCryptoStream(plainStream, Header.PkwareTraditionalEncryptionData,
                                                     CryptoMode.Decrypt);
        }
#if !PORTABLE && !NETFX_CORE
        if (Header.WinzipAesEncryptionData != null)
        {
            //only read 10 less because the last ten are auth bytes
            return new WinzipAesCryptoStream(plainStream, Header.WinzipAesEncryptionData, Header.CompressedSize - 10);
        }
#endif
        return plainStream;
    }
}
}

View File

@@ -1,101 +0,0 @@
/*
* Copyright 2001,2004-2005 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* This package is based on the work done by Keiron Liddle, Aftex Software
* <keiron@aftexsw.com> to whom the Ant project is very grateful for his
* great code.
*/
namespace SharpCompress.Compressor.BZip2
{
/**
* Base class for both the compress and decompress classes.
* Holds common arrays, and static data.
*
* @author <a href="mailto:keiron@aftexsw.com">Keiron Liddle</a>
*/
/// <summary>
/// Shared constants for the BZip2 compressor and decompressor.
/// </summary>
internal class BZip2Constants
{
    // Block-size unit; actual block sizes are multiples of this.
    public const int baseBlockSize = 100000;
    public const int MAX_ALPHA_SIZE = 258;
    public const int MAX_CODE_LEN = 23;
    // Run-length symbols used by the MTF/RLE2 stage.
    public const int RUNA = 0;
    public const int RUNB = 1;
    public const int N_GROUPS = 6;
    public const int G_SIZE = 50;
    public const int N_ITERS = 4;
    public const int MAX_SELECTORS = (2 + (900000/G_SIZE));
    public const int NUM_OVERSHOOT_BYTES = 20;

    // Fixed pseudo-random number table from the bzip2 reference
    // implementation (used for the format's data (de)randomization).
    public static int[] rNums =
    {
        619, 720, 127, 481, 931, 816, 813, 233, 566, 247,
        985, 724, 205, 454, 863, 491, 741, 242, 949, 214,
        733, 859, 335, 708, 621, 574, 73, 654, 730, 472,
        419, 436, 278, 496, 867, 210, 399, 680, 480, 51,
        878, 465, 811, 169, 869, 675, 611, 697, 867, 561,
        862, 687, 507, 283, 482, 129, 807, 591, 733, 623,
        150, 238, 59, 379, 684, 877, 625, 169, 643, 105,
        170, 607, 520, 932, 727, 476, 693, 425, 174, 647,
        73, 122, 335, 530, 442, 853, 695, 249, 445, 515,
        909, 545, 703, 919, 874, 474, 882, 500, 594, 612,
        641, 801, 220, 162, 819, 984, 589, 513, 495, 799,
        161, 604, 958, 533, 221, 400, 386, 867, 600, 782,
        382, 596, 414, 171, 516, 375, 682, 485, 911, 276,
        98, 553, 163, 354, 666, 933, 424, 341, 533, 870,
        227, 730, 475, 186, 263, 647, 537, 686, 600, 224,
        469, 68, 770, 919, 190, 373, 294, 822, 808, 206,
        184, 943, 795, 384, 383, 461, 404, 758, 839, 887,
        715, 67, 618, 276, 204, 918, 873, 777, 604, 560,
        951, 160, 578, 722, 79, 804, 96, 409, 713, 940,
        652, 934, 970, 447, 318, 353, 859, 672, 112, 785,
        645, 863, 803, 350, 139, 93, 354, 99, 820, 908,
        609, 772, 154, 274, 580, 184, 79, 626, 630, 742,
        653, 282, 762, 623, 680, 81, 927, 626, 789, 125,
        411, 521, 938, 300, 821, 78, 343, 175, 128, 250,
        170, 774, 972, 275, 999, 639, 495, 78, 352, 126,
        857, 956, 358, 619, 580, 124, 737, 594, 701, 612,
        669, 112, 134, 694, 363, 992, 809, 743, 168, 974,
        944, 375, 748, 52, 600, 747, 642, 182, 862, 81,
        344, 805, 988, 739, 511, 655, 814, 334, 249, 515,
        897, 955, 664, 981, 649, 113, 974, 459, 893, 228,
        433, 837, 553, 268, 926, 240, 102, 654, 459, 51,
        686, 754, 806, 760, 493, 403, 415, 394, 687, 700,
        946, 670, 656, 610, 738, 392, 760, 799, 887, 653,
        978, 321, 576, 617, 626, 502, 894, 679, 243, 440,
        680, 879, 194, 572, 640, 724, 926, 56, 204, 700,
        707, 151, 457, 449, 797, 195, 791, 558, 945, 679,
        297, 59, 87, 824, 713, 663, 412, 693, 342, 606,
        134, 108, 571, 364, 631, 212, 174, 643, 304, 329,
        343, 97, 430, 751, 497, 314, 983, 374, 822, 928,
        140, 206, 73, 263, 980, 736, 876, 478, 430, 305,
        170, 514, 364, 692, 829, 82, 855, 953, 676, 246,
        369, 970, 294, 750, 807, 827, 150, 790, 288, 923,
        804, 378, 215, 828, 592, 281, 565, 555, 710, 82,
        896, 831, 547, 261, 524, 462, 293, 465, 502, 56,
        661, 821, 976, 991, 658, 869, 905, 758, 745, 193,
        768, 550, 608, 933, 378, 286, 215, 979, 792, 961,
        61, 688, 793, 644, 986, 403, 106, 366, 905, 644,
        372, 567, 466, 434, 645, 210, 389, 550, 919, 135,
        780, 773, 635, 389, 707, 100, 626, 958, 165, 504,
        920, 176, 193, 713, 857, 265, 203, 50, 668, 108,
        645, 990, 626, 197, 510, 357, 358, 850, 858, 364,
        936, 638
    };
}
}

View File

@@ -1,203 +0,0 @@
/*
* Copyright 2001,2004-2005 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* This package is based on the work done by Keiron Liddle), Aftex Software
* <keiron@aftexsw.com> to whom the Ant project is very grateful for his
* great code.
*/
namespace SharpCompress.Compressor.BZip2
{
    /// <summary>
    /// Running CRC-32 checksum as used by BZip2 (CRC-32/BZIP2: polynomial
    /// 0x04C11DB7, MSB-first, initial value 0xFFFFFFFF, final complement),
    /// used to sanity-check compressed data.
    /// </summary>
    /// <remarks>
    /// Based on work by Keiron Liddle, Aftex Software, for the Ant project.
    /// </remarks>
    internal class CRC
    {
        // Lookup table for the MSB-first CRC-32 polynomial 0x04C11DB7.
        // Generated once at type initialization instead of embedding 256
        // magic constants; crc32Table[1] == 0x04c11db7 (the polynomial
        // itself), identical to the table previously hard-coded here.
        public static int[] crc32Table = BuildCrc32Table();

        // Builds the standard non-reflected CRC-32 table for 0x04C11DB7.
        private static int[] BuildCrc32Table()
        {
            const uint polynomial = 0x04C11DB7u;
            var table = new int[256];
            for (uint i = 0; i < 256; i++)
            {
                uint crc = i << 24;
                for (int bit = 0; bit < 8; bit++)
                {
                    // MSB-first step: shift left, xor polynomial on carry-out.
                    crc = (crc & 0x80000000u) != 0 ? (crc << 1) ^ polynomial : crc << 1;
                }
                table[i] = unchecked((int)crc);
            }
            return table;
        }

        public CRC()
        {
            InitialiseCRC();
        }

        /// <summary>Resets the running CRC to the initial value 0xFFFFFFFF.</summary>
        internal void InitialiseCRC()
        {
            globalCrc = unchecked((int)0xffffffff);
        }

        /// <summary>Returns the finished checksum (one's complement of the running value).</summary>
        internal int GetFinalCRC()
        {
            return ~globalCrc;
        }

        /// <summary>Returns the raw running value without the final complement.</summary>
        internal int GetGlobalCRC()
        {
            return globalCrc;
        }

        /// <summary>Restores a previously saved running value.</summary>
        internal void SetGlobalCRC(int newCrc)
        {
            globalCrc = newCrc;
        }

        /// <summary>Folds one byte (expected 0..255) into the running CRC.</summary>
        internal void UpdateCRC(int inCh)
        {
            int temp = (globalCrc >> 24) ^ inCh;
            if (temp < 0)
            {
                // (globalCrc >> 24) is an arithmetic shift, so the xor can go
                // negative; wrap it back into the 0..255 table index range.
                temp = 256 + temp;
            }
            globalCrc = (globalCrc << 8) ^ crc32Table[temp];
        }

        // Running CRC state (starts at 0xFFFFFFFF, complemented on read-out).
        internal int globalCrc;
    }
}

View File

@@ -1,464 +0,0 @@
// Tree.cs
// ------------------------------------------------------------------
//
// Copyright (c) 2009 Dino Chiesa and Microsoft Corporation.
// All rights reserved.
//
// This code module is part of DotNetZip, a zipfile class library.
//
// ------------------------------------------------------------------
//
// This code is licensed under the Microsoft Public License.
// See the file License.txt for the license details.
// More info on: http://dotnetzip.codeplex.com
//
// ------------------------------------------------------------------
//
// last saved (in emacs):
// Time-stamp: <2009-October-28 13:29:50>
//
// ------------------------------------------------------------------
//
// This module defines classes for zlib compression and
// decompression. This code is derived from the jzlib implementation of
// zlib. In keeping with the license for jzlib, the copyright to that
// code is below.
//
// ------------------------------------------------------------------
//
// Copyright (c) 2000,2001,2002,2003 ymnk, JCraft,Inc. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in
// the documentation and/or other materials provided with the distribution.
//
// 3. The names of the authors may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
// FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JCRAFT,
// INC. OR ANY CONTRIBUTORS TO THIS SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
// OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// -----------------------------------------------------------------------
//
// This program is based on zlib-1.1.3; credit to authors
// Jean-loup Gailly(jloup@gzip.org) and Mark Adler(madler@alumni.caltech.edu)
// and contributors of zlib.
//
// -----------------------------------------------------------------------
using System;
namespace SharpCompress.Compressor.Deflate
{
internal sealed partial class DeflateManager
{
#region Nested type: Tree
private sealed class Tree
{
internal const int Buf_size = 8*2;
private static readonly int HEAP_SIZE = (2*InternalConstants.L_CODES + 1);
internal static readonly sbyte[] bl_order = new sbyte[]
{
16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2,
14,
1, 15
};
// The lengths of the bit length codes are sent in order of decreasing
// probability, to avoid transmitting the lengths for unused bit
// length codes.
// see definition of array dist_code below
//internal const int DIST_CODE_LEN = 512;
private static readonly sbyte[] _dist_code = new sbyte[]
{
0, 1, 2, 3, 4, 4, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7,
8, 8, 8, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 9,
10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
10, 10,
11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
11, 11,
12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
12, 12,
12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
12, 12,
13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
13, 13,
13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
13, 13,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 14,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 14,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 14,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 14,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15,
0, 0, 16, 17, 18, 18, 19, 19, 20, 20, 20, 20, 21, 21,
21, 21,
22, 22, 22, 22, 22, 22, 22, 22, 23, 23, 23, 23, 23, 23,
23, 23,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24,
25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
25, 25,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26,
27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
27, 27,
27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
27, 27,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29
};
internal static readonly sbyte[] LengthCode = new sbyte[]
{
0, 1, 2, 3, 4, 5, 6, 7, 8, 8, 9, 9, 10, 10, 11, 11,
12, 12, 12, 12, 13, 13, 13, 13, 14, 14, 14, 14, 15, 15
, 15, 15,
16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 17, 17
, 17, 17,
18, 18, 18, 18, 18, 18, 18, 18, 19, 19, 19, 19, 19, 19
, 19, 19,
20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20
, 20, 20,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21
, 21, 21,
22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22
, 22, 22,
23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23
, 23, 23,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24
, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24
, 24, 24,
25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25
, 25, 25,
25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25
, 25, 25,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26
, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26
, 26, 26,
27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27
, 27, 27,
27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27
, 27, 28
};
internal static readonly int[] LengthBase = new[]
{
0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24, 28,
32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 0
};
internal static readonly int[] DistanceBase = new[]
{
0, 1, 2, 3, 4, 6, 8, 12, 16, 24, 32, 48, 64, 96, 128,
192,
256, 384, 512, 768, 1024, 1536, 2048, 3072, 4096, 6144
, 8192, 12288, 16384, 24576
};
internal short[] dyn_tree; // the dynamic tree
internal int max_code; // largest code with non zero frequency
internal StaticTree staticTree; // the corresponding static tree
/// <summary>
/// Map from a distance to a distance code.
/// </summary>
/// <remarks>
/// No side effects. _dist_code[256] and _dist_code[257] are never used.
/// </remarks>
internal static int DistanceCode(int dist)
{
    if (dist < 256)
    {
        return _dist_code[dist];
    }
    // Logical (unsigned) shift right by 7 — same result SharedUtils.URShift
    // produced, without the helper call.
    return _dist_code[256 + (int)((uint)dist >> 7)];
}
// Compute the optimal bit lengths for a tree and update the total bit length
// for the current block.
// IN assertion: the fields freq and dad are set, heap[heap_max] and
// above are the tree nodes sorted by increasing frequency.
// OUT assertions: the field len is set to the optimal bit length, the
// array bl_count contains the frequencies for each bit length.
// The length opt_len is updated; static_len is also updated if stree is
// not null.
// Computes the optimal bit length for every node of this tree and updates
// s.opt_len / s.static_len; lengths exceeding staticTree.maxLength are
// clamped and the excess is redistributed afterwards.
internal void gen_bitlen(DeflateManager s)
{
    short[] tree = dyn_tree;
    short[] stree = staticTree.treeCodes;
    int[] extra = staticTree.extraBits;
    int base_Renamed = staticTree.extraBase;
    int max_length = staticTree.maxLength;
    int h; // heap index
    int n, m; // iterate over the tree elements
    int bits; // bit length
    int xbits; // extra bits
    short f; // frequency
    int overflow = 0; // number of elements with bit length too large
    for (bits = 0; bits <= InternalConstants.MAX_BITS; bits++)
        s.bl_count[bits] = 0;
    // In a first pass, compute the optimal bit lengths (which may
    // overflow in the case of the bit length tree).
    tree[s.heap[s.heap_max]*2 + 1] = 0; // root of the heap
    for (h = s.heap_max + 1; h < HEAP_SIZE; h++)
    {
        n = s.heap[h];
        // A node's depth is its parent's depth + 1 (tree[x*2+1] holds the
        // parent index at this point, set by build_tree).
        bits = tree[tree[n*2 + 1]*2 + 1] + 1;
        if (bits > max_length)
        {
            bits = max_length;
            overflow++;
        }
        tree[n*2 + 1] = (short) bits;
        // We overwrite tree[n*2+1] which is no longer needed
        if (n > max_code)
            continue; // not a leaf node
        s.bl_count[bits]++;
        xbits = 0;
        if (n >= base_Renamed)
            xbits = extra[n - base_Renamed];
        f = tree[n*2];
        s.opt_len += f*(bits + xbits);
        if (stree != null)
            s.static_len += f*(stree[n*2 + 1] + xbits);
    }
    if (overflow == 0)
        return;
    // This happens for example on obj2 and pic of the Calgary corpus
    // Find the first bit length which could increase:
    do
    {
        bits = max_length - 1;
        while (s.bl_count[bits] == 0)
            bits--;
        s.bl_count[bits]--; // move one leaf down the tree
        s.bl_count[bits + 1] = (short) (s.bl_count[bits + 1] + 2); // move one overflow item as its brother
        s.bl_count[max_length]--;
        // The brother of the overflow item also moves one step up,
        // but this does not affect bl_count[max_length]
        overflow -= 2;
    } while (overflow > 0);
    // Reassign lengths to leaves, deepest first, so bl_count is honoured
    // (h still points just past the last heap entry consumed above).
    for (bits = max_length; bits != 0; bits--)
    {
        n = s.bl_count[bits];
        while (n != 0)
        {
            m = s.heap[--h];
            if (m > max_code)
                continue;
            if (tree[m*2 + 1] != bits)
            {
                s.opt_len = (int) (s.opt_len + (bits - (long) tree[m*2 + 1])*tree[m*2]);
                tree[m*2 + 1] = (short) bits;
            }
            n--;
        }
    }
}
// Construct one Huffman tree and assigns the code bit strings and lengths.
// Update the total bit length for the current block.
// IN assertion: the field freq is set for all tree elements.
// OUT assertions: the fields len and code are set to the optimal bit length
// and corresponding code. The length opt_len is updated; static_len is
// also updated if stree is not null. The field max_code is set.
// Builds the Huffman tree for this instance from the symbol frequencies in
// dyn_tree, then assigns bit lengths (gen_bitlen) and codes (gen_codes).
// Sets this.max_code and updates s.opt_len / s.static_len.
internal void build_tree(DeflateManager s)
{
    short[] tree = dyn_tree;
    short[] stree = staticTree.treeCodes;
    int elems = staticTree.elems;
    int n, m; // iterate over heap elements
    int max_code = -1; // largest code with non zero frequency
    int node; // new node being created
    // Construct the initial heap, with least frequent element in
    // heap[1]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
    // heap[0] is not used.
    s.heap_len = 0;
    s.heap_max = HEAP_SIZE;
    for (n = 0; n < elems; n++)
    {
        if (tree[n*2] != 0)
        {
            s.heap[++s.heap_len] = max_code = n;
            s.depth[n] = 0;
        }
        else
        {
            tree[n*2 + 1] = 0;
        }
    }
    // The pkzip format requires that at least one distance code exists,
    // and that at least one bit should be sent even if there is only one
    // possible code. So to avoid special checks later on we force at least
    // two codes of non zero frequency.
    while (s.heap_len < 2)
    {
        node = s.heap[++s.heap_len] = (max_code < 2 ? ++max_code : 0);
        tree[node*2] = 1;
        s.depth[node] = 0;
        s.opt_len--;
        if (stree != null)
            s.static_len -= stree[node*2 + 1];
        // node is 0 or 1 so it does not have extra bits
    }
    this.max_code = max_code;
    // The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
    // establish sub-heaps of increasing lengths:
    for (n = s.heap_len/2; n >= 1; n--)
        s.pqdownheap(tree, n);
    // Construct the Huffman tree by repeatedly combining the least two
    // frequent nodes.
    node = elems; // next internal node of the tree
    do
    {
        // n = node of least frequency
        n = s.heap[1];
        s.heap[1] = s.heap[s.heap_len--];
        s.pqdownheap(tree, 1);
        m = s.heap[1]; // m = node of next least frequency
        s.heap[--s.heap_max] = n; // keep the nodes sorted by frequency
        s.heap[--s.heap_max] = m;
        // Create a new node father of n and m
        tree[node*2] = unchecked((short) (tree[n*2] + tree[m*2]));
        s.depth[node] = (sbyte) (Math.Max((byte) s.depth[n], (byte) s.depth[m]) + 1);
        tree[n*2 + 1] = tree[m*2 + 1] = (short) node;
        // and insert the new node in the heap
        s.heap[1] = node++;
        s.pqdownheap(tree, 1);
    } while (s.heap_len >= 2);
    s.heap[--s.heap_max] = s.heap[1];
    // At this point, the fields freq and dad are set. We can now
    // generate the bit lengths.
    gen_bitlen(s);
    // The field len is now set, we can generate the bit codes
    gen_codes(tree, max_code, s.bl_count);
}
// Generate the codes for a given tree and bit counts (which need not be
// optimal).
// IN assertion: the array bl_count contains the bit length statistics for
// the given tree and the field len is set for all tree elements.
// OUT assertion: the field code is set for all tree elements of non
// zero code length.
// Assigns canonical Huffman codes to all tree elements whose bit length
// (tree[n*2+1]) is non-zero; codes are stored bit-reversed in tree[n*2].
// bl_count must hold the per-length symbol counts produced by gen_bitlen.
internal static void gen_codes(short[] tree, int max_code, short[] bl_count)
{
    // First canonical code to hand out for each bit length.
    var next_code = new short[InternalConstants.MAX_BITS + 1];
    short code = 0;
    // Derive the starting code of every length from the count of the
    // previous length; no bit reversal happens at this stage.
    for (int bits = 1; bits <= InternalConstants.MAX_BITS; bits++)
    {
        unchecked
        {
            code = (short) ((code + bl_count[bits - 1]) << 1);
        }
        next_code[bits] = code;
    }
    // Check that the bit counts in bl_count are consistent. The last code
    // must be all ones.
    //Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
    // "inconsistent bit counts");
    for (int n = 0; n <= max_code; n++)
    {
        int len = tree[n*2 + 1];
        if (len != 0)
        {
            // Deflate transmits codes LSB-first, hence the reversal.
            tree[n*2] = unchecked((short) bi_reverse(next_code[len]++, len));
        }
    }
}
// Reverse the first len bits of a code, using straightforward code (a faster
// method would use a table)
// IN assertion: 1 <= len <= 15
// Reverses the low `len` bits of `code` (straightforward loop; a table
// would be faster). IN assertion: 1 <= len <= 15.
internal static int bi_reverse(int code, int len)
{
    int reversed = 0;
    for (int i = 0; i < len; i++)
    {
        reversed = (reversed << 1) | (code & 1);
        code >>= 1;
    }
    return reversed;
}
}
#endregion
}
}

View File

@@ -1,219 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Compressor.Filters
{
internal class BCJ2Filter : Stream
{
private readonly Stream baseStream;
private readonly byte[] input = new byte[4096];
private int inputOffset = 0;
private int inputCount = 0;
private bool endReached = false;
private long position = 0;
private byte[] output = new byte[4];
private int outputOffset = 0;
private int outputCount = 0;
private byte[] control;
private byte[] data1;
private byte[] data2;
private int controlPos = 0;
private int data1Pos = 0;
private int data2Pos = 0;
private ushort[] p = new ushort[256 + 2];
private uint range, code;
private byte prevByte = 0;
private bool isDisposed;
private const int kNumTopBits = 24;
private const int kTopValue = 1 << kNumTopBits;
private const int kNumBitModelTotalBits = 11;
private const int kBitModelTotal = 1 << kNumBitModelTotalBits;
private const int kNumMoveBits = 5;
// True when byte pair (b0, b1) starts an x86 branch the BCJ2 coder handles:
// CALL (0xE8), JMP (0xE9), or a two-byte Jcc (0x0F 0x80..0x8F).
private static bool IsJ(byte b0, byte b1)
{
    if (b1 == 0xE8 || b1 == 0xE9)
    {
        return true;
    }
    return IsJcc(b0, b1);
}
// True for the two-byte conditional-jump opcodes 0x0F 0x80 .. 0x0F 0x8F.
private static bool IsJcc(byte b0, byte b1)
{
    return b0 == 0x0F && b1 >= 0x80 && b1 <= 0x8F;
}
// Wraps baseStream (the BCJ2 "main" stream) and decodes it using the three
// side streams: `control` feeds the range coder, `data1`/`data2` hold the
// absolute CALL/JMP targets. Initialises all range-coder probabilities to
// 1/2 and primes the 5 initial code bytes, as the range coder requires.
public BCJ2Filter(byte[] control, byte[] data1, byte[] data2, Stream baseStream)
{
    this.control = control;
    this.data1 = data1;
    this.data2 = data2;
    this.baseStream = baseStream;
    int i;
    // Every probability starts at half of the 11-bit model total.
    for (i = 0; i < p.Length; i++)
        p[i] = kBitModelTotal >> 1;
    code = 0;
    range = 0xFFFFFFFF;
    // Load the first 5 bytes of the range-coder state from `control`.
    for (i = 0; i < 5; i++)
        code = (code << 8) | control[controlPos++];
}
// Disposes the wrapped stream exactly once; subsequent calls are no-ops.
protected override void Dispose(bool disposing)
{
    if (!isDisposed)
    {
        isDisposed = true;
        base.Dispose(disposing);
        baseStream.Dispose();
    }
}
// Read-only, forward-only decoding stream.
public override bool CanRead
{
    get { return true; }
}
public override bool CanSeek
{
    get { return false; }
}
public override bool CanWrite
{
    get { return false; }
}
// NOTE(review): Stream guidance is for read-only streams to implement
// Flush as a no-op rather than throw — consider changing.
public override void Flush()
{
    throw new NotSupportedException();
}
// Decoded length: main stream plus the target bytes spliced in from the
// two data side-streams.
public override long Length
{
    get { return baseStream.Length + data1.Length + data2.Length; }
}
// Number of decoded bytes produced so far; not seekable.
public override long Position
{
    get { return position; }
    set { throw new NotSupportedException(); }
}
// Decodes up to `count` bytes. Bytes are copied through from the main
// stream; whenever a CALL/JMP/Jcc opcode pair is seen, one bit is range-
// decoded to decide whether the following 4 bytes were converted: if so,
// an absolute target is read from data1 (E8) or data2 (others), turned
// back into a relative displacement, and emitted via the 4-byte `output`
// staging buffer.
public override int Read(byte[] buffer, int offset, int count)
{
    int size = 0;
    byte b = 0;
    while (!endReached && size < count)
    {
        // Drain any pending converted-displacement bytes first.
        while (outputOffset < outputCount)
        {
            b = output[outputOffset++];
            buffer[offset++] = b;
            size++;
            position++;
            prevByte = b;
            if (size == count)
                return size;
        }
        // Refill the raw input buffer from the main stream when exhausted.
        if (inputOffset == inputCount)
        {
            inputOffset = 0;
            inputCount = baseStream.Read(input, 0, input.Length);
            if (inputCount == 0)
            {
                endReached = true;
                break;
            }
        }
        b = input[inputOffset++];
        buffer[offset++] = b;
        size++;
        position++;
        if (!IsJ(prevByte, b))
            prevByte = b;
        else
        {
            // Pick the probability context: E8 uses the preceding byte,
            // E9 uses slot 256, Jcc uses slot 257.
            int prob;
            if (b == 0xE8)
                prob = prevByte;
            else if (b == 0xE9)
                prob = 256;
            else
                prob = 257;
            // Range-decode one bit under p[prob].
            uint bound = (range >> kNumBitModelTotalBits) * p[prob];
            if (code < bound)
            {
                // Bit 0: the branch target was not converted.
                range = bound;
                p[prob] += (ushort)((kBitModelTotal - p[prob]) >> kNumMoveBits);
                if (range < kTopValue)
                {
                    range <<= 8;
                    code = (code << 8) | control[controlPos++];
                }
                prevByte = b;
            }
            else
            {
                // Bit 1: read the big-endian absolute target from the
                // matching side stream and restore the relative form.
                range -= bound;
                code -= bound;
                p[prob] -= (ushort)(p[prob] >> kNumMoveBits);
                if (range < kTopValue)
                {
                    range <<= 8;
                    code = (code << 8) | control[controlPos++];
                }
                uint dest;
                if (b == 0xE8)
                    dest =
                        (uint)
                        ((data1[data1Pos++] << 24) | (data1[data1Pos++] << 16) | (data1[data1Pos++] << 8) |
                         data1[data1Pos++]);
                else
                    dest =
                        (uint)
                        ((data2[data2Pos++] << 24) | (data2[data2Pos++] << 16) | (data2[data2Pos++] << 8) |
                         data2[data2Pos++]);
                // relative = absolute - (address just past the 4-byte field)
                dest -= (uint)(position + 4);
                output[0] = (byte)dest;
                output[1] = (byte)(dest >> 8);
                output[2] = (byte)(dest >> 16);
                output[3] = (byte)(dest >> 24);
                outputOffset = 0;
                outputCount = 4;
            }
        }
    }
    return size;
}
// Seeking and writing are not supported on this forward-only decoder.
public override long Seek(long offset, SeekOrigin origin)
{
    throw new NotSupportedException();
}
public override void SetLength(long value)
{
    throw new NotSupportedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
    throw new NotSupportedException();
}
}
}

View File

@@ -1,154 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Compressor.Filters
{
internal abstract class Filter : Stream
{
protected bool isEncoder;
protected Stream baseStream;
private byte[] tail;
private byte[] window;
private int transformed = 0;
private int read = 0;
private bool endReached = false;
private bool isDisposed;
// `lookahead` is the number of bytes the concrete Transform needs to see
// beyond a position before it can safely transform it; `tail` holds the
// lookahead-1 not-yet-transformable bytes between Read calls, and `window`
// is scratch space for re-running Transform over tail + new data.
protected Filter(bool isEncoder, Stream baseStream, int lookahead)
{
    this.isEncoder = isEncoder;
    this.baseStream = baseStream;
    tail = new byte[lookahead - 1];
    window = new byte[tail.Length * 2];
}
// Disposes the wrapped stream exactly once; subsequent calls are no-ops.
protected override void Dispose(bool disposing)
{
    if (!isDisposed)
    {
        isDisposed = true;
        base.Dispose(disposing);
        baseStream.Dispose();
    }
}
// Direction follows the mode: decoders are readable, encoders writable.
public override bool CanRead
{
    get { return !isEncoder; }
}
public override bool CanSeek
{
    get { return false; }
}
public override bool CanWrite
{
    get { return isEncoder; }
}
// NOTE(review): throwing from Flush is unusual for a stream that can be
// in write mode — consider forwarding to baseStream.Flush() instead.
public override void Flush()
{
    throw new NotSupportedException();
}
public override long Length
{
    get { return baseStream.Length; }
}
public override long Position
{
    get { return baseStream.Position; }
    set { throw new NotSupportedException(); }
}
// Reads and transforms data, keeping the last `tail.Length` bytes
// (the lookahead minus one) untransformed between calls because Transform
// may not consume everything it is given. The logic is order-sensitive:
// (1) serve previously transformed tail bytes, (2) top up from the base
// stream, (3) transform in place, (4) stash the untransformable remainder
// back into `tail` via `window`.
public override int Read(byte[] buffer, int offset, int count)
{
    int size = 0;
    // Step 1: hand out bytes already transformed on a previous call.
    if (transformed > 0)
    {
        int copySize = transformed;
        if (copySize > count)
            copySize = count;
        Buffer.BlockCopy(tail, 0, buffer, offset, copySize);
        transformed -= copySize;
        read -= copySize;
        offset += copySize;
        count -= copySize;
        size += copySize;
        // Shift the remaining tail bytes to the front.
        Buffer.BlockCopy(tail, copySize, tail, 0, read);
    }
    if (count == 0)
        return size;
    // Step 2: move buffered-but-untransformed tail bytes into the caller's
    // buffer, then refill from the base stream.
    int inSize = read;
    if (inSize > count)
        inSize = count;
    Buffer.BlockCopy(tail, 0, buffer, offset, inSize);
    read -= inSize;
    Buffer.BlockCopy(tail, inSize, tail, 0, read);
    while (!endReached && inSize < count)
    {
        int baseRead = baseStream.Read(buffer, offset + inSize, count - inSize);
        inSize += baseRead;
        if (baseRead == 0)
            endReached = true;
    }
    // Keep the tail topped up so the next call has lookahead available.
    while (!endReached && read < tail.Length)
    {
        int baseRead = baseStream.Read(tail, read, tail.Length - read);
        read += baseRead;
        if (baseRead == 0)
            endReached = true;
    }
    // Step 3: transform what we can directly in the caller's buffer.
    if (inSize > tail.Length)
    {
        transformed = Transform(buffer, offset, inSize);
        offset += transformed;
        count -= transformed;
        size += transformed;
        inSize -= transformed;
        transformed = 0;
    }
    if (count == 0)
        return size;
    // Step 4: run the leftover bytes plus the tail through Transform in
    // `window`, return the leading part, and keep the rest as new tail.
    Buffer.BlockCopy(buffer, offset, window, 0, inSize);
    Buffer.BlockCopy(tail, 0, window, inSize, read);
    if (inSize + read > tail.Length)
        transformed = Transform(window, 0, inSize + read);
    else
        transformed = inSize + read;
    Buffer.BlockCopy(window, 0, buffer, offset, inSize);
    Buffer.BlockCopy(window, inSize, tail, 0, read);
    size += inSize;
    transformed -= inSize;
    return size;
}
// Seeking is never supported.
public override long Seek(long offset, SeekOrigin origin)
{
    throw new NotSupportedException();
}
public override void SetLength(long value)
{
    throw new NotSupportedException();
}
// NOTE(review): Transform appears to rewrite the buffer in place before it
// is written, so Write mutates the caller's array — confirm and document
// at the call sites.
public override void Write(byte[] buffer, int offset, int count)
{
    Transform(buffer, offset, count);
    baseStream.Write(buffer, offset, count);
}
// Transforms buffer[offset..offset+count) in the concrete filter's way and
// returns how many bytes were actually transformed.
protected abstract int Transform(byte[] buffer, int offset, int count);
}
}

View File

@@ -1,245 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Compressor.LZMA
{
internal class Bcj2DecoderStream : DecoderStream2
{
private const int kNumTopBits = 24;
private const uint kTopValue = (1 << kNumTopBits);
// Minimal LZMA-style range decoder over a byte stream: 32-bit Range/Code
// pair, primed with 5 bytes from the stream on construction.
private class RangeDecoder
{
    internal Stream mStream;
    internal uint Range;
    internal uint Code;
    public RangeDecoder(Stream stream)
    {
        mStream = stream;
        Range = 0xFFFFFFFF;
        // Load the initial 5 code bytes, as the range-coder format requires.
        for (int i = 0; i < 5; i++)
            Code = (Code << 8) | ReadByte();
    }
    // Reads one byte; throws EndOfStreamException at end of stream.
    public byte ReadByte()
    {
        int bt = mStream.ReadByte();
        if (bt < 0)
            throw new EndOfStreamException();
        return (byte)bt;
    }
    public void Dispose()
    {
        mStream.Dispose();
    }
}
// Adaptive binary probability model for the range decoder (one instance
// per BCJ2 context). Removed the private UpdateModel helper: it was never
// called — Decode inlines the same adaptation — so it was dead code.
private class StatusDecoder
{
    private const int numMoveBits = 5;
    private const int kNumBitModelTotalBits = 11;
    private const uint kBitModelTotal = 1u << kNumBitModelTotalBits;

    // Probability (on an 11-bit scale) that the next symbol is 0.
    private uint Prob;

    public StatusDecoder()
    {
        Prob = kBitModelTotal / 2;
    }

    // Decodes one bit from the range coder, adapting Prob towards the
    // observed symbol, and renormalises when Range drops below kTopValue.
    // Returns 0 or 1.
    public uint Decode(RangeDecoder decoder)
    {
        uint newBound = (decoder.Range >> kNumBitModelTotalBits) * Prob;
        if (decoder.Code < newBound)
        {
            decoder.Range = newBound;
            Prob += (kBitModelTotal - Prob) >> numMoveBits;
            if (decoder.Range < kTopValue)
            {
                decoder.Code = (decoder.Code << 8) | decoder.ReadByte();
                decoder.Range <<= 8;
            }
            return 0;
        }
        else
        {
            decoder.Range -= newBound;
            decoder.Code -= newBound;
            Prob -= Prob >> numMoveBits;
            if (decoder.Range < kTopValue)
            {
                decoder.Code = (decoder.Code << 8) | decoder.ReadByte();
                decoder.Range <<= 8;
            }
            return 1;
        }
    }
}
private Stream mMainStream;
private Stream mCallStream;
private Stream mJumpStream;
private RangeDecoder mRangeDecoder;
private StatusDecoder[] mStatusDecoder;
private long mWritten;
private long mLimit;
private IEnumerator<byte> mIter;
private bool mFinished;
private bool isDisposed;
// streams must contain exactly 4 streams: main, call-targets, jump-targets
// and the range-coder control stream. `info` (coder properties) must be
// empty — BCJ2 takes none. Decoding itself is lazy, driven by the Run()
// iterator consumed from Read().
public Bcj2DecoderStream(Stream[] streams, byte[] info, long limit)
{
    if (info != null && info.Length > 0)
        throw new NotSupportedException();
    if (streams.Length != 4)
        throw new NotSupportedException();
    mLimit = limit;
    mMainStream = streams[0];
    mCallStream = streams[1];
    mJumpStream = streams[2];
    // streams[3] feeds the range decoder, which reads 5 priming bytes here.
    mRangeDecoder = new RangeDecoder(streams[3]);
    // One adaptive probability per context: 256 for E8 (keyed by the byte
    // before it), plus one each for E9 and Jcc.
    mStatusDecoder = new StatusDecoder[256 + 2];
    for (int i = 0; i < mStatusDecoder.Length; i++)
        mStatusDecoder[i] = new StatusDecoder();
    mIter = Run().GetEnumerator();
}
// Disposes all four input streams exactly once.
protected override void Dispose(bool disposing)
{
    if (isDisposed)
    {
        return;
    }
    isDisposed = true;
    base.Dispose(disposing);
    mMainStream.Dispose();
    mCallStream.Dispose();
    mJumpStream.Dispose();
    // BUG FIX: the fourth stream (streams[3], wrapped by the range decoder)
    // was never disposed and leaked; RangeDecoder.Dispose closes it.
    mRangeDecoder.Dispose();
}
// True for the two-byte conditional-jump opcodes 0x0F 0x80 .. 0x0F 0x8F.
private static bool IsJcc(byte b0, byte b1)
{
    return b0 == 0x0F && b1 >= 0x80 && b1 <= 0x8F;
}
// True when (b0, b1) ends in a branch opcode BCJ2 rewrites:
// CALL (0xE8), JMP (0xE9) or a two-byte Jcc.
private static bool IsJ(byte b0, byte b1)
{
    if (b1 == 0xE8 || b1 == 0xE9)
    {
        return true;
    }
    return IsJcc(b0, b1);
}
// Probability-context index for a branch opcode: E8 is keyed by the byte
// preceding it (0..255), E9 uses slot 256, Jcc uses slot 257.
private static int GetIndex(byte b0, byte b1)
{
    switch (b1)
    {
        case 0xE8:
            return b0;
        case 0xE9:
            return 256;
        default:
            return 257;
    }
}
// Pulls up to count decoded bytes from the Run() state machine into buffer.
// Returns the number of bytes copied; 0 once the source is exhausted.
public override int Read(byte[] buffer, int offset, int count)
{
    if (count == 0 || mFinished)
        return 0;

    int copied = 0;
    while (copied < count)
    {
        if (!mIter.MoveNext())
        {
            mFinished = true;
            break;
        }
        buffer[offset + copied] = mIter.Current;
        copied++;
    }
    return copied;
}
// Lazily decodes the BCJ2-transformed stream: literal bytes are copied from
// the main stream, and after each branch opcode (CALL/JMP/jcc) one range-coded
// status bit says whether its 32-bit operand was converted; if so, the absolute
// target is read from the call/jump stream and turned back into the original
// relative displacement.
public IEnumerable<byte> Run()
{
// Upper bound on literal bytes copied per iteration of the outer loop.
const uint kBurstSize = (1u << 18);
byte prevByte = 0;
uint processedBytes = 0;
for (; ; )
{
byte b = 0;
uint i;
// Copy literals until a branch opcode pair is seen (or the burst ends).
for (i = 0; i < kBurstSize; i++)
{
int tmp = mMainStream.ReadByte();
if (tmp < 0)
yield break; // main stream exhausted: decoding complete
b = (byte)tmp;
mWritten++;
yield return b;
if (IsJ(prevByte, b))
break;
prevByte = b;
}
processedBytes += i;
if (i == kBurstSize)
continue;
// One status bit per branch opcode: was its operand converted?
if (mStatusDecoder[GetIndex(prevByte, b)].Decode(mRangeDecoder) == 1)
{
// CALL targets come from the call stream, all jumps from the jump stream.
Stream s = (b == 0xE8) ? mCallStream : mJumpStream;
// Absolute target is stored big-endian in the side stream.
uint src = 0;
for (i = 0; i < 4; i++)
{
int b0 = s.ReadByte();
if (b0 < 0)
throw new EndOfStreamException();
src <<= 8;
src |= (uint)b0;
}
// Convert the absolute target back to an x86 relative displacement
// (relative to the end of the 4-byte operand) and emit it little-endian.
uint dest = src - (uint)(mWritten + 4);
mWritten++;
yield return (byte)dest;
mWritten++;
yield return (byte)(dest >> 8);
mWritten++;
yield return (byte)(dest >> 16);
mWritten++;
yield return (byte)(dest >> 24);
prevByte = (byte)(dest >> 24);
processedBytes += 4;
}
else
{
prevByte = b;
}
}
}
}
}

View File

@@ -1,87 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;
namespace SharpCompress.Compressor.LZMA
{
/// <summary>
/// Fixed-length bit set stored 32 bits per uint word.
/// </summary>
internal class BitVector
{
    private uint[] _words;
    private int _count;

    /// <summary>Creates a vector of the given length with all bits clear.</summary>
    public BitVector(int length)
    {
        _count = length;
        _words = new uint[(length + 31) >> 5];
    }

    /// <summary>Creates a vector with every bit initialized to <paramref name="initValue"/>.</summary>
    public BitVector(int length, bool initValue)
        : this(length)
    {
        if (initValue)
        {
            for (int w = 0; w < _words.Length; w++)
            {
                _words[w] = ~0u;
            }
        }
    }

    /// <summary>Creates a vector mirroring the given list of booleans.</summary>
    public BitVector(List<bool> bits)
        : this(bits.Count)
    {
        for (int i = 0; i < bits.Count; i++)
        {
            if (bits[i])
            {
                SetBit(i);
            }
        }
    }

    /// <summary>Copies the bits out into a new bool array.</summary>
    public bool[] ToArray()
    {
        bool[] result = new bool[_count];
        for (int i = 0; i < result.Length; i++)
        {
            result[i] = this[i];
        }
        return result;
    }

    /// <summary>Number of bits in the vector.</summary>
    public int Length
    {
        get { return _count; }
    }

    /// <summary>Reads the bit at <paramref name="index"/>.</summary>
    public bool this[int index]
    {
        get
        {
            CheckIndex(index);
            return (_words[index >> 5] & (1u << (index & 31))) != 0;
        }
    }

    /// <summary>Sets the bit at <paramref name="index"/>.</summary>
    public void SetBit(int index)
    {
        CheckIndex(index);
        _words[index >> 5] |= 1u << (index & 31);
    }

    /// <summary>Sets the bit at <paramref name="index"/> and returns its previous value.</summary>
    internal bool GetAndSet(int index)
    {
        CheckIndex(index);
        uint word = _words[index >> 5];
        uint mask = 1u << (index & 31);
        _words[index >> 5] = word | mask;
        return (word & mask) != 0;
    }

    // Shared bounds check for all accessors.
    private void CheckIndex(int index)
    {
        if (index < 0 || index >= _count)
            throw new ArgumentOutOfRangeException("index");
    }

    /// <summary>Debug view: 'x' for set bits, '.' for clear ones.</summary>
    public override string ToString()
    {
        StringBuilder sb = new StringBuilder(_count);
        for (int i = 0; i < _count; i++)
        {
            sb.Append(this[i] ? 'x' : '.');
        }
        return sb.ToString();
    }
}
}

View File

@@ -1,59 +0,0 @@
namespace SharpCompress.Compressor.LZMA.LZ
{
/// <summary>
/// Streaming CRC-32 (reflected polynomial 0xEDB88320, as used by zip/gzip).
/// </summary>
internal class CRC
{
    /// <summary>Per-byte lookup table for the reflected CRC-32 polynomial.</summary>
    public static readonly uint[] Table;

    static CRC()
    {
        const uint kPoly = 0xEDB88320;
        uint[] table = new uint[256];
        for (uint n = 0; n < 256; n++)
        {
            uint value = n;
            for (int bit = 0; bit < 8; bit++)
            {
                value = ((value & 1) != 0) ? ((value >> 1) ^ kPoly) : (value >> 1);
            }
            table[n] = value;
        }
        Table = table;
    }

    // Running CRC; starts at the standard all-ones preset.
    private uint _value = 0xFFFFFFFF;

    /// <summary>Resets the running CRC to its initial state.</summary>
    public void Init()
    {
        _value = 0xFFFFFFFF;
    }

    /// <summary>Folds a single byte into the running CRC.</summary>
    public void UpdateByte(byte b)
    {
        _value = Table[((byte)_value) ^ b] ^ (_value >> 8);
    }

    /// <summary>Folds <paramref name="size"/> bytes of <paramref name="data"/> into the running CRC.</summary>
    public void Update(byte[] data, uint offset, uint size)
    {
        for (uint i = 0; i < size; i++)
        {
            _value = Table[((byte)_value) ^ data[offset + i]] ^ (_value >> 8);
        }
    }

    /// <summary>Returns the finished (bit-inverted) digest.</summary>
    public uint GetDigest()
    {
        return _value ^ 0xFFFFFFFF;
    }

    // One-shot helpers kept for parity with the original implementation.
    private static uint CalculateDigest(byte[] data, uint offset, uint size)
    {
        CRC crc = new CRC();
        crc.Update(data, offset, size);
        return crc.GetDigest();
    }

    private static bool VerifyDigest(uint digest, byte[] data, uint offset, uint size)
    {
        return CalculateDigest(data, offset, size) == digest;
    }
}
}

View File

@@ -1,91 +0,0 @@
using System;
using System.Collections.Generic;
namespace SharpCompress.Compressor.LZMA
{
/// <summary>
/// Indentation-aware debug logger writing to System.Diagnostics.Debug.
/// BUG FIX: the WriteLine overloads previously called Debug unconditionally
/// while the Write overloads were guarded by #if !PORTABLE && !NETFX_CORE;
/// the same guard is now applied consistently so PORTABLE/NETFX_CORE
/// targets (which compile out the Write calls) build cleanly.
/// </summary>
internal static class Log
{
    private static Stack<string> _indent = new Stack<string>();
    private static bool _needsIndent = true;

    static Log()
    {
        // Base indentation level; PopIndent never removes it.
        _indent.Push("");
    }

    /// <summary>Increases the indentation applied to subsequent lines.</summary>
    public static void PushIndent(string indent = " ")
    {
        _indent.Push(_indent.Peek() + indent);
    }

    /// <summary>Restores the previous indentation level.</summary>
    /// <exception cref="InvalidOperationException">Already at the base level.</exception>
    public static void PopIndent()
    {
        if (_indent.Count == 1)
            throw new InvalidOperationException();
        _indent.Pop();
    }

    // Emits the pending indentation exactly once per output line.
    private static void EnsureIndent()
    {
        if (_needsIndent)
        {
            _needsIndent = false;
#if !PORTABLE && !NETFX_CORE
            System.Diagnostics.Debug.Write(_indent.Peek());
#endif
        }
    }

    public static void Write(object value)
    {
        EnsureIndent();
#if !PORTABLE && !NETFX_CORE
        System.Diagnostics.Debug.Write(value);
#endif
    }

    public static void Write(string text)
    {
        EnsureIndent();
#if !PORTABLE && !NETFX_CORE
        System.Diagnostics.Debug.Write(text);
#endif
    }

    public static void Write(string format, params object[] args)
    {
        EnsureIndent();
#if !PORTABLE && !NETFX_CORE
        System.Diagnostics.Debug.Write(string.Format(format, args));
#endif
    }

    public static void WriteLine()
    {
#if !PORTABLE && !NETFX_CORE
        System.Diagnostics.Debug.WriteLine("");
#endif
        _needsIndent = true;
    }

    public static void WriteLine(object value)
    {
        EnsureIndent();
#if !PORTABLE && !NETFX_CORE
        System.Diagnostics.Debug.WriteLine(value);
#endif
        _needsIndent = true;
    }

    public static void WriteLine(string text)
    {
        EnsureIndent();
#if !PORTABLE && !NETFX_CORE
        System.Diagnostics.Debug.WriteLine(text);
#endif
        _needsIndent = true;
    }

    public static void WriteLine(string format, params object[] args)
    {
        EnsureIndent();
#if !PORTABLE && !NETFX_CORE
        System.Diagnostics.Debug.WriteLine(string.Format(format, args));
#endif
        _needsIndent = true;
    }
}
}

View File

@@ -1,97 +0,0 @@
namespace SharpCompress.Compressor.LZMA
{
// Shared LZMA constants and the packet-type state machine used by both the
// encoder and decoder (mirrors the reference LZMA SDK "Base" class).
internal abstract class Base
{
public const uint kNumRepDistances = 4;
public const uint kNumStates = 12;
// static byte []kLiteralNextStates = {0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 4, 5};
// static byte []kMatchNextStates = {7, 7, 7, 7, 7, 7, 7, 10, 10, 10, 10, 10};
// static byte []kRepNextStates = {8, 8, 8, 8, 8, 8, 8, 11, 11, 11, 11, 11};
// static byte []kShortRepNextStates = {9, 9, 9, 9, 9, 9, 9, 11, 11, 11, 11, 11};
// 12-state machine tracking the kinds of the most recent packets
// (literal / match / rep / short rep); Index < 7 means "after a literal".
public struct State
{
public uint Index;
public void Init()
{
Index = 0;
}
// Transition after decoding a literal (see kLiteralNextStates above).
public void UpdateChar()
{
if (Index < 4) Index = 0;
else if (Index < 10) Index -= 3;
else Index -= 6;
}
// Transition after a match packet.
public void UpdateMatch()
{
Index = (uint) (Index < 7 ? 7 : 10);
}
// Transition after a rep-match packet.
public void UpdateRep()
{
Index = (uint) (Index < 7 ? 8 : 11);
}
// Transition after a short-rep (single byte repeat) packet.
public void UpdateShortRep()
{
Index = (uint) (Index < 7 ? 9 : 11);
}
// True when the previous packet was a literal.
public bool IsCharState()
{
return Index < 7;
}
}
public const int kNumPosSlotBits = 6;
public const int kDicLogSizeMin = 0;
// public const int kDicLogSizeMax = 30;
// public const uint kDistTableSizeMax = kDicLogSizeMax * 2;
public const int kNumLenToPosStatesBits = 2; // it's for speed optimization
public const uint kNumLenToPosStates = 1 << kNumLenToPosStatesBits;
public const uint kMatchMinLen = 2;
// Maps a match length to the position-slot context (lengths beyond the
// table share the last context).
public static uint GetLenToPosState(uint len)
{
len -= kMatchMinLen;
if (len < kNumLenToPosStates)
return len;
return (uint) (kNumLenToPosStates - 1);
}
public const int kNumAlignBits = 4;
public const uint kAlignTableSize = 1 << kNumAlignBits;
public const uint kAlignMask = (kAlignTableSize - 1);
public const uint kStartPosModelIndex = 4;
public const uint kEndPosModelIndex = 14;
public const uint kNumPosModels = kEndPosModelIndex - kStartPosModelIndex;
public const uint kNumFullDistances = 1 << ((int) kEndPosModelIndex/2);
public const uint kNumLitPosStatesBitsEncodingMax = 4;
public const uint kNumLitContextBitsMax = 8;
public const int kNumPosStatesBitsMax = 4;
public const uint kNumPosStatesMax = (1 << kNumPosStatesBitsMax);
public const int kNumPosStatesBitsEncodingMax = 4;
public const uint kNumPosStatesEncodingMax = (1 << kNumPosStatesBitsEncodingMax);
// Length coding: 8 low + 8 mid + 256 high symbols.
public const int kNumLowLenBits = 3;
public const int kNumMidLenBits = 3;
public const int kNumHighLenBits = 8;
public const uint kNumLowLenSymbols = 1 << kNumLowLenBits;
public const uint kNumMidLenSymbols = 1 << kNumMidLenBits;
public const uint kNumLenSymbols = kNumLowLenSymbols + kNumMidLenSymbols +
(1 << kNumHighLenBits);
public const uint kMatchMaxLen = kMatchMinLen + kNumLenSymbols - 1;
}
}

View File

@@ -1,418 +0,0 @@
using System;
using SharpCompress.Compressor.LZMA.RangeCoder;
namespace SharpCompress.Compressor.LZMA
{
// LZMA decoder: drives the range decoder over the adaptive bit models
// (literals, lengths, distances) and reconstructs output through the
// sliding-window OutWindow. Ported from the reference LZMA SDK; the exact
// order of model updates is significant and must not be changed.
internal class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
{
// Decodes match lengths: a choice bit selects low (0..7), mid (8..15) or
// high (16..271) length ranges, each with its own bit-tree.
private class LenDecoder
{
private BitDecoder m_Choice = new BitDecoder();
private BitDecoder m_Choice2 = new BitDecoder();
private BitTreeDecoder[] m_LowCoder = new BitTreeDecoder[Base.kNumPosStatesMax];
private BitTreeDecoder[] m_MidCoder = new BitTreeDecoder[Base.kNumPosStatesMax];
private BitTreeDecoder m_HighCoder = new BitTreeDecoder(Base.kNumHighLenBits);
private uint m_NumPosStates = 0;
// Allocates per-pos-state coders up to numPosStates (idempotent for
// already-created states).
public void Create(uint numPosStates)
{
for (uint posState = m_NumPosStates; posState < numPosStates; posState++)
{
m_LowCoder[posState] = new BitTreeDecoder(Base.kNumLowLenBits);
m_MidCoder[posState] = new BitTreeDecoder(Base.kNumMidLenBits);
}
m_NumPosStates = numPosStates;
}
public void Init()
{
m_Choice.Init();
for (uint posState = 0; posState < m_NumPosStates; posState++)
{
m_LowCoder[posState].Init();
m_MidCoder[posState].Init();
}
m_Choice2.Init();
m_HighCoder.Init();
}
// Returns the decoded length symbol (0-based; caller adds kMatchMinLen).
public uint Decode(RangeCoder.Decoder rangeDecoder, uint posState)
{
if (m_Choice.Decode(rangeDecoder) == 0)
return m_LowCoder[posState].Decode(rangeDecoder);
else
{
uint symbol = Base.kNumLowLenSymbols;
if (m_Choice2.Decode(rangeDecoder) == 0)
symbol += m_MidCoder[posState].Decode(rangeDecoder);
else
{
symbol += Base.kNumMidLenSymbols;
symbol += m_HighCoder.Decode(rangeDecoder);
}
return symbol;
}
}
}
// Decodes literal bytes, optionally conditioned on the byte at the last
// match distance ("match byte"), selected by (pos, prevByte) context.
private class LiteralDecoder
{
private struct Decoder2
{
private BitDecoder[] m_Decoders;
public void Create()
{
m_Decoders = new BitDecoder[0x300];
}
public void Init()
{
for (int i = 0; i < 0x300; i++) m_Decoders[i].Init();
}
// Plain 8-bit literal via a binary tree of bit models.
public byte DecodeNormal(RangeCoder.Decoder rangeDecoder)
{
uint symbol = 1;
do
symbol = (symbol << 1) | m_Decoders[symbol].Decode(rangeDecoder); while (symbol < 0x100);
return (byte) symbol;
}
// Literal decoded with the match byte as extra context; falls back to
// the normal tree once the decoded bit diverges from the match byte.
public byte DecodeWithMatchByte(RangeCoder.Decoder rangeDecoder, byte matchByte)
{
uint symbol = 1;
do
{
uint matchBit = (uint) (matchByte >> 7) & 1;
matchByte <<= 1;
uint bit = m_Decoders[((1 + matchBit) << 8) + symbol].Decode(rangeDecoder);
symbol = (symbol << 1) | bit;
if (matchBit != bit)
{
while (symbol < 0x100)
symbol = (symbol << 1) | m_Decoders[symbol].Decode(rangeDecoder);
break;
}
} while (symbol < 0x100);
return (byte) symbol;
}
}
private Decoder2[] m_Coders;
private int m_NumPrevBits;
private int m_NumPosBits;
private uint m_PosMask;
// (Re)allocates the 2^(lc+lp) literal coder contexts; no-op when the
// configuration is unchanged.
public void Create(int numPosBits, int numPrevBits)
{
if (m_Coders != null && m_NumPrevBits == numPrevBits &&
m_NumPosBits == numPosBits)
return;
m_NumPosBits = numPosBits;
m_PosMask = ((uint) 1 << numPosBits) - 1;
m_NumPrevBits = numPrevBits;
uint numStates = (uint) 1 << (m_NumPrevBits + m_NumPosBits);
m_Coders = new Decoder2[numStates];
for (uint i = 0; i < numStates; i++)
m_Coders[i].Create();
}
public void Init()
{
uint numStates = (uint) 1 << (m_NumPrevBits + m_NumPosBits);
for (uint i = 0; i < numStates; i++)
m_Coders[i].Init();
}
// Context index from position low bits and high bits of the previous byte.
private uint GetState(uint pos, byte prevByte)
{
return ((pos & m_PosMask) << m_NumPrevBits) + (uint) (prevByte >> (8 - m_NumPrevBits));
}
public byte DecodeNormal(RangeCoder.Decoder rangeDecoder, uint pos, byte prevByte)
{
return m_Coders[GetState(pos, prevByte)].DecodeNormal(rangeDecoder);
}
public byte DecodeWithMatchByte(RangeCoder.Decoder rangeDecoder, uint pos, byte prevByte, byte matchByte)
{
return m_Coders[GetState(pos, prevByte)].DecodeWithMatchByte(rangeDecoder, matchByte);
}
};
private LZ.OutWindow m_OutWindow;
// Adaptive bit models, indexed by (state, posState) or state alone.
private BitDecoder[] m_IsMatchDecoders = new BitDecoder[Base.kNumStates << Base.kNumPosStatesBitsMax];
private BitDecoder[] m_IsRepDecoders = new BitDecoder[Base.kNumStates];
private BitDecoder[] m_IsRepG0Decoders = new BitDecoder[Base.kNumStates];
private BitDecoder[] m_IsRepG1Decoders = new BitDecoder[Base.kNumStates];
private BitDecoder[] m_IsRepG2Decoders = new BitDecoder[Base.kNumStates];
private BitDecoder[] m_IsRep0LongDecoders = new BitDecoder[Base.kNumStates << Base.kNumPosStatesBitsMax];
private BitTreeDecoder[] m_PosSlotDecoder = new BitTreeDecoder[Base.kNumLenToPosStates];
private BitDecoder[] m_PosDecoders = new BitDecoder[Base.kNumFullDistances - Base.kEndPosModelIndex];
private BitTreeDecoder m_PosAlignDecoder = new BitTreeDecoder(Base.kNumAlignBits);
private LenDecoder m_LenDecoder = new LenDecoder();
private LenDecoder m_RepLenDecoder = new LenDecoder();
private LiteralDecoder m_LiteralDecoder = new LiteralDecoder();
// -1 until SetDecoderProperties supplies a dictionary size.
private int m_DictionarySize;
private uint m_PosStateMask;
private Base.State state = new Base.State();
// The four most recent match distances (LZMA "rep" distances).
private uint rep0, rep1, rep2, rep3;
public Decoder()
{
m_DictionarySize = -1;
for (int i = 0; i < Base.kNumLenToPosStates; i++)
m_PosSlotDecoder[i] = new BitTreeDecoder(Base.kNumPosSlotBits);
}
// Allocates the sliding output window (at least 4 KiB).
private void CreateDictionary()
{
if (m_DictionarySize < 0)
throw new InvalidParamException();
m_OutWindow = new LZ.OutWindow();
int blockSize = Math.Max(m_DictionarySize, (1 << 12));
m_OutWindow.Create(blockSize);
}
private void SetLiteralProperties(int lp, int lc)
{
if (lp > 8)
throw new InvalidParamException();
if (lc > 8)
throw new InvalidParamException();
m_LiteralDecoder.Create(lp, lc);
}
private void SetPosBitsProperties(int pb)
{
if (pb > Base.kNumPosStatesBitsMax)
throw new InvalidParamException();
uint numPosStates = (uint) 1 << pb;
m_LenDecoder.Create(numPosStates);
m_RepLenDecoder.Create(numPosStates);
m_PosStateMask = numPosStates - 1;
}
// Resets every probability model and the rep distances to their initial state.
private void Init()
{
uint i;
for (i = 0; i < Base.kNumStates; i++)
{
for (uint j = 0; j <= m_PosStateMask; j++)
{
uint index = (i << Base.kNumPosStatesBitsMax) + j;
m_IsMatchDecoders[index].Init();
m_IsRep0LongDecoders[index].Init();
}
m_IsRepDecoders[i].Init();
m_IsRepG0Decoders[i].Init();
m_IsRepG1Decoders[i].Init();
m_IsRepG2Decoders[i].Init();
}
m_LiteralDecoder.Init();
for (i = 0; i < Base.kNumLenToPosStates; i++)
m_PosSlotDecoder[i].Init();
// m_PosSpecDecoder.Init();
for (i = 0; i < Base.kNumFullDistances - Base.kEndPosModelIndex; i++)
m_PosDecoders[i].Init();
m_LenDecoder.Init();
m_RepLenDecoder.Init();
m_PosAlignDecoder.Init();
state.Init();
rep0 = 0;
rep1 = 0;
rep2 = 0;
rep3 = 0;
}
// One-shot ICoder entry point: decodes inStream to outStream, verifying
// the declared sizes and that the range coder terminated cleanly.
public void Code(System.IO.Stream inStream, System.IO.Stream outStream,
Int64 inSize, Int64 outSize, ICodeProgress progress)
{
if (m_OutWindow == null)
CreateDictionary();
m_OutWindow.Init(outStream);
if (outSize > 0)
m_OutWindow.SetLimit(outSize);
else
m_OutWindow.SetLimit(Int64.MaxValue - m_OutWindow.Total);
RangeCoder.Decoder rangeDecoder = new RangeCoder.Decoder();
rangeDecoder.Init(inStream);
Code(m_DictionarySize, m_OutWindow, rangeDecoder);
m_OutWindow.ReleaseStream();
rangeDecoder.ReleaseStream();
if (!rangeDecoder.IsFinished || (inSize > 0 && rangeDecoder.Total != inSize))
throw new DataErrorException();
if (m_OutWindow.HasPending)
throw new DataErrorException();
m_OutWindow = null;
}
// Core decode loop; fills outWindow until its limit is reached.
// Returns true when the end-of-stream marker (distance 0xFFFFFFFF) was seen.
internal bool Code(int dictionarySize, LZ.OutWindow outWindow, RangeCoder.Decoder rangeDecoder)
{
int dictionarySizeCheck = Math.Max(dictionarySize, 1);
outWindow.CopyPending();
while (outWindow.HasSpace)
{
uint posState = (uint) outWindow.Total & m_PosStateMask;
if (m_IsMatchDecoders[(state.Index << Base.kNumPosStatesBitsMax) + posState].Decode(rangeDecoder) == 0)
{
// Literal packet.
byte b;
byte prevByte = outWindow.GetByte(0);
if (!state.IsCharState())
b = m_LiteralDecoder.DecodeWithMatchByte(rangeDecoder,
(uint) outWindow.Total, prevByte,
outWindow.GetByte((int) rep0));
else
b = m_LiteralDecoder.DecodeNormal(rangeDecoder, (uint) outWindow.Total, prevByte);
outWindow.PutByte(b);
state.UpdateChar();
}
else
{
uint len;
if (m_IsRepDecoders[state.Index].Decode(rangeDecoder) == 1)
{
// Rep match: reuse one of the last four distances.
if (m_IsRepG0Decoders[state.Index].Decode(rangeDecoder) == 0)
{
if (
m_IsRep0LongDecoders[(state.Index << Base.kNumPosStatesBitsMax) + posState].Decode(
rangeDecoder) == 0)
{
// Short rep: single byte at distance rep0.
state.UpdateShortRep();
outWindow.PutByte(outWindow.GetByte((int) rep0));
continue;
}
}
else
{
// Select rep1/rep2/rep3 and rotate it to the front.
UInt32 distance;
if (m_IsRepG1Decoders[state.Index].Decode(rangeDecoder) == 0)
{
distance = rep1;
}
else
{
if (m_IsRepG2Decoders[state.Index].Decode(rangeDecoder) == 0)
distance = rep2;
else
{
distance = rep3;
rep3 = rep2;
}
rep2 = rep1;
}
rep1 = rep0;
rep0 = distance;
}
len = m_RepLenDecoder.Decode(rangeDecoder, posState) + Base.kMatchMinLen;
state.UpdateRep();
}
else
{
// New match: shift the rep distances and decode a fresh distance.
rep3 = rep2;
rep2 = rep1;
rep1 = rep0;
len = Base.kMatchMinLen + m_LenDecoder.Decode(rangeDecoder, posState);
state.UpdateMatch();
uint posSlot = m_PosSlotDecoder[Base.GetLenToPosState(len)].Decode(rangeDecoder);
if (posSlot >= Base.kStartPosModelIndex)
{
int numDirectBits = (int) ((posSlot >> 1) - 1);
rep0 = ((2 | (posSlot & 1)) << numDirectBits);
if (posSlot < Base.kEndPosModelIndex)
rep0 += BitTreeDecoder.ReverseDecode(m_PosDecoders,
rep0 - posSlot - 1, rangeDecoder, numDirectBits);
else
{
// Large distances: direct bits plus the 4-bit align coder.
rep0 += (rangeDecoder.DecodeDirectBits(
numDirectBits - Base.kNumAlignBits) << Base.kNumAlignBits);
rep0 += m_PosAlignDecoder.ReverseDecode(rangeDecoder);
}
}
else
rep0 = posSlot;
}
if (rep0 >= outWindow.Total || rep0 >= dictionarySizeCheck)
{
if (rep0 == 0xFFFFFFFF)
return true; // end-of-stream marker
throw new DataErrorException();
}
outWindow.CopyBlock((int) rep0, (int) len);
}
}
return false;
}
// Parses the standard 5-byte LZMA properties: byte 0 packs lc/lp/pb,
// bytes 1..4 are the little-endian dictionary size.
public void SetDecoderProperties(byte[] properties)
{
if (properties.Length < 1)
throw new InvalidParamException();
int lc = properties[0]%9;
int remainder = properties[0]/9;
int lp = remainder%5;
int pb = remainder/5;
if (pb > Base.kNumPosStatesBitsMax)
throw new InvalidParamException();
SetLiteralProperties(lp, lc);
SetPosBitsProperties(pb);
Init();
if (properties.Length >= 5)
{
m_DictionarySize = 0;
for (int i = 0; i < 4; i++)
m_DictionarySize += properties[1 + i] << (i*8);
}
}
// Pre-loads the output window with a preset dictionary.
public void Train(System.IO.Stream stream)
{
if (m_OutWindow == null)
CreateDictionary();
m_OutWindow.Train(stream);
}
/*
public override bool CanRead { get { return true; }}
public override bool CanWrite { get { return true; }}
public override bool CanSeek { get { return true; }}
public override long Length { get { return 0; }}
public override long Position
{
get { return 0; }
set { }
}
public override void Flush() { }
public override int Read(byte[] buffer, int offset, int count)
{
return 0;
}
public override void Write(byte[] buffer, int offset, int count)
{
}
public override long Seek(long offset, System.IO.SeekOrigin origin)
{
return 0;
}
public override void SetLength(long value) {}
*/
}
}

View File

@@ -1,55 +0,0 @@
namespace SharpCompress.Compressor.LZMA
{
/// <summary>
/// Bundles the coder property IDs and values used to configure an LZMA encoder.
/// </summary>
public class LzmaEncoderProperties
{
    internal CoderPropID[] propIDs;
    internal object[] properties;

    /// <summary>Defaults: no end marker, 1 MiB dictionary, 32 fast bytes.</summary>
    public LzmaEncoderProperties()
        : this(false)
    {
    }

    /// <summary>Uses a 1 MiB dictionary and 32 fast bytes.</summary>
    public LzmaEncoderProperties(bool eos)
        : this(eos, 1 << 20)
    {
    }

    /// <summary>Uses 32 fast bytes.</summary>
    public LzmaEncoderProperties(bool eos, int dictionary)
        : this(eos, dictionary, 32)
    {
    }

    /// <summary>
    /// Fully specified settings; the remaining parameters use the standard
    /// LZMA defaults (pb=2, lc=3, lp=0, algorithm=2, match finder "bt4").
    /// </summary>
    public LzmaEncoderProperties(bool eos, int dictionary, int numFastBytes)
    {
        int posStateBits = 2;
        int litContextBits = 3;
        int litPosBits = 0;
        int algorithm = 2;
        string matchFinder = "bt4";

        propIDs = new CoderPropID[]
        {
            CoderPropID.DictionarySize,
            CoderPropID.PosStateBits,
            CoderPropID.LitContextBits,
            CoderPropID.LitPosBits,
            CoderPropID.Algorithm,
            CoderPropID.NumFastBytes,
            CoderPropID.MatchFinder,
            CoderPropID.EndMarker
        };
        properties = new object[]
        {
            dictionary,
            posStateBits,
            litContextBits,
            litPosBits,
            algorithm,
            numFastBytes,
            matchFinder,
            eos
        };
    }
}
}

View File

@@ -1,308 +0,0 @@
using System;
using System.IO;
using SharpCompress.Compressor.LZMA.LZ;
namespace SharpCompress.Compressor.LZMA
{
// Stream adapter over the LZMA/LZMA2 decoder (read mode) or the LZMA
// encoder (write mode). In LZMA2 mode the input is a sequence of chunks,
// each introduced by a control byte (see decodeChunkHeader).
public class LzmaStream : Stream
{
private Stream inputStream;
// Declared compressed/uncompressed sizes; -1 means unknown.
private long inputSize;
private long outputSize;
private int dictionarySize;
private OutWindow outWindow = new OutWindow();
private RangeCoder.Decoder rangeDecoder = new RangeCoder.Decoder();
private Decoder decoder;
// Total uncompressed bytes read or written so far.
private long position = 0;
private bool endReached = false;
// Uncompressed bytes remaining in the current chunk.
private long availableBytes;
// Expected compressed size of the current chunk (for validation).
private long rangeDecoderLimit;
private long inputPosition = 0;
// LZMA2
private bool isLZMA2;
private bool uncompressedChunk = false;
private bool needDictReset = true;
private bool needProps = true;
private byte[] props = new byte[5];
private Encoder encoder;
private bool isDisposed;
public LzmaStream(byte[] properties, Stream inputStream)
: this(properties, inputStream, -1, -1, null, properties.Length < 5)
{
}
public LzmaStream(byte[] properties, Stream inputStream, long inputSize)
: this(properties, inputStream, inputSize, -1, null, properties.Length < 5)
{
}
public LzmaStream(byte[] properties, Stream inputStream, long inputSize, long outputSize)
: this(properties, inputStream, inputSize, outputSize, null, properties.Length < 5)
{
}
// Decoding constructor. For plain LZMA the 5-byte properties are parsed
// directly; for LZMA2 the single property byte encodes the dictionary size.
public LzmaStream(byte[] properties, Stream inputStream, long inputSize, long outputSize,
Stream presetDictionary, bool isLZMA2)
{
this.inputStream = inputStream;
this.inputSize = inputSize;
this.outputSize = outputSize;
this.isLZMA2 = isLZMA2;
if (!isLZMA2)
{
dictionarySize = BitConverter.ToInt32(properties, 1);
outWindow.Create(dictionarySize);
if (presetDictionary != null)
outWindow.Train(presetDictionary);
rangeDecoder.Init(inputStream);
decoder = new Decoder();
decoder.SetDecoderProperties(properties);
props = properties;
availableBytes = outputSize < 0 ? long.MaxValue : outputSize;
rangeDecoderLimit = inputSize;
}
else
{
// LZMA2 dictionary size: (2 | bit0) << ((prop >> 1) + 11).
dictionarySize = 2 | (properties[0] & 1);
dictionarySize <<= (properties[0] >> 1) + 11;
outWindow.Create(dictionarySize);
if (presetDictionary != null)
{
outWindow.Train(presetDictionary);
needDictReset = false;
}
props = new byte[1];
availableBytes = 0;
}
}
public LzmaStream(LzmaEncoderProperties properties, bool isLZMA2, Stream outputStream)
: this(properties, isLZMA2, null, outputStream)
{
}
// Encoding constructor; LZMA2 encoding is not implemented.
public LzmaStream(LzmaEncoderProperties properties, bool isLZMA2, Stream presetDictionary, Stream outputStream)
{
this.isLZMA2 = isLZMA2;
availableBytes = 0;
endReached = true;
if (isLZMA2)
throw new NotImplementedException();
encoder = new Encoder();
encoder.SetCoderProperties(properties.propIDs, properties.properties);
MemoryStream propStream = new MemoryStream(5);
encoder.WriteCoderProperties(propStream);
props = propStream.ToArray();
encoder.SetStreams(null, outputStream, -1, -1);
if (presetDictionary != null)
encoder.Train(presetDictionary);
}
public override bool CanRead
{
get { return encoder == null; }
}
public override bool CanSeek
{
get { return false; }
}
public override bool CanWrite
{
get { return encoder != null; }
}
public override void Flush()
{
}
// Flushes the encoder (writing any end marker) and releases the input stream.
protected override void Dispose(bool disposing)
{
if (isDisposed)
{
return;
}
isDisposed = true;
if (disposing)
{
if (encoder != null)
{
position = encoder.Code(null, true);
}
if (inputStream != null)
{
inputStream.Dispose();
}
}
base.Dispose(disposing);
}
public override long Length
{
get { return position + availableBytes; }
}
public override long Position
{
get { return position; }
set { throw new NotSupportedException(); }
}
// Decompresses up to count bytes. Chunk boundaries (LZMA2) are handled
// transparently; declared sizes are validated once the end is reached.
public override int Read(byte[] buffer, int offset, int count)
{
if (endReached)
return 0;
int total = 0;
while (total < count)
{
if (availableBytes == 0)
{
if (isLZMA2)
decodeChunkHeader();
else
endReached = true;
if (endReached)
break;
}
int toProcess = count - total;
if (toProcess > availableBytes)
toProcess = (int)availableBytes;
outWindow.SetLimit(toProcess);
if (uncompressedChunk)
{
inputPosition += outWindow.CopyStream(inputStream, toProcess);
}
else if (decoder.Code(dictionarySize, outWindow, rangeDecoder)
&& outputSize < 0)
{
// End marker seen and output size unknown: drain what remains.
availableBytes = outWindow.AvailableBytes;
}
int read = outWindow.Read(buffer, offset, toProcess);
total += read;
offset += read;
position += read;
availableBytes -= read;
if (availableBytes == 0 && !uncompressedChunk)
{
// Chunk finished: the range coder must have consumed exactly
// the declared number of compressed bytes.
rangeDecoder.ReleaseStream();
if (!rangeDecoder.IsFinished || (rangeDecoderLimit >= 0 && rangeDecoder.Total != rangeDecoderLimit))
throw new DataErrorException();
inputPosition += rangeDecoder.Total;
if (outWindow.HasPending)
throw new DataErrorException();
}
}
if (endReached)
{
if (inputSize >= 0 && inputPosition != inputSize)
throw new DataErrorException();
if (outputSize >= 0 && position != outputSize)
throw new DataErrorException();
}
return total;
}
// Parses one LZMA2 chunk header. Control byte semantics:
// 0x00 end of stream; 0x01/0x02 uncompressed chunk (with/without dict
// reset); >= 0x80 LZMA chunk, whose high bits select state/props/dict
// reset behavior and low 5 bits are the top bits of the unpacked size.
private void decodeChunkHeader()
{
int control = inputStream.ReadByte();
inputPosition++;
if (control == 0x00)
{
endReached = true;
return;
}
if (control >= 0xE0 || control == 0x01)
{
// Dictionary reset requested.
needProps = true;
needDictReset = false;
outWindow.Reset();
}
else if (needDictReset)
throw new DataErrorException();
if (control >= 0x80)
{
uncompressedChunk = false;
availableBytes = (control & 0x1F) << 16;
availableBytes += (inputStream.ReadByte() << 8) + inputStream.ReadByte() + 1;
inputPosition += 2;
rangeDecoderLimit = (inputStream.ReadByte() << 8) + inputStream.ReadByte() + 1;
inputPosition += 2;
if (control >= 0xC0)
{
// New properties byte follows.
needProps = false;
props[0] = (byte)inputStream.ReadByte();
inputPosition++;
decoder = new Decoder();
decoder.SetDecoderProperties(props);
}
else if (needProps)
throw new DataErrorException();
else if (control >= 0xA0)
{
// State reset with previous properties.
decoder = new Decoder();
decoder.SetDecoderProperties(props);
}
rangeDecoder.Init(inputStream);
}
else if (control > 0x02)
throw new DataErrorException();
else
{
uncompressedChunk = true;
availableBytes = (inputStream.ReadByte() << 8) + inputStream.ReadByte() + 1;
inputPosition += 2;
}
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException();
}
public override void SetLength(long value)
{
throw new NotSupportedException();
}
// Feeds the buffer through the LZMA encoder to the output stream.
public override void Write(byte[] buffer, int offset, int count)
{
if (encoder != null)
position = encoder.Code(new MemoryStream(buffer, offset, count), false);
}
// Encoder/decoder properties blob (5 bytes for LZMA, 1 for LZMA2).
public byte[] Properties
{
get { return props; }
}
}
}

View File

@@ -1,133 +0,0 @@
using System;
namespace SharpCompress.Compressor.LZMA.RangeCoder
{
// Adaptive single-bit probability model for the LZMA range encoder,
// including the precomputed bit-price table used by the optimizer.
internal struct BitEncoder
{
public const int kNumBitModelTotalBits = 11;
public const uint kBitModelTotal = (1 << kNumBitModelTotalBits);
private const int kNumMoveBits = 5;
private const int kNumMoveReducingBits = 2;
public const int kNumBitPriceShiftBits = 6;
// 11-bit probability of the next bit being 0.
private uint Prob;
// Resets the probability to 1/2.
public void Init()
{
Prob = kBitModelTotal >> 1;
}
// Shifts the probability toward the observed symbol.
public void UpdateModel(uint symbol)
{
if (symbol == 0)
Prob += (kBitModelTotal - Prob) >> kNumMoveBits;
else
Prob -= (Prob) >> kNumMoveBits;
}
// Encodes one bit, updating the model and renormalizing the encoder
// range when it drops below kTopValue.
public void Encode(Encoder encoder, uint symbol)
{
// encoder.EncodeBit(Prob, kNumBitModelTotalBits, symbol);
// UpdateModel(symbol);
uint newBound = (encoder.Range >> kNumBitModelTotalBits)*Prob;
if (symbol == 0)
{
encoder.Range = newBound;
Prob += (kBitModelTotal - Prob) >> kNumMoveBits;
}
else
{
encoder.Low += newBound;
encoder.Range -= newBound;
Prob -= (Prob) >> kNumMoveBits;
}
if (encoder.Range < Encoder.kTopValue)
{
encoder.Range <<= 8;
encoder.ShiftLow();
}
}
// Price (scaled -log2 probability) per reduced-probability bucket.
private static UInt32[] ProbPrices = new UInt32[kBitModelTotal >> kNumMoveReducingBits];
static BitEncoder()
{
// Builds the price table: cheaper prices for more probable buckets,
// linearly interpolated within each power-of-two band.
const int kNumBits = (kNumBitModelTotalBits - kNumMoveReducingBits);
for (int i = kNumBits - 1; i >= 0; i--)
{
UInt32 start = (UInt32) 1 << (kNumBits - i - 1);
UInt32 end = (UInt32) 1 << (kNumBits - i);
for (UInt32 j = start; j < end; j++)
ProbPrices[j] = ((UInt32) i << kNumBitPriceShiftBits) +
(((end - j) << kNumBitPriceShiftBits) >> (kNumBits - i - 1));
}
}
// Price of encoding the given symbol under the current probability.
public uint GetPrice(uint symbol)
{
return ProbPrices[(((Prob - symbol) ^ ((-(int) symbol))) & (kBitModelTotal - 1)) >> kNumMoveReducingBits];
}
public uint GetPrice0()
{
return ProbPrices[Prob >> kNumMoveReducingBits];
}
public uint GetPrice1()
{
return ProbPrices[(kBitModelTotal - Prob) >> kNumMoveReducingBits];
}
}
/// <summary>
/// Adaptive single-bit probability model for the LZMA range decoder
/// (11-bit probability, move constant 5).
/// </summary>
internal struct BitDecoder
{
    public const int kNumBitModelTotalBits = 11;
    public const uint kBitModelTotal = (1 << kNumBitModelTotalBits);
    private const int kNumMoveBits = 5;

    // 11-bit probability of the next bit being 0.
    private uint Prob;

    /// <summary>Shifts the probability toward the observed symbol.</summary>
    public void UpdateModel(int numMoveBits, uint symbol)
    {
        if (symbol == 0)
        {
            Prob += (kBitModelTotal - Prob) >> numMoveBits;
        }
        else
        {
            Prob -= Prob >> numMoveBits;
        }
    }

    /// <summary>Resets the probability to 1/2.</summary>
    public void Init()
    {
        Prob = kBitModelTotal >> 1;
    }

    /// <summary>
    /// Decodes one bit, updating the model and renormalizing the shared
    /// range-decoder state when its range drops below kTopValue.
    /// </summary>
    public uint Decode(RangeCoder.Decoder rangeDecoder)
    {
        uint bound = (rangeDecoder.Range >> kNumBitModelTotalBits) * Prob;

        uint bit;
        if (rangeDecoder.Code < bound)
        {
            bit = 0;
            rangeDecoder.Range = bound;
            Prob += (kBitModelTotal - Prob) >> kNumMoveBits;
        }
        else
        {
            bit = 1;
            rangeDecoder.Range -= bound;
            rangeDecoder.Code -= bound;
            Prob -= Prob >> kNumMoveBits;
        }

        // Renormalize: pull another byte from the input stream.
        if (rangeDecoder.Range < Decoder.kTopValue)
        {
            rangeDecoder.Code = (rangeDecoder.Code << 8) | (byte)rangeDecoder.Stream.ReadByte();
            rangeDecoder.Range <<= 8;
            rangeDecoder.Total++;
        }

        return bit;
    }
}
}

View File

@@ -1,209 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Compressor.LZMA.Utilites
{
/// <summary>
/// Write-only pass-through stream that folds everything written to it into
/// a running CRC before forwarding the bytes to the target stream.
/// </summary>
internal class CrcBuilderStream : Stream
{
    private long mProcessed;
    private Stream mTarget;
    private uint mCRC;
    private bool mFinished;
    private bool isDisposed;

    public CrcBuilderStream(Stream target)
    {
        mTarget = target;
        mCRC = CRC.kInitCRC;
    }

    protected override void Dispose(bool disposing)
    {
        if (isDisposed)
        {
            return;
        }
        isDisposed = true;
        // BUG FIX: only dispose the managed target on an explicit Dispose
        // (disposing == true); the previous code also disposed it from the
        // finalizer path, unlike the sibling ReadingCrcBuilderStream which
        // already guards correctly.
        if (disposing)
        {
            mTarget.Dispose();
        }
        base.Dispose(disposing);
    }

    /// <summary>Total number of bytes written through this stream.</summary>
    public long Processed
    {
        get { return mProcessed; }
    }

    /// <summary>Finalizes and returns the CRC; further writes are rejected.</summary>
    public uint Finish()
    {
        if (!mFinished)
        {
            mFinished = true;
            mCRC = CRC.Finish(mCRC);
        }
        return mCRC;
    }

    public override bool CanRead
    {
        get { return false; }
    }

    public override bool CanSeek
    {
        get { return false; }
    }

    public override bool CanWrite
    {
        get { return true; }
    }

    public override void Flush()
    {
    }

    public override long Length
    {
        get { throw new NotSupportedException(); }
    }

    public override long Position
    {
        get { throw new NotSupportedException(); }
        set { throw new NotSupportedException(); }
    }

    public override int Read(byte[] buffer, int offset, int count)
    {
        throw new InvalidOperationException();
    }

    public override long Seek(long offset, SeekOrigin origin)
    {
        throw new NotSupportedException();
    }

    public override void SetLength(long value)
    {
        throw new NotSupportedException();
    }

    /// <summary>Updates the CRC and forwards the bytes to the target stream.</summary>
    public override void Write(byte[] buffer, int offset, int count)
    {
        if (mFinished)
            throw new InvalidOperationException("CRC calculation has been finished.");
        mProcessed += count;
        mCRC = CRC.Update(mCRC, buffer, offset, count);
        mTarget.Write(buffer, offset, count);
    }
}
/// <summary>
/// Read-only pass-through stream that folds everything read from the source
/// into a running CRC, finalizing it automatically at end of stream.
/// </summary>
internal class ReadingCrcBuilderStream : Stream
{
    private long mProcessed;
    private Stream mSource;
    private uint mCRC;
    private bool mFinished;

    public ReadingCrcBuilderStream(Stream source)
    {
        mSource = source;
        mCRC = CRC.kInitCRC;
    }

    protected override void Dispose(bool disposing)
    {
        try
        {
            if (disposing)
            {
                mSource.Dispose();
            }
        }
        finally
        {
            base.Dispose(disposing);
        }
    }

    /// <summary>Total number of bytes read through this stream.</summary>
    public long Processed
    {
        get { return mProcessed; }
    }

    /// <summary>Finalizes and returns the CRC; further reads return 0.</summary>
    public uint Finish()
    {
        if (mFinished)
        {
            return mCRC;
        }
        mFinished = true;
        mCRC = CRC.Finish(mCRC);
        return mCRC;
    }

    public override bool CanRead
    {
        get { return mSource.CanRead; }
    }

    public override bool CanSeek
    {
        get { return false; }
    }

    public override bool CanWrite
    {
        get { return false; }
    }

    public override void Flush()
    {
        throw new NotSupportedException();
    }

    public override long Length
    {
        get { throw new NotSupportedException(); }
    }

    public override long Position
    {
        get { throw new NotSupportedException(); }
        set { throw new NotSupportedException(); }
    }

    /// <summary>Reads from the source, updating the CRC with whatever arrives.</summary>
    public override int Read(byte[] buffer, int offset, int count)
    {
        if (count <= 0 || mFinished)
        {
            return 0;
        }
        int fetched = mSource.Read(buffer, offset, count);
        if (fetched <= 0)
        {
            // End of source: lock in the digest.
            Finish();
            return 0;
        }
        mProcessed += fetched;
        mCRC = CRC.Update(mCRC, buffer, offset, fetched);
        return fetched;
    }

    public override long Seek(long offset, SeekOrigin origin)
    {
        throw new NotSupportedException();
    }

    public override void SetLength(long value)
    {
        throw new NotSupportedException();
    }

    public override void Write(byte[] buffer, int offset, int count)
    {
        throw new NotSupportedException();
    }
}
}

View File

@@ -1,114 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Compressor.LZMA.Utilites
{
/// <summary>
/// Write-only sink that accumulates a CRC of everything written and, on
/// dispose, verifies it against the expected digest.
/// BUG FIX: the original compared the raw running CRC against the expected
/// digest at the top of Dispose - i.e. BEFORE CRC.Finish had been applied -
/// so a correct stream would throw. The check now happens after the CRC is
/// finalized.
/// </summary>
internal class CrcCheckStream : Stream
{
    private readonly uint mExpectedCRC;
    private uint mCurrentCRC;
    private bool mClosed;

    // Per-byte-value histogram used only for the DEBUG entropy report.
    private long[] mBytes = new long[256];
    private long mLength;

    public CrcCheckStream(uint crc)
    {
        mExpectedCRC = crc;
        mCurrentCRC = CRC.kInitCRC;
    }

    protected override void Dispose(bool disposing)
    {
        try
        {
            if (disposing && !mClosed)
            {
                mClosed = true;
                mCurrentCRC = CRC.Finish(mCurrentCRC);
#if DEBUG
                if (mCurrentCRC == mExpectedCRC)
                    System.Diagnostics.Debug.WriteLine("CRC ok: " + mExpectedCRC.ToString("x8"));
                else
                {
                    System.Diagnostics.Debugger.Break();
                    System.Diagnostics.Debug.WriteLine("bad CRC");
                }
                double lengthInv = 1.0/mLength;
                double entropy = 0;
                for (int i = 0; i < 256; i++)
                {
                    if (mBytes[i] != 0)
                    {
                        double p = lengthInv*mBytes[i];
                        entropy -= p*Math.Log(p, 256);
                    }
                }
                System.Diagnostics.Debug.WriteLine("entropy: " + (int) (entropy*100) + "%");
#endif
            }
        }
        finally
        {
            base.Dispose(disposing);
        }
        // Verify only once the CRC has actually been finalized.
        if (mClosed && mCurrentCRC != mExpectedCRC)
            throw new InvalidOperationException();
    }

    public override bool CanRead
    {
        get { return false; }
    }

    public override bool CanSeek
    {
        get { return false; }
    }

    public override bool CanWrite
    {
        get { return true; }
    }

    public override void Flush()
    {
    }

    public override long Length
    {
        get { throw new NotSupportedException(); }
    }

    public override long Position
    {
        get { throw new NotSupportedException(); }
        set { throw new NotSupportedException(); }
    }

    public override int Read(byte[] buffer, int offset, int count)
    {
        throw new InvalidOperationException();
    }

    public override long Seek(long offset, SeekOrigin origin)
    {
        throw new NotSupportedException();
    }

    public override void SetLength(long value)
    {
        throw new NotSupportedException();
    }

    /// <summary>Folds the written bytes into the running CRC (and histogram).</summary>
    public override void Write(byte[] buffer, int offset, int count)
    {
        mLength += count;
        for (int i = 0; i < count; i++)
            mBytes[buffer[offset + i]]++;
        mCurrentCRC = CRC.Update(mCurrentCRC, buffer, offset, count);
    }
}
}

View File

@@ -1,928 +0,0 @@
using System.Text;
using System.IO;
using SharpCompress.Compressor.Rar;
namespace SharpCompress.Compressor.PPMd.H
{
// PPMd variant H model driver used for RAR decompression. Ported from the
// Java unrar port of the C++ original: raw pointers are emulated as integer
// offsets ("addresses") into the SubAllocator's heap byte array, and the
// temp* scratch objects are reused views over that heap to avoid allocation.
internal class ModelPPM
{
    // Allocates the inner arrays of the jagged SEE2 and binary-SEE tables.
    private void InitBlock()
    {
        for (int i = 0; i < 25; i++)
        {
            SEE2Cont[i] = new SEE2Context[16];
        }
        for (int i2 = 0; i2 < 128; i2++)
        {
            binSumm[i2] = new int[64];
        }
    }

    public SubAllocator SubAlloc
    {
        get { return subAlloc; }
    }

    public virtual SEE2Context DummySEE2Cont
    {
        get { return dummySEE2Cont; }
    }

    public virtual int InitRL
    {
        get { return initRL; }
    }

    // Byte-sized in the C++ original; masked to 0..255 on write.
    public virtual int EscCount
    {
        get { return escCount; }
        set { this.escCount = value & 0xff; }
    }

    public virtual int[] CharMask
    {
        get { return charMask; }
    }

    public virtual int NumMasked
    {
        get { return numMasked; }
        set { this.numMasked = value; }
    }

    // Byte-sized in the C++ original; masked to 0..255 on write.
    public virtual int PrevSuccess
    {
        get { return prevSuccess; }
        set { this.prevSuccess = value & 0xff; }
    }

    public virtual int InitEsc
    {
        get { return initEsc; }
        set { this.initEsc = value; }
    }

    public virtual int RunLength
    {
        get { return runLength; }
        set { this.runLength = value; }
    }

    // Byte-sized in the C++ original; masked to 0..255 on write.
    public virtual int HiBitsFlag
    {
        get { return hiBitsFlag; }
        set { this.hiBitsFlag = value & 0xff; }
    }

    public virtual int[][] BinSumm
    {
        get { return binSumm; }
    }

    internal RangeCoder Coder
    {
        get { return coder; }
    }

    internal State FoundState
    {
        get { return foundState; }
    }

    // The sub-allocator's backing byte array; all "addresses" index into this.
    public virtual byte[] Heap
    {
        get { return subAlloc.Heap; }
    }

    public virtual int OrderFall
    {
        get { return orderFall; }
    }

    public const int MAX_O = 64; /* maximum allowed model order */
    public const int INT_BITS = 7;
    public const int PERIOD_BITS = 7;
    public static readonly int TOT_BITS = INT_BITS + PERIOD_BITS;
    public static readonly int INTERVAL = 1 << INT_BITS;
    public static readonly int BIN_SCALE = 1 << TOT_BITS;
    public const int MAX_FREQ = 124;

    private SEE2Context[][] SEE2Cont = new SEE2Context[25][];
    private SEE2Context dummySEE2Cont;
    private PPMContext minContext; //medContext
    private PPMContext maxContext;
    private State foundState; // found next state transition
    private int numMasked, initEsc, orderFall, maxOrder, runLength, initRL;
    private int[] charMask = new int[256];
    private int[] NS2Indx = new int[256];
    private int[] NS2BSIndx = new int[256];
    private int[] HB2Flag = new int[256];

    // byte EscCount, PrevSuccess, HiBitsFlag;
    private int escCount, prevSuccess, hiBitsFlag;
    private int[][] binSumm = new int[128][]; // binary SEE-contexts
    private RangeCoder coder;
    private SubAllocator subAlloc = new SubAllocator();
    private static int[] InitBinEsc = new int[] {0x3CDD, 0x1F3F, 0x59BF, 0x48F3, 0x64A1, 0x5ABC, 0x6632, 0x6051};

    // Reusable scratch views over the heap (never re-allocated per call);
    // methods below alias them freely, so statement order is significant.
    private State tempState1 = new State(null);
    private State tempState2 = new State(null);
    private State tempState3 = new State(null);
    private State tempState4 = new State(null);
    private StateRef tempStateRef1 = new StateRef();
    private StateRef tempStateRef2 = new StateRef();
    private PPMContext tempPPMContext1 = new PPMContext(null);
    private PPMContext tempPPMContext2 = new PPMContext(null);
    private PPMContext tempPPMContext3 = new PPMContext(null);
    private PPMContext tempPPMContext4 = new PPMContext(null);

    // Scratch stack of state addresses used by createSuccessors.
    private int[] ps = new int[MAX_O];

    public ModelPPM()
    {
        InitBlock();
        minContext = null;
        maxContext = null;
        //medContext = null;
    }

    // Resets the model to its initial order-(-1) state: fresh allocator,
    // a root context holding all 256 symbols with frequency 1, and freshly
    // initialized binary/SEE escape tables.
    private void restartModelRare()
    {
        Utility.Fill(charMask, 0);
        subAlloc.initSubAllocator();
        initRL = -(maxOrder < 12 ? maxOrder : 12) - 1;
        int addr = subAlloc.allocContext();
        minContext.Address = addr;
        maxContext.Address = addr;
        minContext.setSuffix(0);
        orderFall = maxOrder;
        minContext.NumStats = 256;
        minContext.FreqData.SummFreq = minContext.NumStats + 1;
        addr = subAlloc.allocUnits(256/2);
        foundState.Address = addr;
        minContext.FreqData.SetStats(addr);
        State state = new State(subAlloc.Heap);
        addr = minContext.FreqData.GetStats();
        runLength = initRL;
        prevSuccess = 0;
        // Seed the root context: every byte value present once, no successor.
        for (int i = 0; i < 256; i++)
        {
            state.Address = addr + i*State.Size;
            state.Symbol = i;
            state.Freq = 1;
            state.SetSuccessor(0);
        }
        for (int i = 0; i < 128; i++)
        {
            for (int k = 0; k < 8; k++)
            {
                for (int m = 0; m < 64; m += 8)
                {
                    binSumm[i][k + m] = BIN_SCALE - InitBinEsc[k]/(i + 2);
                }
            }
        }
        for (int i = 0; i < 25; i++)
        {
            for (int k = 0; k < 16; k++)
            {
                SEE2Cont[i][k].Initialize(5*i + 10);
            }
        }
    }

    // Full model start-up for the given maximum order: restarts the model and
    // builds the NS2BSIndx / NS2Indx / HB2Flag lookup tables.
    private void startModelRare(int MaxOrder)
    {
        int i, k, m, Step;
        escCount = 1;
        this.maxOrder = MaxOrder;
        restartModelRare();

        // Bug Fixed
        NS2BSIndx[0] = 0;
        NS2BSIndx[1] = 2;
        for (int j = 0; j < 9; j++)
        {
            NS2BSIndx[2 + j] = 4;
        }
        for (int j = 0; j < 256 - 11; j++)
        {
            NS2BSIndx[11 + j] = 6;
        }
        for (i = 0; i < 3; i++)
        {
            NS2Indx[i] = i;
        }
        // Beyond 3, indices grow in ever-longer runs (step 1, 2, 3, ...).
        for (m = i, k = 1, Step = 1; i < 256; i++)
        {
            NS2Indx[i] = m;
            if ((--k) == 0)
            {
                k = ++Step;
                m++;
            }
        }
        for (int j = 0; j < 0x40; j++)
        {
            HB2Flag[j] = 0;
        }
        for (int j = 0; j < 0x100 - 0x40; j++)
        {
            HB2Flag[0x40 + j] = 0x08;
        }
        dummySEE2Cont.Shift = PERIOD_BITS;
    }

    // Clears the symbol-exclusion mask and resets the escape counter.
    private void clearMask()
    {
        escCount = 1;
        Utility.Fill(charMask, 0);
    }

    // Initializes the model from the RAR unpack stream header byte(s).
    // Returns false if decoding cannot proceed (no memory allocated yet, or
    // a degenerate order-1 request).
    internal bool decodeInit(Unpack unpackRead, int escChar)
    {
        int MaxOrder = unpackRead.Char & 0xff;
        bool reset = ((MaxOrder & 0x20) != 0);
        int MaxMB = 0;
        if (reset)
        {
            MaxMB = unpackRead.Char;
        }
        else
        {
            if (subAlloc.GetAllocatedMemory() == 0)
            {
                return (false);
            }
        }
        if ((MaxOrder & 0x40) != 0)
        {
            escChar = unpackRead.Char;
            unpackRead.PpmEscChar = escChar;
        }
        coder = new RangeCoder(unpackRead);
        if (reset)
        {
            MaxOrder = (MaxOrder & 0x1f) + 1;
            if (MaxOrder > 16)
            {
                MaxOrder = 16 + (MaxOrder - 16)*3;
            }
            if (MaxOrder == 1)
            {
                subAlloc.stopSubAllocator();
                return (false);
            }
            subAlloc.startSubAllocator((MaxMB + 1) << 20);
            minContext = new PPMContext(Heap);
            //medContext = new PPMContext(Heap);
            maxContext = new PPMContext(Heap);
            foundState = new State(Heap);
            dummySEE2Cont = new SEE2Context();
            for (int i = 0; i < 25; i++)
            {
                for (int j = 0; j < 16; j++)
                {
                    SEE2Cont[i][j] = new SEE2Context();
                }
            }
            startModelRare(MaxOrder);
        }
        return (minContext.Address != 0);
    }

    // Decodes one symbol using the model's own RangeCoder (RAR path).
    // Returns the symbol, or -1 on a corrupt/inconsistent model state.
    public virtual int decodeChar()
    {
        // Debug
        //subAlloc.dumpHeap();

        if (minContext.Address <= subAlloc.PText || minContext.Address > subAlloc.HeapEnd)
        {
            return (-1);
        }

        if (minContext.NumStats != 1)
        {
            if (minContext.FreqData.GetStats() <= subAlloc.PText ||
                minContext.FreqData.GetStats() > subAlloc.HeapEnd)
            {
                return (-1);
            }
            if (!minContext.decodeSymbol1(this))
            {
                return (-1);
            }
        }
        else
        {
            // Single-symbol context: decode via the binary SEE estimator.
            minContext.decodeBinSymbol(this);
        }
        coder.Decode();
        // Escape loop: walk suffix contexts until the symbol is found.
        while (foundState.Address == 0)
        {
            coder.AriDecNormalize();
            do
            {
                orderFall++;
                minContext.Address = minContext.getSuffix(); // =MinContext->Suffix;
                if (minContext.Address <= subAlloc.PText || minContext.Address > subAlloc.HeapEnd)
                {
                    return (-1);
                }
            } while (minContext.NumStats == numMasked);
            if (!minContext.decodeSymbol2(this))
            {
                return (-1);
            }
            coder.Decode();
        }
        int Symbol = foundState.Symbol;
        if ((orderFall == 0) && foundState.GetSuccessor() > subAlloc.PText)
        {
            // MinContext=MaxContext=FoundState->Successor;
            int addr = foundState.GetSuccessor();
            minContext.Address = addr;
            maxContext.Address = addr;
        }
        else
        {
            updateModel();
            //this.foundState.Address=foundState.Address);//TODO just 4 debugging
            if (escCount == 0)
            {
                clearMask();
            }
        }
        coder.AriDecNormalize(); // ARI_DEC_NORMALIZE(Coder.code,Coder.low,Coder.range,Coder.UnpackRead);
        return (Symbol);
    }

    public virtual SEE2Context[][] getSEE2Cont()
    {
        return SEE2Cont;
    }

    public virtual void incEscCount(int dEscCount)
    {
        EscCount = EscCount + dEscCount;
    }

    public virtual void incRunLength(int dRunLength)
    {
        RunLength = RunLength + dRunLength;
    }

    public virtual int[] getHB2Flag()
    {
        return HB2Flag;
    }

    public virtual int[] getNS2BSIndx()
    {
        return NS2BSIndx;
    }

    public virtual int[] getNS2Indx()
    {
        return NS2Indx;
    }

    // Builds the chain of successor contexts for the found state (PPMd's
    // CreateSuccessors). Returns the address of the resulting context, or 0
    // when allocation fails and the model must be restarted.
    private int createSuccessors(bool Skip, State p1)
    {
        //State upState = tempState1.Initialize(null);
        StateRef upState = tempStateRef2;
        State tempState = tempState1.Initialize(Heap);

        // PPM_CONTEXT* pc=MinContext, * UpBranch=FoundState->Successor;
        PPMContext pc = tempPPMContext1.Initialize(Heap);
        pc.Address = minContext.Address;
        PPMContext upBranch = tempPPMContext2.Initialize(Heap);
        upBranch.Address = foundState.GetSuccessor();

        // STATE * p, * ps[MAX_O], ** pps=ps;
        State p = tempState2.Initialize(Heap);
        int pps = 0;

        bool noLoop = false;

        if (!Skip)
        {
            ps[pps++] = foundState.Address; // *pps++ = FoundState;
            if (pc.getSuffix() == 0)
            {
                noLoop = true;
            }
        }
        if (!noLoop)
        {
            bool loopEntry = false;
            if (p1.Address != 0)
            {
                p.Address = p1.Address;
                pc.Address = pc.getSuffix(); // =pc->Suffix;
                loopEntry = true;
            }
            do
            {
                if (!loopEntry)
                {
                    pc.Address = pc.getSuffix(); // pc=pc->Suffix;
                    if (pc.NumStats != 1)
                    {
                        p.Address = pc.FreqData.GetStats(); // p=pc->U.Stats
                        if (p.Symbol != foundState.Symbol)
                        {
                            do
                            {
                                p.IncrementAddress();
                            } while (p.Symbol != foundState.Symbol);
                        }
                    }
                    else
                    {
                        p.Address = pc.getOneState().Address; // p=&(pc->OneState);
                    }
                } // LOOP_ENTRY:
                loopEntry = false;
                if (p.GetSuccessor() != upBranch.Address)
                {
                    pc.Address = p.GetSuccessor(); // =p->Successor;
                    break;
                }
                ps[pps++] = p.Address;
            } while (pc.getSuffix() != 0);
        } // NO_LOOP:
        if (pps == 0)
        {
            return pc.Address;
        }
        upState.Symbol = Heap[upBranch.Address]; // UpState.Symbol=*(byte*)
        // UpBranch;
        // UpState.Successor=(PPM_CONTEXT*) (((byte*) UpBranch)+1);
        upState.SetSuccessor(upBranch.Address + 1); //TODO check if +1 necessary
        if (pc.NumStats != 1)
        {
            if (pc.Address <= subAlloc.PText)
            {
                return (0);
            }
            p.Address = pc.FreqData.GetStats();
            if (p.Symbol != upState.Symbol)
            {
                do
                {
                    p.IncrementAddress();
                } while (p.Symbol != upState.Symbol);
            }
            int cf = p.Freq - 1;
            int s0 = pc.FreqData.SummFreq - pc.NumStats - cf;

            // UpState.Freq=1+((2*cf <= s0)?(5*cf > s0):((2*cf+3*s0-1)/(2*s0)));
            upState.Freq = 1 + ((2*cf <= s0) ? (5*cf > s0 ? 1 : 0) : ((2*cf + 3*s0 - 1)/(2*s0)));
        }
        else
        {
            upState.Freq = pc.getOneState().Freq; // UpState.Freq=pc->OneState.Freq;
        }
        // Unwind the collected states, creating a child context for each.
        do
        {
            // pc = pc->createChild(this,*--pps,UpState);
            tempState.Address = ps[--pps];
            pc.Address = pc.createChild(this, tempState, upState);
            if (pc.Address == 0)
            {
                return 0;
            }
        } while (pps != 0);
        return pc.Address;
    }

    // Restart after an unrecoverable allocation failure during update.
    private void updateModelRestart()
    {
        restartModelRare();
        escCount = 0;
    }

    // PPMd's UpdateModel: adapts frequencies along the suffix chain and
    // creates new contexts/states for the just-decoded symbol.
    private void updateModel()
    {
        //System.out.println("ModelPPM.updateModel()");
        // STATE fs = *FoundState, *p = NULL;
        StateRef fs = tempStateRef1;
        fs.Values = foundState;
        State p = tempState3.Initialize(Heap);
        State tempState = tempState4.Initialize(Heap);

        PPMContext pc = tempPPMContext3.Initialize(Heap);
        PPMContext successor = tempPPMContext4.Initialize(Heap);

        int ns1, ns, cf, sf, s0;
        pc.Address = minContext.getSuffix();
        // Bump the found symbol's frequency in the suffix context, keeping the
        // stats array sorted by frequency (swap with the predecessor if needed).
        if (fs.Freq < MAX_FREQ/4 && pc.Address != 0)
        {
            if (pc.NumStats != 1)
            {
                p.Address = pc.FreqData.GetStats();
                if (p.Symbol != fs.Symbol)
                {
                    do
                    {
                        p.IncrementAddress();
                    } while (p.Symbol != fs.Symbol);
                    tempState.Address = p.Address - State.Size;
                    if (p.Freq >= tempState.Freq)
                    {
                        State.PPMDSwap(p, tempState);
                        p.DecrementAddress();
                    }
                }
                if (p.Freq < MAX_FREQ - 9)
                {
                    p.IncrementFreq(2);
                    pc.FreqData.IncrementSummFreq(2);
                }
            }
            else
            {
                p.Address = pc.getOneState().Address;
                if (p.Freq < 32)
                {
                    p.IncrementFreq(1);
                }
            }
        }
        if (orderFall == 0)
        {
            foundState.SetSuccessor(createSuccessors(true, p));
            minContext.Address = foundState.GetSuccessor();
            maxContext.Address = foundState.GetSuccessor();
            if (minContext.Address == 0)
            {
                updateModelRestart();
                return;
            }
            return;
        }
        // Append the symbol to the pseudo-text area of the heap.
        subAlloc.Heap[subAlloc.PText] = (byte) fs.Symbol;
        subAlloc.incPText();
        successor.Address = subAlloc.PText;
        if (subAlloc.PText >= subAlloc.FakeUnitsStart)
        {
            updateModelRestart();
            return;
        }
        //        // Debug
        //        subAlloc.dumpHeap();
        if (fs.GetSuccessor() != 0)
        {
            if (fs.GetSuccessor() <= subAlloc.PText)
            {
                fs.SetSuccessor(createSuccessors(false, p));
                if (fs.GetSuccessor() == 0)
                {
                    updateModelRestart();
                    return;
                }
            }
            if (--orderFall == 0)
            {
                successor.Address = fs.GetSuccessor();
                if (maxContext.Address != minContext.Address)
                {
                    subAlloc.decPText(1);
                }
            }
        }
        else
        {
            foundState.SetSuccessor(successor.Address);
            fs.SetSuccessor(minContext);
        }
        //        // Debug
        //        subAlloc.dumpHeap();
        // Add the new symbol to every context between maxContext and minContext.
        ns = minContext.NumStats;
        s0 = minContext.FreqData.SummFreq - (ns) - (fs.Freq - 1);
        for (pc.Address = maxContext.Address; pc.Address != minContext.Address; pc.Address = pc.getSuffix())
        {
            if ((ns1 = pc.NumStats) != 1)
            {
                if ((ns1 & 1) == 0)
                {
                    //System.out.println(ns1);
                    pc.FreqData.SetStats(subAlloc.expandUnits(pc.FreqData.GetStats(), Utility.URShift(ns1, 1)));
                    if (pc.FreqData.GetStats() == 0)
                    {
                        updateModelRestart();
                        return;
                    }
                }

                // bug fixed
                //                int sum = ((2 * ns1 < ns) ? 1 : 0) +
                //                        2 * ((4 * ((ns1 <= ns) ? 1 : 0)) & ((pc.getFreqData()
                //                                .getSummFreq() <= 8 * ns1) ? 1 : 0));
                int sum = ((2*ns1 < ns) ? 1 : 0) +
                          2*(((4*ns1 <= ns) ? 1 : 0) & ((pc.FreqData.SummFreq <= 8*ns1) ? 1 : 0));
                pc.FreqData.IncrementSummFreq(sum);
            }
            else
            {
                // Grow a single-state context into a two-state stats array.
                p.Address = subAlloc.allocUnits(1);
                if (p.Address == 0)
                {
                    updateModelRestart();
                    return;
                }
                p.SetValues(pc.getOneState());
                pc.FreqData.SetStats(p);
                if (p.Freq < MAX_FREQ/4 - 1)
                {
                    p.IncrementFreq(p.Freq);
                }
                else
                {
                    p.Freq = MAX_FREQ - 4;
                }
                pc.FreqData.SummFreq = (p.Freq + initEsc + (ns > 3 ? 1 : 0));
            }
            // Estimate the new symbol's frequency in this context.
            cf = 2*fs.Freq*(pc.FreqData.SummFreq + 6);
            sf = s0 + pc.FreqData.SummFreq;
            if (cf < 6*sf)
            {
                cf = 1 + (cf > sf ? 1 : 0) + (cf >= 4*sf ? 1 : 0);
                pc.FreqData.IncrementSummFreq(3);
            }
            else
            {
                cf = 4 + (cf >= 9*sf ? 1 : 0) + (cf >= 12*sf ? 1 : 0) + (cf >= 15*sf ? 1 : 0);
                pc.FreqData.IncrementSummFreq(cf);
            }
            p.Address = pc.FreqData.GetStats() + ns1*State.Size;
            p.SetSuccessor(successor);
            p.Symbol = fs.Symbol;
            p.Freq = cf;
            pc.NumStats = ++ns1;
        }
        int address = fs.GetSuccessor();
        maxContext.Address = address;
        minContext.Address = address;
        //TODO-----debug
        //        int pos = minContext.getFreqData().getStats();
        //        State a = new State(getHeap());
        //        a.Address=pos);
        //        pos+=State.size;
        //        a.Address=pos);
        //--dbg end
        return;
    }

    // Debug
    public override System.String ToString()
    {
        StringBuilder buffer = new StringBuilder();
        buffer.Append("ModelPPM[");
        buffer.Append("\n numMasked=");
        buffer.Append(numMasked);
        buffer.Append("\n initEsc=");
        buffer.Append(initEsc);
        buffer.Append("\n orderFall=");
        buffer.Append(orderFall);
        buffer.Append("\n maxOrder=");
        buffer.Append(maxOrder);
        buffer.Append("\n runLength=");
        buffer.Append(runLength);
        buffer.Append("\n initRL=");
        buffer.Append(initRL);
        buffer.Append("\n escCount=");
        buffer.Append(escCount);
        buffer.Append("\n prevSuccess=");
        buffer.Append(prevSuccess);
        buffer.Append("\n foundState=");
        buffer.Append(foundState);
        buffer.Append("\n coder=");
        buffer.Append(coder);
        buffer.Append("\n subAlloc=");
        buffer.Append(subAlloc);
        buffer.Append("\n]");
        return buffer.ToString();
    }

    // Debug
    //    public void dumpHeap() {
    //        subAlloc.dumpHeap();
    //    }

    // Initializes the model for the stream-based (7z-style) decoding path.
    // A null stream reuses the previously configured coder.
    internal bool decodeInit(Stream stream, int maxOrder, int maxMemory)
    {
        if (stream != null)
            coder = new RangeCoder(stream);

        if (maxOrder == 1)
        {
            subAlloc.stopSubAllocator();
            return (false);
        }
        subAlloc.startSubAllocator(maxMemory);
        minContext = new PPMContext(Heap);
        //medContext = new PPMContext(Heap);
        maxContext = new PPMContext(Heap);
        foundState = new State(Heap);
        dummySEE2Cont = new SEE2Context();
        for (int i = 0; i < 25; i++)
        {
            for (int j = 0; j < 16; j++)
            {
                SEE2Cont[i][j] = new SEE2Context();
            }
        }
        startModelRare(maxOrder);

        return (minContext.Address != 0);
    }

    // Advances minContext/maxContext after a decoded symbol, either by
    // following the successor directly or by running a full model update.
    internal void nextContext()
    {
        int addr = foundState.GetSuccessor();
        if (orderFall == 0 && addr > subAlloc.PText)
        {
            minContext.Address = addr;
            maxContext.Address = addr;
        }
        else
            updateModel();
    }

    // Decodes one symbol using an external LZMA-style range decoder.
    // Returns the symbol, -1 on corrupt model state, -2 on data error.
    public int decodeChar(LZMA.RangeCoder.Decoder decoder)
    {
        if (minContext.NumStats != 1)
        {
            State s = tempState1.Initialize(Heap);
            s.Address = minContext.FreqData.GetStats();
            int i;
            int count, hiCnt;
            if ((count = (int) decoder.GetThreshold((uint) minContext.FreqData.SummFreq)) < (hiCnt = s.Freq))
            {
                byte symbol;
                decoder.Decode(0, (uint) s.Freq);
                symbol = (byte) s.Symbol;
                minContext.update1_0(this, s.Address);
                nextContext();
                return symbol;
            }
            prevSuccess = 0;
            i = minContext.NumStats - 1;
            do
            {
                s.IncrementAddress();
                if ((hiCnt += s.Freq) > count)
                {
                    byte symbol;
                    decoder.Decode((uint) (hiCnt - s.Freq), (uint) s.Freq);
                    symbol = (byte) s.Symbol;
                    minContext.update1(this, s.Address);
                    nextContext();
                    return symbol;
                }
            } while (--i > 0);
            if (count >= minContext.FreqData.SummFreq)
                return -2;
            // Escape: mask out every symbol seen in this context.
            hiBitsFlag = HB2Flag[foundState.Symbol];
            decoder.Decode((uint) hiCnt, (uint) (minContext.FreqData.SummFreq - hiCnt));
            for (i = 0; i < 256; i++)
                charMask[i] = -1;
            charMask[s.Symbol] = 0;
            i = minContext.NumStats - 1;
            do
            {
                s.DecrementAddress();
                charMask[s.Symbol] = 0;
            } while (--i > 0);
        }
        else
        {
            // Binary (single-symbol) context.
            State rs = tempState1.Initialize(Heap);
            rs.Address = minContext.getOneState().Address;
            hiBitsFlag = getHB2Flag()[foundState.Symbol];
            int off1 = rs.Freq - 1;
            int off2 = minContext.getArrayIndex(this, rs);
            int bs = binSumm[off1][off2];
            if (decoder.DecodeBit((uint) bs, 14) == 0)
            {
                byte symbol;
                binSumm[off1][off2] = (bs + INTERVAL - minContext.getMean(bs, PERIOD_BITS, 2)) & 0xFFFF;
                foundState.Address = rs.Address;
                symbol = (byte) rs.Symbol;
                rs.IncrementFreq((rs.Freq < 128) ? 1 : 0);
                prevSuccess = 1;
                incRunLength(1);
                nextContext();
                return symbol;
            }
            bs = (bs - minContext.getMean(bs, PERIOD_BITS, 2)) & 0xFFFF;
            binSumm[off1][off2] = bs;
            initEsc = PPMContext.ExpEscape[Utility.URShift(bs, 10)];
            int i;
            for (i = 0; i < 256; i++)
                charMask[i] = -1;
            charMask[rs.Symbol] = 0;
            prevSuccess = 0;
        }
        // Escape loop: climb suffix contexts until the symbol is decoded.
        for (;;)
        {
            State s = tempState1.Initialize(Heap);
            int i;
            int freqSum, count, hiCnt;
            SEE2Context see;
            // NOTE: local numMasked intentionally shadows the field here.
            int num, numMasked = minContext.NumStats;
            do
            {
                orderFall++;
                minContext.Address = minContext.getSuffix();
                if (minContext.Address <= subAlloc.PText || minContext.Address > subAlloc.HeapEnd)
                    return -1;
            } while (minContext.NumStats == numMasked);
            hiCnt = 0;
            s.Address = minContext.FreqData.GetStats();
            i = 0;
            num = minContext.NumStats - numMasked;
            do
            {
                // charMask is -1 for unmasked symbols and 0 for masked ones,
                // so '& k' adds the freq only for unmasked symbols and '-= k'
                // counts them.
                int k = charMask[s.Symbol];
                hiCnt += s.Freq & k;
                minContext.ps[i] = s.Address;
                s.IncrementAddress();
                i -= k;
            } while (i != num);

            see = minContext.makeEscFreq(this, numMasked, out freqSum);
            freqSum += hiCnt;
            count = (int) decoder.GetThreshold((uint) freqSum);

            if (count < hiCnt)
            {
                byte symbol;
                State ps = tempState2.Initialize(Heap);
                for (hiCnt = 0, i = 0, ps.Address = minContext.ps[i];
                     (hiCnt += ps.Freq) <= count;
                     i++, ps.Address = minContext.ps[i]) ;
                s.Address = ps.Address;
                decoder.Decode((uint) (hiCnt - s.Freq), (uint) s.Freq);
                see.update();
                symbol = (byte) s.Symbol;
                minContext.update2(this, s.Address);
                updateModel();
                return symbol;
            }
            if (count >= freqSum)
                return -2;
            decoder.Decode((uint) hiCnt, (uint) (freqSum - hiCnt));
            see.Summ = see.Summ + freqSum;
            do
            {
                s.Address = minContext.ps[--i];
                charMask[s.Symbol] = 0;
            } while (i != 0);
        }
    }
}
}

View File

@@ -1,166 +0,0 @@
using System.Text;
using System.IO;
using SharpCompress.Compressor.Rar;
namespace SharpCompress.Compressor.PPMd.H
{
// Carry-less range decoder for PPMd variant H. State is held in longs but is
// masked with UintMask after every operation to reproduce the unsigned 32-bit
// wrap-around arithmetic of the C++ original. Exact statement order matters
// throughout (several members mutate 'range' as a side effect).
internal class RangeCoder
{
    internal const int TOP = 1 << 24;
    internal const int BOT = 1 << 15;
    internal const long UintMask = 0xFFFFffffL;

    // uint low, code, range;  (emulated 32-bit unsigned decoder state)
    private long low, code, range;
    private Unpack unpackRead;
    private Stream stream;

    internal RangeCoder(Unpack unpackRead)
    {
        this.unpackRead = unpackRead;
        Init();
    }

    internal RangeCoder(Stream stream)
    {
        this.stream = stream;
        Init();
    }

    // Primes the decoder with the first four input bytes.
    private void Init()
    {
        this.SubRange = new SubRange();

        low = code = 0L;
        range = 0xFFFFffffL;
        for (int i = 0; i < 4; i++)
        {
            code = ((code << 8) | Char) & UintMask;
        }
    }

    // NOTE: reading this property divides 'range' by SubRange.Scale as a side
    // effect, mirroring the original macro; callers depend on that mutation.
    internal int CurrentCount
    {
        get
        {
            range = (range/SubRange.Scale) & UintMask;
            return (int) ((code - low)/(range));
        }
    }

    // Next input byte from whichever source this coder was built with;
    // -1 when neither source is available.
    private long Char
    {
        get
        {
            if (unpackRead != null)
                return (unpackRead.Char);
            if (stream != null)
                return stream.ReadByte();
            return -1;
        }
    }

    internal SubRange SubRange { get; private set; }

    // Shifts 'range' right by SHIFT (side effect) and returns the symbol count.
    internal long GetCurrentShiftCount(int SHIFT)
    {
        range = Utility.URShift(range, SHIFT);
        return ((code - low)/(range)) & UintMask;
    }

    // Narrows [low, low+range) to the sub-interval selected by SubRange.
    internal void Decode()
    {
        low = (low + (range*SubRange.LowCount)) & UintMask;
        range = (range*(SubRange.HighCount - SubRange.LowCount)) & UintMask;
    }

    internal void AriDecNormalize()
    {
        //        while ((low ^ (low + range)) < TOP || range < BOT && ((range = -low & (BOT - 1)) != 0 ? true : true))
        //        {
        //            code = ((code << 8) | unpackRead.getChar()&0xff)&uintMask;
        //            range = (range << 8)&uintMask;
        //            low = (low << 8)&uintMask;
        //        }

        // Rewrote for clarity: the c2 flag records whether the range-underflow
        // branch fired, because the 'range' re-assignment must only happen in
        // that case (the original relied on a side effect inside the condition).
        bool c2 = false;
        while ((low ^ (low + range)) < TOP || (c2 = range < BOT))
        {
            if (c2)
            {
                range = (-low & (BOT - 1)) & UintMask;
                c2 = false;
            }
            code = ((code << 8) | Char) & UintMask;
            range = (range << 8) & UintMask;
            low = (low << 8) & UintMask;
        }
    }

    // Debug
    public override System.String ToString()
    {
        StringBuilder buffer = new StringBuilder();
        buffer.Append("RangeCoder[");
        buffer.Append("\n low=");
        buffer.Append(low);
        buffer.Append("\n code=");
        buffer.Append(code);
        buffer.Append("\n range=");
        buffer.Append(range);
        buffer.Append("\n subrange=");
        buffer.Append(SubRange);
        buffer.Append("]");
        return buffer.ToString();
    }
}
// Holds the cumulative-frequency interval [LowCount, HighCount) and total
// Scale for the current symbol. Values emulate unsigned 32-bit integers
// stored in longs; every setter masks with RangeCoder.UintMask.
internal class SubRange
{
    // uint LowCount, HighCount, scale;
    private long lowCount, highCount, scale;

    internal void incScale(int dScale)
    {
        Scale += dScale;
    }

    internal long HighCount
    {
        get => highCount;
        set => highCount = value & RangeCoder.UintMask;
    }

    internal long LowCount
    {
        get => lowCount & RangeCoder.UintMask;
        set => lowCount = value & RangeCoder.UintMask;
    }

    internal long Scale
    {
        get => scale;
        set => scale = value & RangeCoder.UintMask;
    }

    // Debug
    public override System.String ToString()
    {
        return new StringBuilder()
            .Append("SubRange[")
            .Append("\n lowCount=")
            .Append(lowCount)
            .Append("\n highCount=")
            .Append(highCount)
            .Append("\n scale=")
            .Append(scale)
            .Append("]")
            .ToString();
    }
}
}

View File

@@ -1,89 +0,0 @@
using System.Text;
namespace SharpCompress.Compressor.PPMd.H
{
// SEE2 escape-probability context from PPMd var.H. The C++ original packs
// summ/shift/count into ushort/byte/byte; here ints are used and masked to
// the same widths on every write.
internal class SEE2Context
{
    public const int size = 4;

    // ushort Summ;
    private int summ;
    // byte Shift;
    private int shift;
    // byte Count;
    private int count;

    // NOTE: reading Mean mutates summ (summ -= mean), exactly as the original
    // PPMd macro does — this getter is deliberately not pure.
    public virtual int Mean
    {
        get
        {
            int mean = Utility.URShift(summ, shift);
            summ -= mean;
            return mean == 0 ? 1 : mean;
        }
    }

    public virtual int Count
    {
        get => count;
        set => count = value & 0xff;
    }

    public virtual int Shift
    {
        get => shift;
        set => shift = value & 0xff;
    }

    public virtual int Summ
    {
        get => summ;
        set => summ = value & 0xffff;
    }

    public void Initialize(int initVal)
    {
        count = 4;
        shift = (ModelPPM.PERIOD_BITS - 4) & 0xff;
        summ = (initVal << shift) & 0xffff;
    }

    public virtual void update()
    {
        // Halving period reached: double summ and restart the counter.
        if (shift < ModelPPM.PERIOD_BITS && --count == 0)
        {
            summ <<= 1;
            count = 3 << shift++;
        }
        // Re-apply the original field widths.
        summ &= 0xffff;
        count &= 0xff;
        shift &= 0xff;
    }

    public virtual void incSumm(int dSumm)
    {
        Summ += dSumm;
    }

    public override System.String ToString()
    {
        return new StringBuilder()
            .Append("SEE2Context[")
            .Append("\n size=")
            .Append(size)
            .Append("\n summ=")
            .Append(summ)
            .Append("\n shift=")
            .Append(shift)
            .Append("\n count=")
            .Append(count)
            .Append("\n]")
            .ToString();
    }
}
}

View File

@@ -1,466 +0,0 @@
using System;
using System.Text;
namespace SharpCompress.Compressor.PPMd.H
{
internal class SubAllocator
{
public virtual int FakeUnitsStart
{
get { return fakeUnitsStart; }
set { this.fakeUnitsStart = value; }
}
public virtual int HeapEnd
{
get { return heapEnd; }
}
public virtual int PText
{
get { return pText; }
set { pText = value; }
}
public virtual int UnitsStart
{
get { return unitsStart; }
set { this.unitsStart = value; }
}
public virtual byte[] Heap
{
get { return heap; }
}
//UPGRADE_NOTE: Final was removed from the declaration of 'N4 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
public const int N1 = 4;
public const int N2 = 4;
public const int N3 = 4;
public static readonly int N4 = (128 + 3 - 1*N1 - 2*N2 - 3*N3)/4;
//UPGRADE_NOTE: Final was removed from the declaration of 'N_INDEXES '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
public static readonly int N_INDEXES = N1 + N2 + N3 + N4;
//UPGRADE_NOTE: Final was removed from the declaration of 'UNIT_SIZE '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
//UPGRADE_NOTE: The initialization of 'UNIT_SIZE' was moved to static method 'SharpCompress.Unpack.PPM.SubAllocator'. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1005'"
public static readonly int UNIT_SIZE;
public const int FIXED_UNIT_SIZE = 12;
private int subAllocatorSize;
// byte Indx2Units[N_INDEXES], Units2Indx[128], GlueCount;
private int[] indx2Units = new int[N_INDEXES];
private int[] units2Indx = new int[128];
private int glueCount;
// byte *HeapStart,*LoUnit, *HiUnit;
private int heapStart, loUnit, hiUnit;
//UPGRADE_NOTE: Final was removed from the declaration of 'freeList '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private RarNode[] freeList = new RarNode[N_INDEXES];
// byte *pText, *UnitsStart,*HeapEnd,*FakeUnitsStart;
private int pText, unitsStart, heapEnd, fakeUnitsStart;
private byte[] heap;
private int freeListPos;
private int tempMemBlockPos;
// Temp fields
private RarNode tempRarNode = null;
private RarMemBlock tempRarMemBlock1 = null;
private RarMemBlock tempRarMemBlock2 = null;
private RarMemBlock tempRarMemBlock3 = null;
public SubAllocator()
{
clean();
}
public virtual void clean()
{
subAllocatorSize = 0;
}
private void insertNode(int p, int indx)
{
RarNode temp = tempRarNode;
temp.Address = p;
temp.SetNext(freeList[indx].GetNext());
freeList[indx].SetNext(temp);
}
public virtual void incPText()
{
pText++;
}
private int removeNode(int indx)
{
int retVal = freeList[indx].GetNext();
RarNode temp = tempRarNode;
temp.Address = retVal;
freeList[indx].SetNext(temp.GetNext());
return retVal;
}
private int U2B(int NU)
{
return UNIT_SIZE*NU;
}
/* memblockptr */
private int MBPtr(int BasePtr, int Items)
{
return (BasePtr + U2B(Items));
}
private void splitBlock(int pv, int oldIndx, int newIndx)
{
int i, uDiff = indx2Units[oldIndx] - indx2Units[newIndx];
int p = pv + U2B(indx2Units[newIndx]);
if (indx2Units[i = units2Indx[uDiff - 1]] != uDiff)
{
insertNode(p, --i);
p += U2B(i = indx2Units[i]);
uDiff -= i;
}
insertNode(p, units2Indx[uDiff - 1]);
}
public virtual void stopSubAllocator()
{
if (subAllocatorSize != 0)
{
subAllocatorSize = 0;
//ArrayFactory.BYTES_FACTORY.recycle(heap);
heap = null;
heapStart = 1;
// rarfree(HeapStart);
// Free temp fields
tempRarNode = null;
tempRarMemBlock1 = null;
tempRarMemBlock2 = null;
tempRarMemBlock3 = null;
}
}
public virtual int GetAllocatedMemory()
{
return subAllocatorSize;
}
public virtual bool startSubAllocator(int SASize)
{
int t = SASize;
if (subAllocatorSize == t)
{
return true;
}
stopSubAllocator();
int allocSize = t/FIXED_UNIT_SIZE*UNIT_SIZE + UNIT_SIZE;
// adding space for freelist (needed for pointers)
// 1+ for null pointer
int realAllocSize = 1 + allocSize + 4*N_INDEXES;
// adding space for an additional memblock
tempMemBlockPos = realAllocSize;
realAllocSize += RarMemBlock.size;
heap = new byte[realAllocSize];
heapStart = 1;
heapEnd = heapStart + allocSize - UNIT_SIZE;
subAllocatorSize = t;
// Bug fixed
freeListPos = heapStart + allocSize;
//UPGRADE_ISSUE: The following fragment of code could not be parsed and was not converted. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1156'"
//assert(realAllocSize - tempMemBlockPos == RarMemBlock.size): realAllocSize
//+ + tempMemBlockPos + + RarMemBlock.size;
// Init freeList
for (int i = 0, pos = freeListPos; i < freeList.Length; i++, pos += RarNode.size)
{
freeList[i] = new RarNode(heap);
freeList[i].Address = pos;
}
// Init temp fields
tempRarNode = new RarNode(heap);
tempRarMemBlock1 = new RarMemBlock(heap);
tempRarMemBlock2 = new RarMemBlock(heap);
tempRarMemBlock3 = new RarMemBlock(heap);
return true;
}
/// <summary>
/// Compacts the free lists: collects all free blocks into one doubly linked
/// list anchored at a scratch block, merges physically adjacent blocks, then
/// reinserts the merged blocks into the appropriate size buckets.
/// Port of the C PPMd GlueFreeBlocks; statement order is significant.
/// </summary>
private void glueFreeBlocks()
{
    RarMemBlock s0 = tempRarMemBlock1; // list anchor, placed in the scratch area
    s0.Address = tempMemBlockPos;
    RarMemBlock p = tempRarMemBlock2;
    RarMemBlock p1 = tempRarMemBlock3;
    int i, k, sz;
    if (loUnit != hiUnit)
    {
        // Terminate the unit area so the adjacency scan below stops.
        heap[loUnit] = 0;
    }
    // Pull every free block off every bucket list onto the anchor list,
    // stamping it 0xFFFF so adjacent free blocks can be recognised.
    for (i = 0, s0.SetPrev(s0), s0.SetNext(s0); i < N_INDEXES; i++)
    {
        while (freeList[i].GetNext() != 0)
        {
            p.Address = removeNode(i); // =(RAR_MEM_BLK*)RemoveNode(i);
            p.InsertAt(s0); // p->insertAt(&s0);
            p.Stamp = 0xFFFF; // p->Stamp=0xFFFF;
            p.SetNU(indx2Units[i]); // p->NU=Indx2Units[i];
        }
    }
    // Merge each block with any physically following free blocks
    // (while keeping the unit count below 0x10000).
    for (p.Address = s0.GetNext(); p.Address != s0.Address; p.Address = p.GetNext())
    {
        // while ((p1=MBPtr(p,p->NU))->Stamp == 0xFFFF && int(p->NU)+p1->NU
        // < 0x10000)
        // Bug fixed
        p1.Address = MBPtr(p.Address, p.GetNU());
        while (p1.Stamp == 0xFFFF && p.GetNU() + p1.GetNU() < 0x10000)
        {
            p1.Remove();
            p.SetNU(p.GetNU() + p1.GetNU()); // ->NU += p1->NU;
            p1.Address = MBPtr(p.Address, p.GetNU());
        }
    }
    // Return the merged blocks to the free lists, splitting blocks larger
    // than 128 units into 128-unit chunks first.
    // while ((p=s0.next) != &s0)
    // Bug fixed
    p.Address = s0.GetNext();
    while (p.Address != s0.Address)
    {
        for (p.Remove(), sz = p.GetNU(); sz > 128; sz -= 128, p.Address = MBPtr(p.Address, 128))
        {
            insertNode(p.Address, N_INDEXES - 1);
        }
        if (indx2Units[i = units2Indx[sz - 1]] != sz)
        {
            // sz is not an exact bucket size: split off the remainder.
            k = sz - indx2Units[--i];
            insertNode(MBPtr(p.Address, sz - k), k - 1);
        }
        insertNode(p.Address, i);
        p.Address = s0.GetNext();
    }
}
/// <summary>
/// Slow-path allocation for bucket <paramref name="indx"/>: periodically
/// compacts the free lists, then searches larger buckets, and as a last
/// resort steals space from the text area. Returns 0 on failure.
/// </summary>
private int allocUnitsRare(int indx)
{
    if (glueCount == 0)
    {
        // Every 255 rare allocations, compact the free lists and retry.
        glueCount = 255;
        glueFreeBlocks();
        if (freeList[indx].GetNext() != 0)
        {
            return removeNode(indx);
        }
    }
    int i = indx;
    do
    {
        if (++i == N_INDEXES)
        {
            // No free block in any bucket: try to shrink the text area
            // and carve the block from below unitsStart.
            glueCount--;
            i = U2B(indx2Units[indx]);
            int j = FIXED_UNIT_SIZE*indx2Units[indx];
            if (fakeUnitsStart - pText > j)
            {
                fakeUnitsStart -= j;
                unitsStart -= i;
                return unitsStart;
            }
            // Out of memory.
            return (0);
        }
    } while (freeList[i].GetNext() == 0);
    // Found a block in a larger bucket: take it and return the surplus
    // units to the free lists.
    int retVal = removeNode(i);
    splitBlock(retVal, i, indx);
    return retVal;
}
/// <summary>
/// Allocates a block of <paramref name="NU"/> units, preferring a recycled
/// block from the free lists, then the low end of the unit area, and finally
/// the slow path. Returns the block's heap address, or 0 on failure.
/// </summary>
public virtual int allocUnits(int NU)
{
    // Map the requested unit count onto its size bucket.
    int bucket = units2Indx[NU - 1];
    if (freeList[bucket].GetNext() != 0)
    {
        // A recycled block of the right size is available.
        return removeNode(bucket);
    }
    // Otherwise carve the block from the low end of the unit area.
    int blockBytes = U2B(indx2Units[bucket]);
    int allocated = loUnit;
    loUnit += blockBytes;
    if (loUnit <= hiUnit)
    {
        return allocated;
    }
    // Would collide with the high end: undo and take the slow path.
    loUnit -= blockBytes;
    return allocUnitsRare(bucket);
}
/// <summary>
/// Allocates one unit for a PPM context, preferring the high end of the unit
/// area, then the single-unit free list, then the slow path.
/// </summary>
public virtual int allocContext()
{
    if (hiUnit != loUnit)
    {
        // Carve from the high end of the unit area.
        hiUnit -= UNIT_SIZE;
        return hiUnit;
    }
    if (freeList[0].GetNext() != 0)
    {
        // Reuse a freed single-unit block.
        return removeNode(0);
    }
    return allocUnitsRare(0);
}
/// <summary>
/// Grows a block from <paramref name="OldNU"/> to OldNU+1 units. If both
/// sizes fall in the same bucket the block is reused in place; otherwise a
/// new block is allocated, the data copied, and the old block freed.
/// Returns the (possibly new) address, or 0 if allocation failed.
/// </summary>
public virtual int expandUnits(int oldPtr, int OldNU)
{
    int oldBucket = units2Indx[OldNU - 1];
    int newBucket = units2Indx[OldNU];
    if (oldBucket == newBucket)
    {
        // Same size bucket: the existing block already has room.
        return oldPtr;
    }
    int ptr = allocUnits(OldNU + 1);
    if (ptr != 0)
    {
        // Move the payload, then recycle the old block.
        Array.Copy(heap, oldPtr, heap, ptr, U2B(OldNU));
        insertNode(oldPtr, oldBucket);
    }
    return ptr;
}
/// <summary>
/// Shrinks a block from <paramref name="oldNU"/> to <paramref name="newNU"/>
/// units. Same bucket: reuse in place. Otherwise move into a recycled smaller
/// block when one is free, or split the current block and keep its head.
/// Returns the (possibly new) address.
/// </summary>
public virtual int shrinkUnits(int oldPtr, int oldNU, int newNU)
{
    int oldBucket = units2Indx[oldNU - 1];
    int newBucket = units2Indx[newNU - 1];
    if (oldBucket == newBucket)
    {
        // Both sizes map to the same bucket: nothing to do.
        return oldPtr;
    }
    if (freeList[newBucket].GetNext() == 0)
    {
        // No recycled block available: split the current one in place,
        // returning the surplus units to the free lists.
        splitBlock(oldPtr, oldBucket, newBucket);
        return oldPtr;
    }
    // Move the payload into a recycled smaller block; free the old one.
    int ptr = removeNode(newBucket);
    Array.Copy(heap, oldPtr, heap, ptr, U2B(newNU));
    insertNode(oldPtr, oldBucket);
    return ptr;
}
/// <summary>
/// Returns a block of <paramref name="OldNU"/> units at <paramref name="ptr"/>
/// to the free list of its size bucket.
/// </summary>
public virtual void freeUnits(int ptr, int OldNU)
{
    insertNode(ptr, units2Indx[OldNU - 1]);
}
/// <summary>
/// Moves the text pointer back by <paramref name="dPText"/> bytes.
/// </summary>
// NOTE(review): this writes 'PText' while initSubAllocator assigns the 'pText'
// field directly — presumably PText is a property wrapping pText; confirm
// against the (not visible here) field/property declarations of this class.
public virtual void decPText(int dPText)
{
    PText = PText - dPText;
}
/// <summary>
/// Resets the allocator for a new (de)compression run: clears the free-list
/// node area, partitions the heap between the text area and the unit area,
/// and rebuilds the indx2Units / units2Indx size-bucket tables.
/// Statement order matters (hiUnit is deliberately assigned twice).
/// </summary>
public virtual void initSubAllocator()
{
    int i, k;
    // Zero the free-list node table.
    Utility.Fill(heap, freeListPos, freeListPos + sizeOfFreeList(), (byte) 0);
    pText = heapStart;
    // Roughly 7/8 of the space goes to the unit area, the rest to text.
    int size2 = FIXED_UNIT_SIZE*(subAllocatorSize/8/FIXED_UNIT_SIZE*7);
    int realSize2 = size2/FIXED_UNIT_SIZE*UNIT_SIZE;
    int size1 = subAllocatorSize - size2;
    int realSize1 = size1/FIXED_UNIT_SIZE*UNIT_SIZE + size1%FIXED_UNIT_SIZE;
    hiUnit = heapStart + subAllocatorSize;
    loUnit = unitsStart = heapStart + realSize1;
    fakeUnitsStart = heapStart + size1;
    hiUnit = loUnit + realSize2; // final value; overrides the assignment above
    // Bucket sizes: N1 buckets stepping by 1 unit, then N2 by 2, N3 by 3, N4 by 4.
    for (i = 0, k = 1; i < N1; i++, k += 1)
    {
        indx2Units[i] = k & 0xff;
    }
    for (k++; i < N1 + N2; i++, k += 2)
    {
        indx2Units[i] = k & 0xff;
    }
    for (k++; i < N1 + N2 + N3; i++, k += 3)
    {
        indx2Units[i] = k & 0xff;
    }
    for (k++; i < (N1 + N2 + N3 + N4); i++, k += 4)
    {
        indx2Units[i] = k & 0xff;
    }
    // Inverse table: unit count (1..128) -> smallest bucket that covers it.
    // Also resets the glue counter for the compaction heuristic.
    for (glueCount = 0, k = 0, i = 0; k < 128; k++)
    {
        i += ((indx2Units[i] < (k + 1)) ? 1 : 0);
        units2Indx[k] = i & 0xff;
    }
}
/// <summary>
/// Number of bytes the free-list node table occupies on the heap.
/// </summary>
private int sizeOfFreeList()
{
    return RarNode.size*freeList.Length;
}
// Debug
// public void dumpHeap() {
// File file = new File("P:\\test\\heapdumpj");
// OutputStream out = null;
// try {
// out = new FileOutputStream(file);
// out.write(heap, heapStart, heapEnd - heapStart);
// out.flush();
// System.out.println("Heap dumped to " + file.getAbsolutePath());
// }
// catch (IOException e) {
// e.printStackTrace();
// }
// finally {
// FileUtil.close(out);
// }
// }
// Debug
/// <summary>
/// Renders the allocator state, one field per line, for debugging.
/// </summary>
public override System.String ToString()
{
    StringBuilder text = new StringBuilder("SubAllocator[");
    text.Append("\n subAllocatorSize=").Append(subAllocatorSize);
    text.Append("\n glueCount=").Append(glueCount);
    text.Append("\n heapStart=").Append(heapStart);
    text.Append("\n loUnit=").Append(loUnit);
    text.Append("\n hiUnit=").Append(hiUnit);
    text.Append("\n pText=").Append(pText);
    text.Append("\n unitsStart=").Append(unitsStart);
    text.Append("\n]");
    return text.ToString();
}
// A unit must be able to hold either a PPM context or a free-block header,
// so the unit size is the larger of the two structure sizes.
static SubAllocator()
{
    UNIT_SIZE = System.Math.Max(PPMContext.size, RarMemBlock.size);
}
}
}

View File

@@ -1,103 +0,0 @@
#region Using
using System;
using System.IO;
#endregion
namespace SharpCompress.Compressor.PPMd.I1
{
/// <summary>
/// A simple range coder.
/// </summary>
/// <remarks>
/// Note that in most cases fields are used rather than properties for performance reasons (for example,
/// <see cref="Scale"/> is a field rather than a property).
/// </remarks>
internal class Coder
{
    // Normalization thresholds for the 32-bit range coder.
    private const uint RangeTop = 1 << 24;
    private const uint RangeBottom = 1 << 15;
    private uint low;      // low edge of the current coding interval
    private uint code;     // decoder only: bits read from the stream so far
    private uint range;    // width of the current coding interval
    public uint LowCount;  // cumulative frequency below the current symbol
    public uint HighCount; // cumulative frequency through the current symbol
    public uint Scale;     // total cumulative frequency
    /// <summary>Resets the encoder to the full interval.</summary>
    public void RangeEncoderInitialize()
    {
        low = 0;
        range = uint.MaxValue;
    }
    /// <summary>
    /// Emits settled top bytes and widens the interval until it is large
    /// enough to encode the next symbol (handles carry via the underflow term).
    /// </summary>
    public void RangeEncoderNormalize(Stream stream)
    {
        while ((low ^ (low + range)) < RangeTop ||
               range < RangeBottom && ((range = (uint) -low & (RangeBottom - 1)) != 0 || true))
        {
            stream.WriteByte((byte) (low >> 24));
            range <<= 8;
            low <<= 8;
        }
    }
    /// <summary>Narrows the interval to [LowCount, HighCount) of Scale.</summary>
    public void RangeEncodeSymbol()
    {
        low += LowCount*(range /= Scale);
        range *= HighCount - LowCount;
    }
    /// <summary>Like RangeEncodeSymbol but with a power-of-two total (shift instead of divide).</summary>
    public void RangeShiftEncodeSymbol(int rangeShift)
    {
        low += LowCount*(range >>= rangeShift);
        range *= HighCount - LowCount;
    }
    /// <summary>Flushes the remaining four bytes of the interval's low edge.</summary>
    public void RangeEncoderFlush(Stream stream)
    {
        for (uint index = 0; index < 4; index++)
        {
            stream.WriteByte((byte) (low >> 24));
            low <<= 8;
        }
    }
    /// <summary>Primes the decoder with the first four code bytes.</summary>
    // NOTE(review): Stream.ReadByte returns -1 at end of stream, which the
    // byte cast silently turns into 0xFF — a truncated input is not detected here.
    public void RangeDecoderInitialize(Stream stream)
    {
        low = 0;
        code = 0;
        range = uint.MaxValue;
        for (uint index = 0; index < 4; index++)
            code = (code << 8) | (byte) stream.ReadByte();
    }
    /// <summary>Decoder twin of RangeEncoderNormalize: reads bytes as the interval widens.</summary>
    public void RangeDecoderNormalize(Stream stream)
    {
        while ((low ^ (low + range)) < RangeTop ||
               range < RangeBottom && ((range = (uint) -low & (RangeBottom - 1)) != 0 || true))
        {
            code = (code << 8) | (byte) stream.ReadByte();
            range <<= 8;
            low <<= 8;
        }
    }
    /// <summary>Returns the cumulative count the current code falls into (divides range by Scale as a side effect).</summary>
    public uint RangeGetCurrentCount()
    {
        return (code - low)/(range /= Scale);
    }
    /// <summary>Power-of-two variant of RangeGetCurrentCount (shifts range as a side effect).</summary>
    public uint RangeGetCurrentShiftCount(int rangeShift)
    {
        return (code - low)/(range >>= rangeShift);
    }
    /// <summary>Narrows the decoder interval to the decoded symbol's subrange.</summary>
    public void RangeRemoveSubrange()
    {
        low += range*LowCount;
        range *= HighCount - LowCount;
    }
}
}

View File

@@ -1,832 +0,0 @@
#region Using
using System;
using System.IO;
#endregion
// This is a port of Dmitry Shkarin's PPMd Variant I Revision 1.
// Ported by Michael Bone (mjbone03@yahoo.com.au).
namespace SharpCompress.Compressor.PPMd.I1
{
/// <summary>
/// The model.
/// </summary>
internal partial class Model
{
public const uint Signature = 0x84acaf8fU;
public const char Variant = 'I';
public const int MaximumOrder = 16; // maximum allowed model order
private const byte UpperFrequency = 5;
private const byte IntervalBitCount = 7;
private const byte PeriodBitCount = 7;
private const byte TotalBitCount = IntervalBitCount + PeriodBitCount;
private const uint Interval = 1 << IntervalBitCount;
private const uint BinaryScale = 1 << TotalBitCount;
private const uint MaximumFrequency = 124;
private const uint OrderBound = 9;
private See2Context[,] see2Contexts;
private See2Context emptySee2Context;
private PpmContext maximumContext;
private ushort[,] binarySummary = new ushort[25,64]; // binary SEE-contexts
private byte[] numberStatisticsToBinarySummaryIndex = new byte[256];
private byte[] probabilities = new byte[260];
private byte[] characterMask = new byte[256];
private byte escapeCount;
private int modelOrder;
private int orderFall;
private int initialEscape;
private int initialRunLength;
private int runLength;
private byte previousSuccess;
private byte numberMasked;
private ModelRestorationMethod method;
private PpmState foundState; // found next state transition
private Allocator Allocator;
private Coder Coder;
private PpmContext minimumContext;
private byte numberStatistics;
private PpmState[] decodeStates = new PpmState[256];
private static readonly ushort[] InitialBinaryEscapes =
{
0x3CDD, 0x1F3F, 0x59BF, 0x48F3, 0x64A1, 0x5ABC, 0x6632,
0x6051
};
private static readonly byte[] ExponentialEscapes = {25, 14, 9, 7, 5, 5, 4, 4, 4, 3, 3, 3, 2, 2, 2, 2};
#region Public Methods
/// <summary>
/// Builds the static lookup tables shared by encoding and decoding:
/// the number-statistics-to-binary-summary index, the probability table,
/// and the SEE-2 context array.
/// </summary>
public Model()
{
    // Conversion table for number statistics. Resulting values:
    // index 0 -> 0, index 1 -> 2, indices 2..10 -> 4, indices 11..255 -> 6.
    numberStatisticsToBinarySummaryIndex[0] = 2*0;
    numberStatisticsToBinarySummaryIndex[1] = 2*1;
    for (int index = 2; index < 11; index++)
        numberStatisticsToBinarySummaryIndex[index] = 2*2;
    for (int index = 11; index < 256; index++)
        numberStatisticsToBinarySummaryIndex[index] = 2*3;
    // Probability table: identity for the first UpperFrequency entries, then
    // each probability value repeats for a run one longer than the previous
    // (runs of length 1, 2, 3, ... as index grows).
    uint count = 1;
    uint step = 1;
    uint probability = UpperFrequency;
    for (int index = 0; index < UpperFrequency; index++)
        probabilities[index] = (byte) index;
    for (int index = UpperFrequency; index < 260; index++)
    {
        probabilities[index] = (byte) probability;
        count--;
        if (count == 0)
        {
            step++;
            count = step;
            probability++;
        }
    }
    // Create the SEE-2 context array.
    see2Contexts = new See2Context[24,32];
    for (int index1 = 0; index1 < 24; index1++)
        for (int index2 = 0; index2 < 32; index2++)
            see2Contexts[index1, index2] = new See2Context();
    // Set the signature (identifying the algorithm) in the fallback context.
    emptySee2Context = new See2Context();
    emptySee2Context.Summary = (ushort) (Signature & 0x0000ffff);
    emptySee2Context.Shift = (byte) ((Signature >> 16) & 0x000000ff);
    emptySee2Context.Count = (byte) (Signature >> 24);
}
/// <summary>
/// Encode (ie. compress) a given source stream, writing the encoded result to the target stream.
/// </summary>
/// <param name="target">Stream that receives the compressed output.</param>
/// <param name="source">Stream supplying the bytes to compress.</param>
/// <param name="properties">Model order / restoration-method settings to encode with.</param>
/// <exception cref="ArgumentNullException"><paramref name="target"/> or <paramref name="source"/> is null.</exception>
public void Encode(Stream target, Stream source, PpmdProperties properties)
{
    // nameof keeps the argument names refactor-safe; the runtime message text
    // is unchanged ("target" / "source").
    if (target == null)
        throw new ArgumentNullException(nameof(target));
    if (source == null)
        throw new ArgumentNullException(nameof(source));
    EncodeStart(properties);
    // final: true — flush the range coder once the source is exhausted.
    EncodeBlock(target, source, true);
}
// Wires up the allocator, range encoder and model state for a new encode run.
// Returns the coder so callers (e.g. PpmdStream) can drive it directly.
internal Coder EncodeStart(PpmdProperties properties)
{
    Allocator = properties.Allocator;
    Coder = new Coder();
    Coder.RangeEncoderInitialize();
    StartModel(properties.ModelOrder, properties.ModelRestorationMethod);
    return Coder;
}
/// <summary>
/// Encodes bytes from <paramref name="source"/> into <paramref name="target"/>.
/// When <paramref name="final"/> is false, returns at end of source without
/// flushing so more data can follow; when true, encodes through end of source
/// (the EOF symbol path) and flushes the range coder.
/// </summary>
internal void EncodeBlock(Stream target, Stream source, bool final)
{
    while (true)
    {
        minimumContext = maximumContext;
        numberStatistics = minimumContext.NumberStatistics;
        int c = source.ReadByte();
        if (c < 0 && !final)
            return;
        if (numberStatistics != 0)
        {
            // Context has multiple symbols: normal symbol coding.
            EncodeSymbol1(c, minimumContext);
            Coder.RangeEncodeSymbol();
        }
        else
        {
            // Binary (single-symbol) context: shift coding.
            EncodeBinarySymbol(c, minimumContext);
            Coder.RangeShiftEncodeSymbol(TotalBitCount);
        }
        // Escape loop: walk suffix contexts until the symbol is coded.
        while (foundState == PpmState.Zero)
        {
            Coder.RangeEncoderNormalize(target);
            do
            {
                orderFall++;
                minimumContext = minimumContext.Suffix;
                if (minimumContext == PpmContext.Zero)
                    goto StopEncoding;
            } while (minimumContext.NumberStatistics == numberMasked);
            EncodeSymbol2(c, minimumContext);
            Coder.RangeEncodeSymbol();
        }
        if (orderFall == 0 && (Pointer) foundState.Successor >= Allocator.BaseUnit)
        {
            // Fast path: step straight to the successor context.
            maximumContext = foundState.Successor;
        }
        else
        {
            UpdateModel(minimumContext);
            if (escapeCount == 0)
                ClearMask();
        }
        Coder.RangeEncoderNormalize(target);
    }
    StopEncoding:
    Coder.RangeEncoderFlush(target);
}
/// <summary>
/// Decode (ie. decompress) a given source stream, writing the decoded result to the target stream.
/// </summary>
/// <param name="target">Stream that receives the decompressed output.</param>
/// <param name="source">Stream supplying the compressed bytes.</param>
/// <param name="properties">Model order / restoration-method settings the data was encoded with.</param>
/// <exception cref="ArgumentNullException"><paramref name="target"/> or <paramref name="source"/> is null.</exception>
public void Decode(Stream target, Stream source, PpmdProperties properties)
{
    // nameof keeps the argument names refactor-safe; runtime message unchanged.
    if (target == null)
        throw new ArgumentNullException(nameof(target));
    if (source == null)
        throw new ArgumentNullException(nameof(source));
    DecodeStart(source, properties);
    // Pump decoded bytes through a 64 KiB buffer until the model reports no more data.
    byte[] buffer = new byte[65536];
    int read;
    while ((read = DecodeBlock(source, buffer, 0, buffer.Length)) != 0)
        target.Write(buffer, 0, read);
}
// Wires up the allocator, range decoder and model state for a new decode run
// and positions the model at its root context. Returns the coder so callers
// (e.g. PpmdStream) can drive it directly.
internal Coder DecodeStart(Stream source, PpmdProperties properties)
{
    Allocator = properties.Allocator;
    Coder = new Coder();
    Coder.RangeDecoderInitialize(source);
    StartModel(properties.ModelOrder, properties.ModelRestorationMethod);
    minimumContext = maximumContext;
    numberStatistics = minimumContext.NumberStatistics;
    return Coder;
}
/// <summary>
/// Decodes up to <paramref name="count"/> bytes into
/// <paramref name="buffer"/> starting at <paramref name="offset"/>.
/// Returns the number of bytes produced; 0 signals end of data.
/// </summary>
internal int DecodeBlock(Stream source, byte[] buffer, int offset, int count)
{
    if (minimumContext == PpmContext.Zero)
        return 0;
    int total = 0;
    while (total < count)
    {
        if (numberStatistics != 0)
            DecodeSymbol1(minimumContext);
        else
            DecodeBinarySymbol(minimumContext);
        Coder.RangeRemoveSubrange();
        // Escape loop: walk suffix contexts until a symbol is found.
        while (foundState == PpmState.Zero)
        {
            Coder.RangeDecoderNormalize(source);
            do
            {
                orderFall++;
                minimumContext = minimumContext.Suffix;
                if (minimumContext == PpmContext.Zero)
                    goto StopDecoding;
            } while (minimumContext.NumberStatistics == numberMasked);
            DecodeSymbol2(minimumContext);
            Coder.RangeRemoveSubrange();
        }
        buffer[offset] = foundState.Symbol;
        offset++;
        total++;
        if (orderFall == 0 && (Pointer) foundState.Successor >= Allocator.BaseUnit)
        {
            // Fast path: step straight to the successor context.
            maximumContext = foundState.Successor;
        }
        else
        {
            UpdateModel(minimumContext);
            if (escapeCount == 0)
                ClearMask();
        }
        minimumContext = maximumContext;
        numberStatistics = minimumContext.NumberStatistics;
        Coder.RangeDecoderNormalize(source);
    }
    StopDecoding:
    return total;
}
#endregion
#region Private Methods
/// <summary>
/// Initialise the model (unless the model order is set to 1 in which case the model should be cleared so that
/// the statistics are carried over, allowing "solid" mode compression).
/// </summary>
private void StartModel(int modelOrder, ModelRestorationMethod modelRestorationMethod)
{
    Array.Clear(characterMask, 0, characterMask.Length);
    escapeCount = 1;
    // Compress in "solid" mode if the model order value is set to 1 (this will examine the current PPM context
    // structures to determine the value of orderFall).
    if (modelOrder < 2)
    {
        orderFall = this.modelOrder;
        for (PpmContext context = maximumContext; context.Suffix != PpmContext.Zero; context = context.Suffix)
            orderFall--;
        return;
    }
    this.modelOrder = modelOrder;
    orderFall = modelOrder;
    method = modelRestorationMethod;
    Allocator.Initialize();
    initialRunLength = -((modelOrder < 12) ? modelOrder : 12) - 1;
    runLength = initialRunLength;
    // Allocate the root (order-0) context covering all 256 symbols.
    maximumContext = Allocator.AllocateContext();
    maximumContext.Suffix = PpmContext.Zero;
    maximumContext.NumberStatistics = 255;
    maximumContext.SummaryFrequency = (ushort) (maximumContext.NumberStatistics + 2);
    maximumContext.Statistics = Allocator.AllocateUnits(256/2);
    // allocates enough space for 256 PPM states (each is 6 bytes)
    previousSuccess = 0;
    for (int index = 0; index < 256; index++)
    {
        PpmState state = maximumContext.Statistics[index];
        state.Symbol = (byte) index;
        state.Frequency = 1;
        state.Successor = PpmContext.Zero;
    }
    // Initialise the binary SEE contexts from the probability table.
    uint probability = 0;
    for (int index1 = 0; probability < 25; probability++)
    {
        while (probabilities[index1] == probability)
            index1++;
        for (int index2 = 0; index2 < 8; index2++)
            binarySummary[probability, index2] =
                (ushort) (BinaryScale - InitialBinaryEscapes[index2]/(index1 + 1));
        for (int index2 = 8; index2 < 64; index2 += 8)
            for (int index3 = 0; index3 < 8; index3++)
                binarySummary[probability, index2 + index3] = binarySummary[probability, index3];
    }
    // Initialise the SEE-2 contexts.
    probability = 0;
    for (uint index1 = 0; probability < 24; probability++)
    {
        while (probabilities[index1 + 3] == probability + 3)
            index1++;
        for (int index2 = 0; index2 < 32; index2++)
            see2Contexts[probability, index2].Initialize(2*index1 + 5);
    }
}
/// <summary>
/// Updates the PPM model after a symbol was coded below the maximum order:
/// bumps frequencies in the suffix context, creates/extends successor
/// contexts, and adds the coded symbol to every context between
/// maximumContext and minimumContext. On allocation failure, falls through
/// to RestoreModel. Port of PPMd var.I UpdateModel; statement order and the
/// goto structure mirror the original and are significant.
/// </summary>
private void UpdateModel(PpmContext minimumContext)
{
    PpmState state = PpmState.Zero;
    PpmContext Successor;
    PpmContext currentContext = maximumContext;
    uint numberStatistics;
    uint ns1;
    uint cf;
    uint sf;
    uint s0;
    uint foundStateFrequency = foundState.Frequency;
    byte foundStateSymbol = foundState.Symbol;
    byte symbol;
    byte flag;
    PpmContext foundStateSuccessor = foundState.Successor;
    PpmContext context = minimumContext.Suffix;
    // Reward the coded symbol in the suffix context (keeps low orders current).
    if ((foundStateFrequency < MaximumFrequency/4) && (context != PpmContext.Zero))
    {
        if (context.NumberStatistics != 0)
        {
            state = context.Statistics;
            if (state.Symbol != foundStateSymbol)
            {
                // Linear scan for the symbol; move-to-front when it overtakes
                // its predecessor's frequency.
                do
                {
                    symbol = state[1].Symbol;
                    state++;
                } while (symbol != foundStateSymbol);
                if (state[0].Frequency >= state[-1].Frequency)
                {
                    Swap(state[0], state[-1]);
                    state--;
                }
            }
            cf = (uint) ((state.Frequency < MaximumFrequency - 9) ? 2 : 0);
            state.Frequency += (byte) cf;
            context.SummaryFrequency += (byte) cf;
        }
        else
        {
            state = context.FirstState;
            state.Frequency += (byte) ((state.Frequency < 32) ? 1 : 0);
        }
    }
    if (orderFall == 0 && foundStateSuccessor != PpmContext.Zero)
    {
        // Highest order already: just build/extend the successor chain.
        foundState.Successor = CreateSuccessors(true, state, minimumContext);
        if (foundState.Successor == PpmContext.Zero)
            goto RestartModel;
        maximumContext = foundState.Successor;
        return;
    }
    // Append the symbol to the text area; its position becomes the successor.
    Allocator.Text[0] = foundStateSymbol;
    Allocator.Text++;
    Successor = Allocator.Text;
    if (Allocator.Text >= Allocator.BaseUnit)
        goto RestartModel;
    if (foundStateSuccessor != PpmContext.Zero)
    {
        if (foundStateSuccessor < Allocator.BaseUnit)
            foundStateSuccessor = CreateSuccessors(false, state, minimumContext);
    }
    else
    {
        foundStateSuccessor = ReduceOrder(state, minimumContext);
    }
    if (foundStateSuccessor == PpmContext.Zero)
        goto RestartModel;
    if (--orderFall == 0)
    {
        Successor = foundStateSuccessor;
        Allocator.Text -= (maximumContext != minimumContext) ? 1 : 0;
    }
    else if (method > ModelRestorationMethod.Freeze)
    {
        Successor = foundStateSuccessor;
        Allocator.Text = Allocator.Heap;
        orderFall = 0;
    }
    numberStatistics = minimumContext.NumberStatistics;
    s0 = minimumContext.SummaryFrequency - numberStatistics - foundStateFrequency;
    flag = (byte) ((foundStateSymbol >= 0x40) ? 0x08 : 0x00);
    // Add the coded symbol to every context from maximumContext down to
    // (but excluding) minimumContext.
    for (; currentContext != minimumContext; currentContext = currentContext.Suffix)
    {
        ns1 = currentContext.NumberStatistics;
        if (ns1 != 0)
        {
            if ((ns1 & 1) != 0)
            {
                // Statistics array is full (odd count): grow it.
                state = Allocator.ExpandUnits(currentContext.Statistics, (ns1 + 1) >> 1);
                if (state == PpmState.Zero)
                    goto RestartModel;
                currentContext.Statistics = state;
            }
            currentContext.SummaryFrequency += (ushort) ((3*ns1 + 1 < numberStatistics) ? 1 : 0);
        }
        else
        {
            // Binary context becomes a multi-symbol context: move the single
            // state into a freshly allocated statistics array.
            state = Allocator.AllocateUnits(1);
            if (state == PpmState.Zero)
                goto RestartModel;
            Copy(state, currentContext.FirstState);
            currentContext.Statistics = state;
            if (state.Frequency < MaximumFrequency/4 - 1)
                state.Frequency += state.Frequency;
            else
                state.Frequency = (byte) (MaximumFrequency - 4);
            currentContext.SummaryFrequency =
                (ushort) (state.Frequency + initialEscape + ((numberStatistics > 2) ? 1 : 0));
        }
        // Estimate the new symbol's initial frequency from its frequency in
        // the minimum context relative to this context's totals.
        cf = (uint) (2*foundStateFrequency*(currentContext.SummaryFrequency + 6));
        sf = s0 + currentContext.SummaryFrequency;
        if (cf < 6*sf)
        {
            cf = (uint) (1 + ((cf > sf) ? 1 : 0) + ((cf >= 4*sf) ? 1 : 0));
            currentContext.SummaryFrequency += 4;
        }
        else
        {
            cf = (uint) (4 + ((cf > 9*sf) ? 1 : 0) + ((cf > 12*sf) ? 1 : 0) + ((cf > 15*sf) ? 1 : 0));
            currentContext.SummaryFrequency += (ushort) cf;
        }
        state = currentContext.Statistics + (++currentContext.NumberStatistics);
        state.Successor = Successor;
        state.Symbol = foundStateSymbol;
        state.Frequency = (byte) cf;
        currentContext.Flags |= flag;
    }
    maximumContext = foundStateSuccessor;
    return;
    RestartModel:
    RestoreModel(currentContext, minimumContext, foundStateSuccessor);
}
/// <summary>
/// Materialises the chain of successor contexts for the pending text pointer
/// (upBranch): walks suffix contexts collecting states whose successor is
/// still the raw text position, then allocates real contexts for them from
/// deepest to shallowest. Returns the deepest created/found context, or
/// PpmContext.Zero when allocation fails. Port of PPMd var.I CreateSuccessors;
/// the goto-into-loop structure mirrors the original.
/// </summary>
private PpmContext CreateSuccessors(bool skip, PpmState state, PpmContext context)
{
    PpmContext upBranch = foundState.Successor;
    PpmState[] states = new PpmState[MaximumOrder];
    uint stateIndex = 0;
    byte symbol = foundState.Symbol;
    if (!skip)
    {
        states[stateIndex++] = foundState;
        if (context.Suffix == PpmContext.Zero)
            goto NoLoop;
    }
    bool gotoLoopEntry = false;
    if (state != PpmState.Zero)
    {
        context = context.Suffix;
        gotoLoopEntry = true;
    }
    // Walk the suffix chain gathering every state that still points at upBranch.
    do
    {
        if (gotoLoopEntry)
        {
            gotoLoopEntry = false;
            goto LoopEntry;
        }
        context = context.Suffix;
        if (context.NumberStatistics != 0)
        {
            byte temporary;
            state = context.Statistics;
            if (state.Symbol != symbol)
            {
                do
                {
                    temporary = state[1].Symbol;
                    state++;
                } while (temporary != symbol);
            }
            temporary = (byte) ((state.Frequency < MaximumFrequency - 9) ? 1 : 0);
            state.Frequency += temporary;
            context.SummaryFrequency += temporary;
        }
        else
        {
            state = context.FirstState;
            state.Frequency +=
                (byte) (((context.Suffix.NumberStatistics == 0) ? 1 : 0) & ((state.Frequency < 24) ? 1 : 0));
        }
        LoopEntry:
        if (state.Successor != upBranch)
        {
            // Found a state with a real successor context: start from it.
            context = state.Successor;
            break;
        }
        states[stateIndex++] = state;
    } while (context.Suffix != PpmContext.Zero);
    NoLoop:
    if (stateIndex == 0)
        return context;
    // Derive the first state of the new contexts from the byte at upBranch.
    byte localNumberStatistics = 0;
    byte localFlags = (byte) ((symbol >= 0x40) ? 0x10 : 0x00);
    symbol = upBranch.NumberStatistics;
    byte localSymbol = symbol;
    byte localFrequency;
    PpmContext localSuccessor = ((Pointer) upBranch) + 1;
    localFlags |= (byte) ((symbol >= 0x40) ? 0x08 : 0x00);
    if (context.NumberStatistics != 0)
    {
        state = context.Statistics;
        if (state.Symbol != symbol)
        {
            byte temporary;
            do
            {
                temporary = state[1].Symbol;
                state++;
            } while (temporary != symbol);
        }
        // Scale the initial frequency from the symbol's share in the parent.
        uint cf = (uint) (state.Frequency - 1);
        uint s0 = (uint) (context.SummaryFrequency - context.NumberStatistics - cf);
        localFrequency = (byte) (1 + ((2*cf <= s0) ? (uint) ((5*cf > s0) ? 1 : 0) : ((cf + 2*s0 - 3)/s0)));
    }
    else
    {
        localFrequency = context.FirstStateFrequency;
    }
    // Allocate the new contexts deepest-first and patch the saved states.
    do
    {
        PpmContext currentContext = Allocator.AllocateContext();
        if (currentContext == PpmContext.Zero)
            return PpmContext.Zero;
        currentContext.NumberStatistics = localNumberStatistics;
        currentContext.Flags = localFlags;
        currentContext.FirstStateSymbol = localSymbol;
        currentContext.FirstStateFrequency = localFrequency;
        currentContext.FirstStateSuccessor = localSuccessor;
        currentContext.Suffix = context;
        context = currentContext;
        states[--stateIndex].Successor = context;
    } while (stateIndex != 0);
    return context;
}
/// <summary>
/// Handles a symbol whose state has no successor yet: points the gathered
/// states at the current text position (UpBranch) while walking down the
/// suffix chain, honouring the model restoration method. Returns the context
/// to continue from, or PpmContext.Zero on failure. Port of PPMd var.I
/// ReduceOrder; the goto-into-loop structure mirrors the original.
/// </summary>
private PpmContext ReduceOrder(PpmState state, PpmContext context)
{
    PpmState currentState;
    PpmState[] states = new PpmState[MaximumOrder];
    uint stateIndex = 0;
    PpmContext currentContext = context;
    PpmContext UpBranch = Allocator.Text;
    byte temporary;
    byte symbol = foundState.Symbol;
    states[stateIndex++] = foundState;
    foundState.Successor = UpBranch;
    orderFall++;
    bool gotoLoopEntry = false;
    if (state != PpmState.Zero)
    {
        context = context.Suffix;
        gotoLoopEntry = true;
    }
    while (true)
    {
        if (gotoLoopEntry)
        {
            gotoLoopEntry = false;
            goto LoopEntry;
        }
        if (context.Suffix == PpmContext.Zero)
        {
            // Reached the root without finding a successor.
            if (method > ModelRestorationMethod.Freeze)
            {
                do
                {
                    states[--stateIndex].Successor = context;
                } while (stateIndex != 0);
                Allocator.Text = Allocator.Heap + 1;
                orderFall = 1;
            }
            return context;
        }
        context = context.Suffix;
        if (context.NumberStatistics != 0)
        {
            state = context.Statistics;
            if (state.Symbol != symbol)
            {
                do
                {
                    temporary = state[1].Symbol;
                    state++;
                } while (temporary != symbol);
            }
            temporary = (byte) ((state.Frequency < MaximumFrequency - 9) ? 2 : 0);
            state.Frequency += temporary;
            context.SummaryFrequency += temporary;
        }
        else
        {
            state = context.FirstState;
            state.Frequency += (byte) ((state.Frequency < 32) ? 1 : 0);
        }
        LoopEntry:
        if (state.Successor != PpmContext.Zero)
            break;
        states[stateIndex++] = state;
        state.Successor = UpBranch;
        orderFall++;
    }
    if (method > ModelRestorationMethod.Freeze)
    {
        // Non-freeze restoration: rewrite all gathered successors and reset text.
        context = state.Successor;
        do
        {
            states[--stateIndex].Successor = context;
        } while (stateIndex != 0);
        Allocator.Text = Allocator.Heap + 1;
        orderFall = 1;
        return context;
    }
    else if (state.Successor <= UpBranch)
    {
        // Successor still points into the text area: materialise real contexts.
        currentState = foundState;
        foundState = state;
        state.Successor = CreateSuccessors(false, PpmState.Zero, context);
        foundState = currentState;
    }
    if (orderFall == 1 && currentContext == maximumContext)
    {
        foundState.Successor = state.Successor;
        Allocator.Text--;
    }
    return state.Successor;
}
/// <summary>
/// Recovers from an allocation failure during UpdateModel: prunes the
/// partially updated contexts, then either freezes, restarts, or cuts the
/// model down depending on the configured restoration method and current
/// memory pressure.
/// </summary>
private void RestoreModel(PpmContext context, PpmContext minimumContext, PpmContext foundStateSuccessor)
{
    PpmContext currentContext;
    Allocator.Text = Allocator.Heap;
    // Undo the symbol insertions performed before the failure.
    for (currentContext = maximumContext; currentContext != context; currentContext = currentContext.Suffix)
    {
        if (--currentContext.NumberStatistics == 0)
        {
            // Back to a binary context: fold the single state inline.
            currentContext.Flags =
                (byte)
                ((currentContext.Flags & 0x10) + ((currentContext.Statistics.Symbol >= 0x40) ? 0x08 : 0x00));
            PpmState state = currentContext.Statistics;
            Copy(currentContext.FirstState, state);
            Allocator.SpecialFreeUnits(state);
            currentContext.FirstStateFrequency = (byte) ((currentContext.FirstStateFrequency + 11) >> 3);
        }
        else
        {
            Refresh((uint) ((currentContext.NumberStatistics + 3) >> 1), false, currentContext);
        }
    }
    // Decay the remaining contexts down to the minimum context.
    for (; currentContext != minimumContext; currentContext = currentContext.Suffix)
    {
        if (currentContext.NumberStatistics == 0)
            currentContext.FirstStateFrequency -= (byte) (currentContext.FirstStateFrequency >> 1);
        else if ((currentContext.SummaryFrequency += 4) > 128 + 4*currentContext.NumberStatistics)
            Refresh((uint) ((currentContext.NumberStatistics + 2) >> 1), true, currentContext);
    }
    if (method > ModelRestorationMethod.Freeze)
    {
        maximumContext = foundStateSuccessor;
        Allocator.GlueCount += (uint) (((Allocator.MemoryNodes[1].Stamp & 1) == 0) ? 1 : 0);
    }
    else if (method == ModelRestorationMethod.Freeze)
    {
        // Freeze: drop binary contexts and stop growing the model.
        while (maximumContext.Suffix != PpmContext.Zero)
            maximumContext = maximumContext.Suffix;
        RemoveBinaryContexts(0, maximumContext);
        method = (ModelRestorationMethod) (method + 1);
        Allocator.GlueCount = 0;
        orderFall = modelOrder;
    }
    else if (method == ModelRestorationMethod.Restart ||
             Allocator.GetMemoryUsed() < (Allocator.AllocatorSize >> 1))
    {
        // Restart from scratch (also taken when memory use is already low).
        StartModel(modelOrder, method);
        escapeCount = 0;
    }
    else
    {
        // Cut the tree down until memory use drops below 3/4 of the pool.
        while (maximumContext.Suffix != PpmContext.Zero)
            maximumContext = maximumContext.Suffix;
        do
        {
            CutOff(0, maximumContext);
            Allocator.ExpandText();
        } while (Allocator.GetMemoryUsed() > 3*(Allocator.AllocatorSize >> 2));
        Allocator.GlueCount = 0;
        orderFall = modelOrder;
    }
}
/// <summary>
/// Exchanges the three payload fields (symbol, frequency, successor) of two states.
/// </summary>
private static void Swap(PpmState state1, PpmState state2)
{
    byte tmpSymbol = state2.Symbol;
    byte tmpFrequency = state2.Frequency;
    PpmContext tmpSuccessor = state2.Successor;
    state2.Symbol = state1.Symbol;
    state2.Frequency = state1.Frequency;
    state2.Successor = state1.Successor;
    state1.Symbol = tmpSymbol;
    state1.Frequency = tmpFrequency;
    state1.Successor = tmpSuccessor;
}
/// <summary>
/// Copies state2's payload fields into state1 (state1 &#8592; state2).
/// </summary>
private static void Copy(PpmState state1, PpmState state2)
{
    // Field assignments are independent; order is immaterial.
    state1.Successor = state2.Successor;
    state1.Frequency = state2.Frequency;
    state1.Symbol = state2.Symbol;
}
/// <summary>
/// Rounded arithmetic right shift: adds half of the rounding interval
/// (1 &lt;&lt; (shift - round)) before shifting <paramref name="sum"/> right by
/// <paramref name="shift"/>.
/// </summary>
private static int Mean(int sum, int shift, int round)
{
    int bias = 1 << (shift - round);
    return (sum + bias) >> shift;
}
// Resets the escape counter and clears the per-symbol mask used while
// escaping through suffix contexts.
private void ClearMask()
{
    escapeCount = 1;
    Array.Clear(characterMask, 0, characterMask.Length);
}
#endregion
}
}

View File

@@ -1,81 +0,0 @@
using System;
namespace SharpCompress.Compressor.PPMd
{
public enum PpmdVersion
{
    /// <summary>PPMd variant H.</summary>
    H,
    /// <summary>PPMd variant H as stored by 7-Zip (driven by an LZMA-style range decoder).</summary>
    H7z,
    /// <summary>PPMd variant I revision 1.</summary>
    I1,
}
/// <summary>
/// Carries the PPMd settings (variant, model order, allocator size,
/// restoration method) and converts them to/from their on-disk encodings.
/// </summary>
public class PpmdProperties
{
    public PpmdVersion Version = PpmdVersion.I1;
    public int ModelOrder;
    internal I1.ModelRestorationMethod ModelRestorationMethod;
    private int allocatorSize;
    internal I1.Allocator Allocator;
    // Defaults: 16 MiB allocator, model order 6.
    public PpmdProperties()
        : this(16 << 20, 6)
    {
    }
    public PpmdProperties(int allocatorSize, int modelOrder)
        : this(allocatorSize, modelOrder, I1.ModelRestorationMethod.Restart)
    {
    }
    internal PpmdProperties(int allocatorSize, int modelOrder, I1.ModelRestorationMethod modelRestorationMethod)
    {
        AllocatorSize = allocatorSize;
        ModelOrder = modelOrder;
        ModelRestorationMethod = modelRestorationMethod;
    }
    /// <summary>
    /// Parses stored properties. 2 bytes = variant I1 packed word
    /// (bits 0-3: order-1, bits 4-11: (size &gt;&gt; 20)-1, bits 12-15: restoration
    /// method); 5 bytes = 7-Zip layout (byte 0: order, bytes 1-4: allocator size).
    /// Other lengths are silently ignored, leaving the defaults.
    /// </summary>
    public PpmdProperties(byte[] properties)
    {
        if (properties.Length == 2)
        {
            ushort props = BitConverter.ToUInt16(properties, 0);
            AllocatorSize = (((props >> 4) & 0xff) + 1) << 20;
            ModelOrder = (props & 0x0f) + 1;
            ModelRestorationMethod = (I1.ModelRestorationMethod) (props >> 12);
        }
        else if (properties.Length == 5)
        {
            Version = PpmdVersion.H7z;
            AllocatorSize = BitConverter.ToInt32(properties, 1);
            ModelOrder = properties[0];
        }
    }
    public int AllocatorSize
    {
        get { return allocatorSize; }
        set
        {
            allocatorSize = value;
            // For variant I1 the allocator is created and sized eagerly.
            if (Version == PpmdVersion.I1)
            {
                if (Allocator == null)
                    Allocator = new I1.Allocator();
                Allocator.Start(allocatorSize);
            }
        }
    }
    /// <summary>
    /// Packs the settings into the 2-byte variant I1 encoding (inverse of the
    /// 2-byte constructor above).
    /// </summary>
    public byte[] Properties
    {
        get
        {
            return
                BitConverter.GetBytes(
                    (ushort)
                    ((ModelOrder - 1) + (((AllocatorSize >> 20) - 1) << 4) + ((ushort) ModelRestorationMethod << 12)));
        }
    }
}

View File

@@ -1,142 +0,0 @@
using System;
using System.IO;
using SharpCompress.Compressor.Rar.PPM;
namespace SharpCompress.Compressor.PPMd
{
/// <summary>
/// Stream adapter over the PPMd coders: write-only when compressing
/// (variant I1 only), read-only when decompressing (variants I1, H and H7z).
/// Not seekable; Length is unsupported.
/// </summary>
public class PpmdStream : Stream
{
    private PpmdProperties properties;
    private Stream stream;        // underlying compressed stream
    private bool compress;        // true = this stream compresses on Write
    private I1.Model model;       // used for variant I1
    private H.ModelPPM modelH;    // used for variants H and H7z
    private LZMA.RangeCoder.Decoder decoder; // H7z only
    private long position = 0;    // bytes read/written through this stream
    private bool isDisposed;
    public PpmdStream(PpmdProperties properties, Stream stream, bool compress)
    {
        this.properties = properties;
        this.stream = stream;
        this.compress = compress;
        if (properties.Version == PpmdVersion.I1)
        {
            model = new I1.Model();
            if (compress)
                model.EncodeStart(properties);
            else
                model.DecodeStart(stream, properties);
        }
        if (properties.Version == PpmdVersion.H)
        {
            modelH = new H.ModelPPM();
            if (compress)
                throw new NotImplementedException();
            else
                modelH.decodeInit(stream, properties.ModelOrder, properties.AllocatorSize);
        }
        if (properties.Version == PpmdVersion.H7z)
        {
            modelH = new H.ModelPPM();
            if (compress)
                throw new NotImplementedException();
            else
                // H7z reads through an external LZMA-style range decoder,
                // so the model itself gets no stream.
                modelH.decodeInit(null, properties.ModelOrder, properties.AllocatorSize);
            decoder = new LZMA.RangeCoder.Decoder();
            decoder.Init(stream);
        }
    }
    public override bool CanRead
    {
        get { return !compress; }
    }
    public override bool CanSeek
    {
        get { return false; }
    }
    public override bool CanWrite
    {
        get { return compress; }
    }
    public override void Flush()
    {
        // Intentionally empty: compressed data is only finalised on Dispose.
    }
    protected override void Dispose(bool isDisposing)
    {
        if (isDisposed)
        {
            return;
        }
        isDisposed = true;
        if (isDisposing)
        {
            if (compress)
                // Final empty block flushes the range encoder.
                model.EncodeBlock(stream, new MemoryStream(), true);
        }
        base.Dispose(isDisposing);
    }
    public override long Length
    {
        get { throw new NotSupportedException(); }
    }
    public override long Position
    {
        get { return position; }
        set { throw new NotSupportedException(); }
    }
    public override int Read(byte[] buffer, int offset, int count)
    {
        // Read is only meaningful in decompress mode.
        if (compress)
            return 0;
        int size = 0;
        if (properties.Version == PpmdVersion.I1)
            size = model.DecodeBlock(stream, buffer, offset, count);
        if (properties.Version == PpmdVersion.H)
        {
            int c;
            // decodeChar returns a negative value at end of data.
            while (size < count && (c = modelH.decodeChar()) >= 0)
            {
                buffer[offset++] = (byte) c;
                size++;
            }
        }
        if (properties.Version == PpmdVersion.H7z)
        {
            int c;
            while (size < count && (c = modelH.decodeChar(decoder)) >= 0)
            {
                buffer[offset++] = (byte) c;
                size++;
            }
        }
        position += size;
        return size;
    }
    public override long Seek(long offset, SeekOrigin origin)
    {
        throw new NotSupportedException();
    }
    public override void SetLength(long value)
    {
        throw new NotSupportedException();
    }
    public override void Write(byte[] buffer, int offset, int count)
    {
        if (compress)
            model.EncodeBlock(stream, new MemoryStream(buffer, offset, count), false);
    }
}

View File

@@ -1,26 +0,0 @@
namespace SharpCompress.Compressor.Rar.decode
{
// Shared constants for the ported RAR decoders.
// NOTE(review): values appear to mirror the unpack constants from the reference
// unrar sources - confirm against them before changing anything here.
internal class Compress
{
// Size of the decoder's input code buffer.
public const int CODEBUFSIZE = 0x4000;
// Maximum sliding-window size (4 MiB).
public const int MAXWINSIZE = 0x400000;
// Mask for wrapping window offsets; valid because MAXWINSIZE is a power of two.
public static readonly int MAXWINMASK = (MAXWINSIZE - 1);
public const int LOW_DIST_REP_COUNT = 16;
// Alphabet sizes for the Huffman-coded symbol sets.
public const int NC = 299; /* alphabet = {0, 1, 2, ..., NC - 1} */
public const int DC = 60;
public const int LDC = 17;
public const int RC = 28;
// Total number of Huffman table entries across all alphabets.
public static readonly int HUFF_TABLE_SIZE = (NC + DC + RC + LDC);
public const int BC = 20;
// The *20 constants presumably belong to the older 2.0-format decoder - TODO confirm.
public const int NC20 = 298; /* alphabet = {0, 1, 2, ..., NC20 - 1} */
public const int DC20 = 48;
public const int RC20 = 28;
public const int BC20 = 19;
public const int MC20 = 257;
}
}

View File

@@ -1,16 +0,0 @@
namespace SharpCompress.Compressor.Rar.decode
{
// Kinds of post-processing filters the RAR VM applies to decompressed data.
internal enum FilterType
{
FILTER_NONE,
FILTER_PPM,
// NOTE(review): E8/E8E9 presumably rewrite x86 call/jump (0xE8/0xE9) relative
// offsets to improve compression - confirm against the VM filter detection code.
FILTER_E8,
FILTER_E8E9,
FILTER_UPCASETOLOW,
FILTER_AUDIO,
FILTER_RGB,
FILTER_DELTA,
FILTER_ITANIUM,
FILTER_E8E9V2,
}
}

View File

@@ -1,8 +0,0 @@
namespace SharpCompress.Compressor.Rar.PPM
{
// Discriminates the two block encodings in a RAR compressed stream:
// plain LZ data or PPM-compressed data.
internal enum BlockTypes
{
BLOCK_LZ = 0,
BLOCK_PPM = 1,
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,46 +0,0 @@
namespace SharpCompress.Compressor.Rar.VM
{
// Per-opcode metadata for the RAR VM instruction set, encoded as bit flags.
internal class VMCmdFlags
{
// Operand-count bits (masked with VMCF_OPMASK).
public const byte VMCF_OP0 = 0;
public const byte VMCF_OP1 = 1;
public const byte VMCF_OP2 = 2;
public const byte VMCF_OPMASK = 3;
// Instruction can operate in byte mode.
public const byte VMCF_BYTEMODE = 4;
// Instruction is a jump.
public const byte VMCF_JUMP = 8;
// Instruction is a procedure call/return.
public const byte VMCF_PROC = 16;
// Instruction reads the flags register.
public const byte VMCF_USEFLAGS = 32;
// Instruction writes the flags register.
public const byte VMCF_CHFLAGS = 64;
// Flags for each VM opcode, indexed by opcode number.
// NOTE(review): the order is positional and must match the VM's opcode enum -
// do not reorder or reformat these entries.
public static byte[] VM_CmdFlags = new byte[]
{
VMCF_OP2 | VMCF_BYTEMODE, VMCF_OP2 | VMCF_BYTEMODE | VMCF_CHFLAGS,
VMCF_OP2 | VMCF_BYTEMODE | VMCF_CHFLAGS,
VMCF_OP2 | VMCF_BYTEMODE | VMCF_CHFLAGS,
VMCF_OP1 | VMCF_JUMP | VMCF_USEFLAGS,
VMCF_OP1 | VMCF_JUMP | VMCF_USEFLAGS,
VMCF_OP1 | VMCF_BYTEMODE | VMCF_CHFLAGS,
VMCF_OP1 | VMCF_BYTEMODE | VMCF_CHFLAGS, VMCF_OP1 | VMCF_JUMP,
VMCF_OP2 | VMCF_BYTEMODE | VMCF_CHFLAGS,
VMCF_OP2 | VMCF_BYTEMODE | VMCF_CHFLAGS,
VMCF_OP2 | VMCF_BYTEMODE | VMCF_CHFLAGS,
VMCF_OP2 | VMCF_BYTEMODE | VMCF_CHFLAGS,
VMCF_OP1 | VMCF_JUMP | VMCF_USEFLAGS,
VMCF_OP1 | VMCF_JUMP | VMCF_USEFLAGS,
VMCF_OP1 | VMCF_JUMP | VMCF_USEFLAGS,
VMCF_OP1 | VMCF_JUMP | VMCF_USEFLAGS,
VMCF_OP1 | VMCF_JUMP | VMCF_USEFLAGS,
VMCF_OP1 | VMCF_JUMP | VMCF_USEFLAGS, VMCF_OP1, VMCF_OP1,
VMCF_OP1 | VMCF_PROC, VMCF_OP0 | VMCF_PROC, VMCF_OP1 | VMCF_BYTEMODE,
VMCF_OP2 | VMCF_BYTEMODE | VMCF_CHFLAGS,
VMCF_OP2 | VMCF_BYTEMODE | VMCF_CHFLAGS,
VMCF_OP2 | VMCF_BYTEMODE | VMCF_CHFLAGS,
VMCF_OP1 | VMCF_BYTEMODE | VMCF_CHFLAGS, VMCF_OP0, VMCF_OP0,
VMCF_OP0 | VMCF_USEFLAGS, VMCF_OP0 | VMCF_CHFLAGS, VMCF_OP2, VMCF_OP2,
VMCF_OP2 | VMCF_BYTEMODE, VMCF_OP2 | VMCF_BYTEMODE,
VMCF_OP2 | VMCF_BYTEMODE,
VMCF_OP2 | VMCF_BYTEMODE | VMCF_USEFLAGS | VMCF_CHFLAGS,
VMCF_OP2 | VMCF_BYTEMODE | VMCF_USEFLAGS | VMCF_CHFLAGS, VMCF_OP0
};
}
}

View File

@@ -1,18 +0,0 @@
namespace SharpCompress.Compressor.Rar.VM
{
internal class VMStandardFilterSignature
{
internal VMStandardFilterSignature(int length, uint crc, VMStandardFilters type)
{
this.Length = length;
CRC = crc;
this.Type = type;
}
internal int Length { get; private set; }
internal uint CRC { get; private set; }
internal VMStandardFilters Type { get; private set; }
}
}

Some files were not shown because too many files have changed in this diff Show More