Compare commits

...

375 Commits

Author SHA1 Message Date
Adam Hathcock
3009e6dcfd Mark for 0.32.2 2022-07-29 10:45:56 +01:00
Adam Hathcock
70343b17bc add more tests for uncompressed streaming zips 2022-07-29 09:47:35 +01:00
Adam Hathcock
3f6027ec2c Merge pull request #686 from Erior/477
Mitigation of problems
2022-07-29 09:41:24 +01:00
Lars Vahlenberg
5706732c55 Naive implementation of searching for the DataDescriptor; not compatible with big archives (>32-bit), but handles the test cases. 2022-07-28 23:03:06 +02:00
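For context on the approach: the ZIP data descriptor is located by scanning forward for its (optional) signature bytes PK\x07\x08, and the 32-bit size fields in the descriptor are what make a naive scan incompatible with >32-bit archives. A minimal sketch of such a scan, with illustrative names (not the actual patch):

```csharp
using System.IO;

static class DataDescriptorSearch
{
    // Optional ZIP data descriptor signature "PK\x07\x08" (0x08074b50, little-endian).
    private static readonly byte[] Signature = { 0x50, 0x4B, 0x07, 0x08 };

    // Naive forward scan: returns the position of the first signature match,
    // or -1 if the end of the (seekable) stream is reached first.
    public static long FindDescriptor(Stream stream)
    {
        var matched = 0;
        int b;
        while ((b = stream.ReadByte()) != -1)
        {
            matched = b == Signature[matched] ? matched + 1
                    : b == Signature[0] ? 1 : 0;
            if (matched == Signature.Length)
            {
                return stream.Position - Signature.Length;
            }
        }
        return -1;
    }
}
```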
Lars Vahlenberg
ad633a9dd0 missing test file from error report 2022-07-28 21:20:42 +02:00
Lars Vahlenberg
7c56df1237 Mitigation of problems 2022-07-28 20:36:28 +02:00
Adam Hathcock
c1110f2897 Merge pull request #683 from OwnageIsMagic/patch-1
WriteAll: use delegate instead of Expression
2022-07-27 10:13:50 +01:00
Adam Hathcock
647642578b Merge branch 'master' into patch-1 2022-07-27 09:49:13 +01:00
OwnageIsMagic
5ca4efac31 WriteAll: revert 109a7c1 2022-07-26 21:36:00 +03:00
Adam Hathcock
deddf12b70 Merge pull request #684 from daverant/nuget-license
Include license in nuget package
2022-07-26 16:21:41 +01:00
OwnageIsMagic
109a7c12ea WriteAll: update delegate type 2022-07-19 04:03:26 +03:00
David Rant
f955031e27 Hide license in IDE 2022-07-18 17:16:22 +01:00
David Rant
6a69c6cd02 Reference bundled package license file 2022-07-18 17:11:06 +01:00
David Rant
c1d4ac45ab Include license when packing 2022-07-18 17:10:36 +01:00
OwnageIsMagic
2946a35b0e WriteAll: use delegate instead of Expression 2022-07-18 04:36:31 +03:00
Adam Hathcock
c73a8cb18f Merge pull request #682 from adamhathcock/RarFileVolIdx_RarArcVer_GzCrc 2022-07-16 11:29:48 +01:00
Nanook
574a093038 Minor tweak that got missed in the last tidy. 2022-07-15 21:25:39 +01:00
Nanook
4eb1fe0b80 RarArchive has Min/MaxVersion. RarEntry has Volume Indexes. GZ CRC fix. 2022-07-15 21:15:10 +01:00
Adam Hathcock
4c46cd725b Merge pull request #679 from louis-michelbergeron/master
Fix LZMADecoder Code function
2022-06-28 08:27:13 +01:00
Adam Hathcock
fdbd0e1fba Merge branch 'master' into master 2022-06-28 08:21:49 +01:00
louis-michel
5801168ce0 Merge branch 'master' of https://github.com/louis-michelbergeron/sharpcompress 2022-06-27 19:13:20 -04:00
louis-michel
d4c7551087 Fix LZMA Code function 2022-06-27 19:13:10 -04:00
Adam Hathcock
c9daf0c9f5 Merge pull request #675 from Erior/feature/#636
ReadOnlySubStream overrides and adds logic #636
2022-06-22 11:17:18 +01:00
Adam Hathcock
8cb566b031 Merge branch 'master' into feature/#636 2022-06-22 09:05:57 +01:00
Lars Vahlenberg
089b16326e ReadOnlySubStream overrides and adds logic to Read byte[], needs to have same logic for Span<byte> for consistency. 2022-06-21 19:30:07 +02:00
Adam Hathcock
c0e43cc0e5 Mark for 0.32.1 2022-06-20 10:32:47 +01:00
Adam Hathcock
514c3539e6 Merge pull request #672 from MartinDemberger/Task_477
Corrected skip-marker on skip of uncompressed ZIP file with missing size information.
2022-06-20 10:31:31 +01:00
Adam Hathcock
62c94a178c Merge branch 'master' into Task_477 2022-06-20 10:26:45 +01:00
Adam Hathcock
9fee38b18d Merge pull request #674 from MartinDemberger/DeduplicateNonDisposing
Suppress nested NonDisposingStream
2022-06-20 10:25:25 +01:00
Adam Hathcock
cd3114d39e Merge branch 'master' into DeduplicateNonDisposing 2022-06-20 10:20:02 +01:00
Adam Hathcock
12b4e15812 Merge pull request #673 from Erior/feature/Malformed-zip-file-generated
Feature/malformed zip file generated
2022-06-20 10:19:41 +01:00
Martin Demberger
35336a0827 Suppress nested NonDisposingStream 2022-06-19 22:05:52 +02:00
Martin Demberger
ece7cbfec3 Set skip-marker when stream is skipped 2022-06-18 14:35:14 +02:00
Lars Vahlenberg
a00075ee0d Wrong flags set; we do not expose this in the interface 2022-06-17 15:07:07 +02:00
Lars Vahlenberg
b6c4e28b4d Generated test case, however, don't see any problems 2022-06-16 23:32:46 +02:00
Martin Demberger
8b55cce39a Better handling of uncompressed zip files. 2022-06-15 16:28:14 +02:00
Adam Hathcock
6e99446ce5 Mark for 0.32 2022-06-13 15:28:54 +01:00
Adam Hathcock
20a09b4866 Drop net5 2022-06-13 15:24:53 +01:00
Adam Hathcock
7f7db5eabd Merge pull request #669 from louis-michelbergeron/master
XZ decoding BCJ filters support
2022-06-13 08:37:28 +01:00
louis-michelbergeron
0651d064fc Update README.md 2022-06-10 15:32:41 -04:00
louis-michelbergeron
73ca7759d3 Update README.md
Contribution line.
2022-06-10 15:32:08 -04:00
louis-michel
0f112d0685 BCJ executable filter (only for decoding), used by XZ. 2022-06-10 13:29:42 -04:00
Adam Hathcock
fa5c91ecf6 Merge pull request #663 from Nanook/master
Align behaviour of the 7Zip encrypted-filenames archive exception with Rar when no password is provided
2022-05-04 08:21:39 +01:00
Nanook
3b2fd1b9fa Merge branch 'adamhathcock:master' into master 2022-05-04 01:36:58 +01:00
Craig
e424094fdf 7z encrypted filename exception with no password matches rar behaviour. 2022-05-04 01:35:58 +01:00
Adam Hathcock
bad9ab2c9d Merge pull request #662 from Nanook/master
Properly integrated zip multivolume and general split support.
2022-05-03 08:23:33 +01:00
Craig
61c01ce9b0 Properly integrated zip multivolume and split support. 2022-04-30 19:35:40 +01:00
Adam Hathcock
3de5df9f38 Merge pull request #661 from Nanook/master
Added multipart Zip support (z01...). Added IEntry.IsSolid
2022-04-29 13:43:37 +01:00
Craig
910aa1c22e Corrected the Crc exception as it was within a #DEBUG define 2022-04-27 14:12:00 +01:00
Craig
71c8f3129f RarStream Position fix; it was returning the file size. 7Zip CrcCheckStream always failed. Added a Solid Rar entry CRC test. 2022-04-27 13:16:05 +01:00
Craig
224614312f Added multipart Zip support (z01...). Added IEntry.IsSolid and implemented Rar and 7Zip support. 2022-04-25 01:16:53 +01:00
Adam Hathcock
f717133947 Merge pull request #660 from adamhathcock/dependabot/github_actions/actions/upload-artifact-3
Bump actions/upload-artifact from 2 to 3
2022-04-21 11:16:52 +01:00
dependabot[bot]
fcbfcfed03 Bump actions/upload-artifact from 2 to 3
Bumps [actions/upload-artifact](https://github.com/actions/upload-artifact) from 2 to 3.
- [Release notes](https://github.com/actions/upload-artifact/releases)
- [Commits](https://github.com/actions/upload-artifact/compare/v2...v3)

---
updated-dependencies:
- dependency-name: actions/upload-artifact
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-04-21 08:52:47 +00:00
Adam Hathcock
e6601c91ca Merge pull request #659 from adamhathcock/updates
Dependency updates and start of enforcing some C# standards
2022-04-21 09:52:26 +01:00
Adam Hathcock
0379903689 Fix tests 2022-04-21 08:59:35 +01:00
Adam Hathcock
6950eebf9f Dependency updates and start of enforcing some C# standards 2022-04-21 08:54:59 +01:00
Adam Hathcock
c15f1327c9 Merge pull request #658 from Nanook/master
Added Split archive support with unit tests. …
2022-04-21 08:26:06 +01:00
Craig
ec1999f73d Added Split archive support with unit tests. Added ArchiveFactory.IsArchive() and minor refactor. 2022-04-21 00:12:26 +01:00
Adam Hathcock
3d0a18b05d Merge pull request #655 from Ryhon0/master
Tar file mode, user and group
2022-04-11 16:43:40 +01:00
Ryhon
156a02c5a8 Tar file mode, user and group 2022-04-11 14:33:55 +02:00
Adam Hathcock
eba1a443e3 Merge pull request #652 from ds5678/net6_with_trimming
Add a net 6 target and make trimmable
2022-04-11 10:14:18 +01:00
ds5678
eb44cdc903 Update dotnetcore.yml 2022-04-08 02:21:07 -04:00
Jeremy Pritts
196df63de2 fix build project 2022-03-31 14:52:36 -04:00
Jeremy Pritts
ed3c11f44c update workflow and csproj files 2022-03-31 04:04:56 -04:00
Jeremy Pritts
7f6c877fdc add a net 6 target and make trimmable 2022-03-30 21:24:51 -04:00
Adam Hathcock
eee8309db8 Mark for 0.31 2022-03-30 12:03:03 +01:00
Adam Hathcock
155cfab792 Merge pull request #651 from loop-evgeny/evgeny-upgrade-adler32
Update Adler32 from ImageSharp v2.1.0
2022-03-30 12:01:32 +01:00
Evgeny Morozov
e1c36afdec Update Adler32 from ImageSharp v2.1.0
Adler32.cs is taken from 09b2cdb83a with minimal change to make it build as part of SharpCompress. Fixes https://github.com/adamhathcock/sharpcompress/issues/650 and https://github.com/adamhathcock/sharpcompress/issues/645.
2022-03-30 12:17:14 +02:00
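Adler-32 itself is small enough to state here; a scalar reference version of the checksum that the vectorized ImageSharp code computes (shown for illustration, not taken from either codebase):

```csharp
using System;

static class Checksums
{
    // Scalar Adler-32 reference. 65521 is the largest prime below 2^16;
    // the hardware-accelerated version computes the same value, just faster.
    public static uint Adler32(ReadOnlySpan<byte> data)
    {
        const uint Mod = 65521;
        uint a = 1, b = 0;
        foreach (var x in data)
        {
            a = (a + x) % Mod;
            b = (b + a) % Mod;
        }
        return (b << 16) | a;
    }
}
```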
Adam Hathcock
6b0d6a41ca Merge pull request #638 from Nanook/rar2MultiWithTest
Rar2 fix with new unit tests that fail on previous build.
2022-02-16 08:33:12 +00:00
Craig
dab157bb71 Rar2 fix with new unit tests that fail on previous build. 2022-02-15 16:24:22 +00:00
Adam Hathcock
8d17d09455 Merge pull request #624 from adamhathcock/issue-617
Add test and probable fix for Issue 617
2021-11-22 09:20:15 +00:00
Adam Hathcock
05208ccd9b Add test and probable fix for Issue 617 2021-11-22 08:40:40 +00:00
Adam Hathcock
a1e7c0068d Merge pull request #622 from adamhathcock/net461-tests
Net461 tests
2021-10-02 15:32:20 +01:00
Adam Hathcock
e6bec19946 Mark for 0.30 2021-10-02 15:29:22 +01:00
Adam Hathcock
ec2be2869f Fix whitespace from dotnet format 2021-10-02 15:29:03 +01:00
Adam Hathcock
ce5432ed73 Fix tests for multi-targetting 2021-10-02 15:25:43 +01:00
Adam Hathcock
b6e0ad89ce Remove duplicated artifact step 2021-10-02 15:21:05 +01:00
Adam Hathcock
2745bfa19b Minor SDK update 2021-10-02 15:19:51 +01:00
Adam Hathcock
3cdc4b38a6 Test 461 on github actions 2021-10-02 15:19:13 +01:00
Adam Hathcock
fc1ca808d7 Merge pull request #621 from inthemedium/master
Add net461 target to clean up issues with system.* nuget dependencies
2021-10-02 15:08:18 +01:00
Jeff Tyson
6983e66037 Fix preprocessor condition 2021-10-01 16:34:00 +00:00
Jeff Tyson
01f7336d09 Based on docs, the target should be net461 2021-09-29 22:04:47 +00:00
Jeff Tyson
1561bba538 Add net462 target to clean up issues with system.* nuget dependencies 2021-09-29 21:55:11 +00:00
Adam Hathcock
3ecf8a5e0c Merge pull request #616 from amosonn/patch-1
Fix for chunked read for ZLibBaseStream
2021-09-27 09:14:58 +01:00
Adam Hathcock
e2095fc416 Merge branch 'master' into patch-1 2021-09-27 09:08:58 +01:00
Amos Onn
8398d40106 Fix #615 2021-09-14 22:02:18 +02:00
Amos Onn
134fa8892f Test for bug #615 2021-09-14 21:55:05 +02:00
Adam Hathcock
ea5c8dc063 Merge pull request #614 from adamhathcock/ensure-dest-dir-exists
Ensure destination directory exists.
2021-09-12 08:56:42 +01:00
Adam Hathcock
0209d00164 Minor updates and prep for 0.29 2021-09-12 08:52:00 +01:00
Adam Hathcock
a8d065dc9e Ensure destination directory exists 2021-09-12 08:47:30 +01:00
Adam Hathcock
7bd9711ade Merge pull request #610 from cyr/master
Bugfix for TarWriter - too much padding in large files
2021-09-12 08:43:20 +01:00
cyr
61802eadb4 Merge branch 'adamhathcock:master' into master 2021-09-12 09:37:07 +02:00
Adam Hathcock
b425659058 Merge pull request #611 from Thunderstr1k3/fix-zipheader-seeking
Allowing to seek empty zip files
2021-09-12 08:28:05 +01:00
Christian
3e32e3d7b1 Allowing to seek empty zip files 2021-09-02 13:54:32 +02:00
cyr
1b661c9df1 Fixed bug where a large file (int32+ file size) added an additional 512 bytes of padding in tar files. 2021-08-27 22:38:04 +02:00
Adam Hathcock
54fc26b93d Update build and mark for 0.28.3 2021-06-04 13:43:35 +01:00
Adam Hathcock
161f99bbad Merge pull request #601 from salvois/master
Write ZIP64 End of Central Directory only if needed.
2021-06-04 13:22:01 +01:00
Adam Hathcock
c012db0776 Merge branch 'master' into master 2021-06-04 13:17:13 +01:00
Adam Hathcock
8ee257d299 Merge pull request #592 from adamhathcock/memory-downgrade
Downgrade System.Memory to fix buffer version issue
2021-06-04 13:16:40 +01:00
Adam Hathcock
f9522107c3 Merge branch 'master' into memory-downgrade 2021-06-04 13:16:34 +01:00
Adam Hathcock
e07046a37a Merge pull request #596 from DannyBoyk/issue_595_conditionally_read_zip64_extra
Conditionally parse Zip64 extra field based on specification
2021-06-04 13:07:08 +01:00
Salvatore Isaja
ad6d0d9ae8 Write ZIP64 End of Central Directory only if needed. 2021-05-23 21:10:35 +02:00
Daniel Nash
fdc33e91bd Conditionally parse Zip64 extra field based on specification
The Zip64 extra field should look for values based on the corresponding
values in the local entry header.

Fixes adamhathcock/sharpcompress#595
2021-04-26 14:58:10 -04:00
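Per the ZIP specification (APPNOTE 4.5.3), each value in the Zip64 extended-information extra field is present only when the corresponding header field is saturated at 0xFFFFFFFF (0xFFFF for the disk number), which is what "conditionally parse" means here. A hedged sketch of that read order, with illustrative names:

```csharp
using System;
using System.Buffers.Binary;

static class Zip64Extra
{
    // Sketch: parse the Zip64 extended-information extra field (id 0x0001).
    // A value appears in the extra data only when the matching header field
    // is saturated (0xFFFFFFFF, or 0xFFFF for the disk number).
    public static void Parse(ReadOnlySpan<byte> extra, ref ulong uncompressedSize,
                             ref ulong compressedSize, ref ulong localHeaderOffset)
    {
        var pos = 0;
        if (uncompressedSize == 0xFFFFFFFF)
        {
            uncompressedSize = BinaryPrimitives.ReadUInt64LittleEndian(extra.Slice(pos, 8));
            pos += 8;
        }
        if (compressedSize == 0xFFFFFFFF)
        {
            compressedSize = BinaryPrimitives.ReadUInt64LittleEndian(extra.Slice(pos, 8));
            pos += 8;
        }
        if (localHeaderOffset == 0xFFFFFFFF)
        {
            localHeaderOffset = BinaryPrimitives.ReadUInt64LittleEndian(extra.Slice(pos, 8));
            // (a 4-byte disk number may follow when the header's disk field is 0xFFFF)
        }
    }
}
```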
Adam Hathcock
a34f5a855c Mark for 0.28.2 2021-04-25 09:29:56 +01:00
Adam Hathcock
6474741af1 Merge pull request #593 from adamhathcock/fix-pkware-encryption
ReadFully used by pkware encryption didn’t like spans
2021-04-25 09:29:02 +01:00
Adam Hathcock
c10bd840c5 ReadFully used by pkware encryption didn’t like spans 2021-04-25 09:25:51 +01:00
Adam Hathcock
e6dded826b Downgrade System.Memory to fix buffer version issue 2021-04-24 09:16:46 +01:00
Adam Hathcock
8a022c4b18 Update FORMATS.md
remove LZipArchive/Reader/Writer mention
2021-03-28 08:58:11 +01:00
Adam Hathcock
cfef228afc Merge pull request #579 from Looooong/fix/do-not-place-extention-classes-in-common-namespace
Do not place extension classes in common namespace
2021-03-18 13:52:40 +00:00
Nguyễn Đức Long
237ff9f055 Do not place extension classes in common namespace 2021-03-18 20:44:04 +07:00
Adam Hathcock
020f862814 Bug fix for recursive call introduced in 0.28 2021-02-18 08:31:50 +00:00
Adam Hathcock
fa6107200d Merge pull request #572 from Erior/feature/521
Not so elegant perhaps for checking 7z encryption
2021-02-16 08:05:08 +00:00
Adam Hathcock
eb81f972c4 Merge branch 'master' into feature/521 2021-02-16 08:01:32 +00:00
Lars Vahlenberg
93c1ff396e Not so elegant perhaps 2021-02-14 16:29:01 +01:00
Adam Hathcock
403baf05a6 Mark for 0.28 2021-02-14 13:07:35 +00:00
Adam Hathcock
a51b56339a Fix complete entry check for RAR files. 2021-02-14 13:00:43 +00:00
Adam Hathcock
f48a6d47dc Merge pull request #571 from Erior/feature/540
Proposal fixing Extra bytes written when setting zip64
2021-02-14 12:54:17 +00:00
Adam Hathcock
5b52463e4c Merge pull request #570 from Erior/feature/555
Proposal for handling Zip with long comment
2021-02-14 12:52:42 +00:00
Adam Hathcock
6f08bb72d8 Merge pull request #569 from BrendanGrant/improve_how_missing_parts_are_handled
Improve how missing parts are handled
2021-02-14 12:49:49 +00:00
Lars Vahlenberg
045093f453 Linux is case sensitive with file names 2021-02-14 10:26:26 +01:00
Lars Vahlenberg
566c49ce53 Proposal
Zip64 requires version 4.5
Number of disks is 4 bytes and not 8
2021-02-14 02:42:32 +01:00
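For reference, a sketch of the Zip64 end-of-central-directory record (APPNOTE 4.3.14) showing where both corrections land: the version fields carry 45 (spec 4.5), and the two disk-number fields are 4 bytes wide, not 8. Field values and the helper itself are illustrative, not the library's writer:

```csharp
using System;
using System.Buffers.Binary;

static class Zip64EocdSketch
{
    public static byte[] Write(ulong entries, ulong cdSize, ulong cdOffset)
    {
        var buf = new byte[56];
        var span = buf.AsSpan();
        BinaryPrimitives.WriteUInt32LittleEndian(span, 0x06064b50);        // signature
        BinaryPrimitives.WriteUInt64LittleEndian(span.Slice(4), 44);       // size of record after this field
        BinaryPrimitives.WriteUInt16LittleEndian(span.Slice(12), 45);      // version made by
        BinaryPrimitives.WriteUInt16LittleEndian(span.Slice(14), 45);      // version needed: 4.5
        BinaryPrimitives.WriteUInt32LittleEndian(span.Slice(16), 0);       // number of this disk (4 bytes)
        BinaryPrimitives.WriteUInt32LittleEndian(span.Slice(20), 0);       // disk with central dir (4 bytes)
        BinaryPrimitives.WriteUInt64LittleEndian(span.Slice(24), entries); // entries on this disk
        BinaryPrimitives.WriteUInt64LittleEndian(span.Slice(32), entries); // total entries
        BinaryPrimitives.WriteUInt64LittleEndian(span.Slice(40), cdSize);  // central dir size
        BinaryPrimitives.WriteUInt64LittleEndian(span.Slice(48), cdOffset);// central dir offset
        return buf;
    }
}
```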
Lars Vahlenberg
d1d2758ee0 Proposal for handling Zip with long comment 2021-02-13 23:57:03 +01:00
Brendan Grant
5b86c40d5b Properly detect if RAR is complete at the end or not 2021-02-13 13:34:57 -06:00
Brendan Grant
53393e744e Supporting reading contents of incomplete files 2021-02-13 13:33:43 -06:00
Adam Hathcock
2dd17e3882 Be explicit about zip64 extra field sizes. Formatting 2021-02-13 07:05:53 +00:00
Adam Hathcock
c4f7433584 Merge pull request #567 from Nanook/master
Zip64 Header and Size fix
2021-02-13 06:58:41 +00:00
Adam Hathcock
9405a7cf4b Merge pull request #568 from Bond-009/stackalloc
Use stackallocs where possible/sensible
2021-02-13 06:39:32 +00:00
Bond_009
cd677440ce Use stackallocs where possible/sensible 2021-02-12 20:20:15 +01:00
Craig Greenhill
c06f4bc5a8 Zip64 Header and Size fix 2021-02-11 09:37:59 +00:00
Adam Hathcock
4a7337b223 Merge pull request #563 from adamhathcock/add-reader-test-gzip
Fix Rewindable stream Length and add GZip Reader tests
2021-01-13 15:13:34 +00:00
Adam Hathcock
1d8afb817e Bump version 2021-01-13 14:41:25 +00:00
Adam Hathcock
0f06c3d934 Fix rewindable stream to expose length 2021-01-13 14:40:36 +00:00
Adam Hathcock
9d5cb8d119 Add GZip Reader tests 2021-01-13 10:42:59 +00:00
Adam Hathcock
a28d686eb9 Fix relavant package references 2021-01-11 12:01:17 +00:00
Adam Hathcock
ac525a8ec2 Merge branch 'master' of github.com:adamhathcock/sharpcompress 2021-01-11 10:01:49 +00:00
Adam Hathcock
52c44befa2 Merge pull request #560 from adamhathcock/gzip-fixes
Expose Last Modified time on GZipStream.  Add CRC and Size to GZipEntries on Archive
2021-01-11 08:57:19 +00:00
Adam Hathcock
c64251c341 Mark for 0.27 2021-01-09 14:04:44 +00:00
Adam Hathcock
bdc57d3c33 Merge pull request #559 from adamhathcock/net5
Use Net5, NetCoreApp3.1, NetStandard2.1, NetStandard2.0 only
2021-01-09 13:43:37 +00:00
Adam Hathcock
7edc437df2 formatting 2021-01-09 13:40:57 +00:00
Adam Hathcock
57e4395e7d Merge branch 'master' into net5
# Conflicts:
#	build/Program.cs
#	src/SharpCompress/Common/Zip/ZipFilePart.cs
2021-01-09 13:40:09 +00:00
Adam Hathcock
ee17dca9e5 Fix formatting 2021-01-09 13:36:30 +00:00
Adam Hathcock
e9f3add5b9 Merge branch 'master' into gzip-fixes 2021-01-09 13:35:52 +00:00
Adam Hathcock
faf1a9f7e4 Merge pull request #561 from adamhathcock/format
Use dotnet format to ensure some kind of code style
2021-01-09 13:35:26 +00:00
Adam Hathcock
5357bd07c7 Let dotnet format do its thing 2021-01-09 13:33:34 +00:00
Adam Hathcock
8c0e2cbd25 Use dotnet format 2021-01-09 13:32:14 +00:00
Adam Hathcock
674f3b4f28 Merge branch 'master' into gzip-fixes 2021-01-09 13:25:55 +00:00
Adam Hathcock
6e42e00974 Merge pull request #485 from adamhathcock/issue-256
Create and using PauseEntryRebuilding for adding large numbers of ent…
2021-01-09 13:23:52 +00:00
Adam Hathcock
8598885258 Read trailer for GZip for CRC and uncompressed size 2021-01-09 13:22:06 +00:00
Adam Hathcock
669e40d53c Merge branch 'master' into issue-256 2021-01-09 13:01:16 +00:00
Adam Hathcock
1adcce6c62 Expose Last Modified time on GZipStream 2021-01-09 12:53:13 +00:00
Adam Hathcock
147be6e6e1 Use Net5, NetCoreApp3.1, NetStandard2.1, NetStandard2.0 only 2021-01-09 10:34:49 +00:00
Adam Hathcock
5879999094 Merge pull request #551 from carbon/alder32
Use hardware accelerated Adler32 impl
2020-11-19 08:21:31 +00:00
Jason Nelson
477a30cf5b Use hardware accelerated Adler32 impl 2020-11-18 11:21:29 -08:00
Adam Hathcock
2fec03e1ac Merge pull request #550 from carbon/cq
Improve Code Quality 3
2020-11-18 18:32:53 +00:00
Jason Nelson
9a17449a02 Format NewSubHeaderType 2020-11-18 09:44:13 -08:00
Jason Nelson
087a6aad8c Cross target .NETCOREAPP3.1 and react to new nullablity annotations 2020-11-18 09:43:08 -08:00
Jason Nelson
e243a8e88f Format AbstractArchive 2020-11-18 09:31:39 -08:00
Jason Nelson
b57df8026a Use pattern matching 2020-11-18 09:29:38 -08:00
Jason Nelson
a1d45b44cd Format ArchiveFactory 2020-11-18 09:28:24 -08:00
Jason Nelson
e47e1d220a Format AesDecoderStream 2020-11-18 09:25:38 -08:00
Jason Nelson
0129a933df Remove NETSTANDARD1_3 symbol 2020-11-18 09:23:50 -08:00
Jason Nelson
fa241bb0d7 Inline variable declarations 2020-11-18 09:21:45 -08:00
Jason Nelson
d8804ae108 Improve conditional logic to prepare to add .NETCOREAPP target 2020-11-18 09:19:21 -08:00
Jason Nelson
8090d269e7 Add polyfills for string.EndsWith(char) && string.Contains(char) 2020-11-18 09:16:53 -08:00
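These polyfills exist because the char overloads of `string.EndsWith` and `string.Contains` only appear in newer target frameworks. A hedged sketch of the usual shape (the `#if` guard symbols are assumptions, not the project's actual conditions):

```csharp
#if NETFRAMEWORK || NETSTANDARD2_0
internal static class StringPolyfills
{
    // The BCL added these char overloads in .NET Core 2.0; older targets
    // only have the string overloads, which allocate and are culture-aware.
    public static bool EndsWith(this string s, char value) =>
        s.Length > 0 && s[s.Length - 1] == value;

    public static bool Contains(this string s, char value) =>
        s.IndexOf(value) >= 0; // string.IndexOf(char) is an ordinal search
}
#endif
```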
Jason Nelson
b0101f20c5 Eliminate culture specific StartsWith comparisons 2020-11-18 09:12:01 -08:00
Jason Nelson
dd48e4299a Simplify .NET framework code exclusions, bump min .NET framework version to 4.6.1 2020-11-18 09:07:30 -08:00
Jason Nelson
c61ee0c24f Update deps 2020-11-18 09:02:11 -08:00
Jason Nelson
9576867c34 Enable C# 9 2020-11-18 09:01:35 -08:00
Adam Hathcock
4426a24298 Merge pull request #549 from adamhathcock/update-deps
Update dependencies
2020-11-03 08:47:58 +00:00
Adam Hathcock
3b43c1e413 Update dependencies 2020-11-03 08:45:10 +00:00
Adam Hathcock
aa6575c8f9 Merge pull request #541 from avao/master
UT and Fix for: Index out of range exception from gzip #532
2020-10-19 12:33:31 +01:00
avao
0268713960 UT and Fix for: Index out of range exception from gzip #532 2020-10-13 19:58:11 +01:00
Adam Hathcock
f36167d425 Merge pull request #531 from carbon/master
Improve CQ3
2020-08-01 06:25:09 +01:00
Jason Nelson
33ffcb9308 Use Array.Empty<byte> 2020-07-31 17:00:46 -07:00
Jason Nelson
a649c25a91 Eliminate two allocations in HuffmanTree 2020-07-31 16:58:21 -07:00
Jason Nelson
fa1e773960 Eliminate two allocations in Crc32 2020-07-31 16:55:07 -07:00
Jason Nelson
62f7238796 Make CMethodId readonly 2020-07-31 16:49:34 -07:00
Jason Nelson
d4ccf73340 Embed FAST_ENCODER_TREE_STRUCTURE_DATA 2020-07-31 16:47:05 -07:00
Jason Nelson
5ddb0f96bc Use switch expressions 2020-07-31 16:37:56 -07:00
Jason Nelson
75a6db8f4c Eliminate three allocations in HbMakeCodeLengths 2020-07-31 16:33:00 -07:00
Jason Nelson
ae5635319b Eliminate header bytes allocation 2020-07-31 16:30:26 -07:00
Jason Nelson
98ed3080d0 Eliminate three allocations 2020-07-31 16:30:09 -07:00
Jason Nelson
c618eacad4 Optimize RijndaelEngine 2020-07-31 16:22:44 -07:00
Jason Nelson
3b11e6ef97 Eliminate two allocations 2020-07-31 16:10:59 -07:00
Jason Nelson
40af9359db Polyfill and use Stream.Read(Span<byte> buffer) 2020-07-31 16:08:38 -07:00
Jason Nelson
d6bf9dae42 Eliminate allocation 2020-07-31 16:01:09 -07:00
Adam Hathcock
13917941ff Merge pull request #530 from carbon/master
Enable test coverage for net461 and fix regression
2020-07-31 18:39:40 +01:00
Jason Nelson
28f04329ae Merge branch 'master' of https://github.com/carbon/sharpcompress 2020-07-31 10:12:37 -07:00
Jason Nelson
404a6b231d Fix .NET 461 failures 2020-07-31 10:12:34 -07:00
Jason Nelson
184596da3c Merge branch 'master' into master 2020-07-31 11:37:45 -05:00
Jason Nelson
f00f393687 Disable failing net461 tests 2020-07-31 09:30:20 -07:00
Jason Nelson
cbbfb89619 Add failure notes 2020-07-31 09:29:06 -07:00
Jason Nelson
6a5cf11dd0 Fix net461 bug 2020-07-31 09:27:41 -07:00
Jason Nelson
fc1d0a0464 Run tests against net461 2020-07-31 09:27:32 -07:00
Adam Hathcock
74af1759eb Merge pull request #529 from carbon/master
Improve code quality v2
2020-07-31 06:55:35 +01:00
Jason Nelson
ee3162ad71 Fix return 2020-07-30 17:49:29 -07:00
Jason Nelson
4357165163 Add Read/Write overrides to NonDisposingStream 2020-07-30 17:36:03 -07:00
Jason Nelson
6973436b94 Add and use Stream.Write(ReadOnlySpan<byte> buffer) polyfill 2020-07-30 17:29:33 -07:00
Jason Nelson
7750ed7106 Finish spanification of RijndaelEngine 2020-07-30 17:01:13 -07:00
Jason Nelson
773158e9d8 Seal LZipStream 2020-07-30 16:57:30 -07:00
Jason Nelson
4db615597d Refactor ExtraData and enable nullable 2020-07-30 16:48:22 -07:00
Jason Nelson
6bdf2365fc Inline variable declarations 2020-07-30 16:45:38 -07:00
Adam Hathcock
a7944f28c5 Fix CI again 2020-07-26 14:45:32 +01:00
Adam Hathcock
426d459284 Fix CI build 2020-07-26 14:39:13 +01:00
Adam Hathcock
b00b461ada Update documented targets 2020-07-26 14:38:19 +01:00
Adam Hathcock
84834b6348 ignore snupkg 2020-07-26 14:36:42 +01:00
Adam Hathcock
f521fd35ff Fix tests, update to 0.26 2020-07-26 14:36:07 +01:00
Adam Hathcock
2979fceecf Merge pull request #522 from JTOne123/master
[PR] The proj files have been updated to enable SourceLink
2020-07-26 12:07:13 +01:00
Adam Hathcock
b12e8e793f Merge branch 'master' into master 2020-07-26 12:07:05 +01:00
Adam Hathcock
c77ec59a28 Merge pull request #527 from adamhathcock/default-encoding
Don’t use 437 Encoding by default anymore.
2020-07-26 12:06:30 +01:00
Adam Hathcock
42ba8cf828 Merge branch 'master' into default-encoding 2020-07-26 12:06:22 +01:00
Adam Hathcock
c7618fc895 Merge pull request #528 from DannyBoyk/issue_524_tararchive_fails_read_all_entries
Ensure TarArchive enumerates all entries
2020-07-26 12:05:58 +01:00
Daniel Nash
d055b34efe Ensure TarArchive enumerates all entries
While enumerating the entries of a tar file and writing their contents
to disk using TarArchive, it was discovered TarArchive was not properly
discarding padding bytes in the last block of each entry. TarArchive was
sometimes able to recover depending on the number of padding bytes due
to the logic it uses to find the next entry header, but not always.

TarArchive was changed to use TarReadOnlySubStream when opening entries
and TarReadOnlySubStream was changed to ensure all of an entry's blocks are
read when it is being disposed.

Fixes adamhathcock/sharpcompress#524
2020-07-20 12:57:39 -04:00
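The padding in question comes from tar's 512-byte block framing: entry data is stored in whole blocks, the final block is zero-padded, and those padding bytes must be consumed before the next header. A minimal sketch of the arithmetic, with illustrative names:

```csharp
static class TarPadding
{
    // Tar frames entry data in 512-byte blocks and zero-pads the last one;
    // the padding must be consumed on Dispose so the reader lands on the
    // next entry header regardless of entry size.
    private const int BlockSize = 512;

    public static long PaddingAfter(long entrySize)
    {
        var remainder = entrySize % BlockSize;
        return remainder == 0 ? 0 : BlockSize - remainder;
    }
    // e.g. PaddingAfter(100) == 412, PaddingAfter(1024) == 0
}
```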
Adam Hathcock
b7f635f540 Update readme 2020-07-16 15:35:27 +01:00
Adam Hathcock
5e95a54260 Merge branch 'master' into default-encoding 2020-07-16 14:32:51 +01:00
Adam Hathcock
4354e82bb5 Don’t use 437 Encoding by default anymore. 2020-07-16 14:28:37 +01:00
Adam Hathcock
ab7bdc24dc Merge pull request #523 from kdaadk/master
Decompress multipart solid RAR4.x archive
2020-07-01 12:56:58 +01:00
Dmitriy
81997fe1ba rename test 2020-07-01 15:10:21 +05:00
Dmitriy
de6759a83f - remove check of solid archive
- change tests
- add test on multi solid archive
2020-07-01 15:06:54 +05:00
Adam Hathcock
233dc33130 Fix running tests on build 2020-06-25 09:22:15 +01:00
Adam Hathcock
39b07f45f1 Update github action and minor SDK bump 2020-06-25 09:16:15 +01:00
Pavlo Datsiuk
802662a165 [COMMIT] The proj files have been updated to enable SourceLink [SharpCompress.csproj] 2020-06-25 10:58:21 +03:00
Adam Hathcock
2859848fc4 Give the artifacts names 2020-05-24 10:41:06 +01:00
Adam Hathcock
b734d00062 Remove README for appveyor and fix artifacts again 2020-05-24 10:35:39 +01:00
Adam Hathcock
02a17d22f6 Adjust artifacts and remove appveyor 2020-05-24 10:32:50 +01:00
Adam Hathcock
7bfff472c6 Fix yaml 2020-05-24 10:26:51 +01:00
Adam Hathcock
5aa146be17 Remove matrix var 2020-05-24 10:25:40 +01:00
Adam Hathcock
a0bfc22a29 Try upload artifact 2020-05-24 10:23:01 +01:00
Adam Hathcock
6ed46b5fcc Fix CI paths 2020-05-24 09:04:10 +01:00
Adam Hathcock
904e40ef57 Switch to bullseye for building 2020-05-24 09:00:27 +01:00
Adam Hathcock
00ff119ec4 Minor Rider issues resolved. Still two outstanding. 2020-05-24 08:42:36 +01:00
Adam Hathcock
60d2511e80 Remove .NET Standard 1.3 which is no longer in support 2020-05-24 08:42:06 +01:00
Adam Hathcock
ed56a4aa4a Merge pull request #515 from carbon/master
Enable nullable
2020-05-24 08:20:37 +01:00
Jason Nelson
5b6a1c97e3 Enable nullable 2020-05-23 16:27:55 -07:00
Adam Hathcock
8bfc9ef4de Update for 0.25.1 2020-05-22 13:46:36 +01:00
Adam Hathcock
fa949e089e Merge pull request #512 from adamhathcock/fix-codepages
Attempt Windows reference fix
2020-05-22 13:44:10 +01:00
Adam Hathcock
c296ca7660 Merge pull request #513 from RealOrko/symbolic-link-default-write
Added default implementation with warning for symbolic links
2020-05-22 13:43:56 +01:00
RealOrko
538b38869f Added a warning for the writing of symbolic links with a link to the original GitHub issue for the DOTNET runtime 2020-05-21 13:00:25 +01:00
Adam Hathcock
ce328ed90b Merge branch 'master' into fix-codepages 2020-05-15 08:28:25 +01:00
Adam Hathcock
632bae725d Add braces for clarity 2020-05-14 13:47:21 +01:00
Adam Hathcock
4f824b1d9a Add build flags for Core targets 2020-05-14 13:16:00 +01:00
Adam Hathcock
120aee8039 See if windows reference is fixed 2020-05-14 13:09:17 +01:00
Adam Hathcock
3b2b341c4d Merge pull request #508 from turbedi/minor_optimizations
Minor optimizations
2020-04-11 09:08:07 +01:00
Berkan Diler
4cad40f637 Minor string optimizations 2020-04-10 12:33:40 +02:00
Berkan Diler
2c64380019 Use 3 argument Array.Copy when possible 2020-04-10 12:06:38 +02:00
Berkan Diler
ccb9593de2 Replace Span<T>.Fill(0) with Span<T>.Clear() 2020-04-10 12:03:32 +02:00
Berkan Diler
921a99fc32 Replace static readonly byte[] fields with static ReadOnlySpan<byte> properties 2020-04-10 11:54:58 +02:00
Adam Hathcock
400d2c1774 Fix usings and add braces for better merging 2020-04-03 08:47:30 +01:00
Adam Hathcock
762497b1c1 Tag for 0.25.0 and update packages 2020-04-03 08:25:43 +01:00
Adam Hathcock
be9edc7512 Merge pull request #500 from Erior/Issue_86
ZipReader/StreamingZipReaderFactory fails for archive entries which are uncompressed files in ZIP format #86
2020-01-17 09:38:19 +00:00
Lars Vahlenberg
9bf9d34d94 Issue86 Proposal 2020-01-16 22:08:48 +01:00
Adam Hathcock
df8405006c Fix workflow name 2020-01-03 09:24:08 +00:00
Adam Hathcock
d135fdce58 Give github actions build a name and use badge 2020-01-03 09:22:51 +00:00
Adam Hathcock
ba570b93bb Merge pull request #496 from Bond-009/allocations
Reduce the amount of allocations
2020-01-03 09:18:17 +00:00
Adam Hathcock
6dfe0c7a96 Merge branch 'master' into allocations 2020-01-03 09:16:46 +00:00
Adam Hathcock
73d4430a65 Merge pull request #498 from adamhathcock/build-netcore3
Build netcore3
2020-01-03 09:15:14 +00:00
Adam Hathcock
ce6fd9b976 JUst one target 2020-01-03 09:12:10 +00:00
Adam Hathcock
ae7e8c03f2 Put wrong SDK 2020-01-03 09:07:34 +00:00
Adam Hathcock
22e2526f4c Update cake and dependencies 2020-01-03 09:06:13 +00:00
Adam Hathcock
50283d9411 Add new build targets for netcore3 2020-01-03 09:02:04 +00:00
Bond-009
d2c2b58f3b Fix language version and add netstandard2.1 2020-01-02 17:43:58 +01:00
Bond_009
50d4b39ca0 Fix test 2019-12-30 22:17:45 +01:00
Bond_009
1ed675e960 Minor improvement 2019-12-30 19:19:05 +01:00
Bond_009
80b0671844 Reduce the amount of allocations
* Directly fill an array instead of filling a List and copying that to
an array
* Use own buffer when writing bytes to a stream
* Remove DataConverter class, replaced by BinaryPrimitives
2019-12-30 18:58:25 +01:00
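The DataConverter replacement mentioned above maps onto `System.Buffers.Binary.BinaryPrimitives`, which reads and writes fixed-width integers from spans without allocating. A small usage sketch (values illustrative):

```csharp
using System;
using System.Buffers.Binary;

Span<byte> buf = stackalloc byte[12];
BinaryPrimitives.WriteUInt32LittleEndian(buf, 0x04034b50);         // a header signature
BinaryPrimitives.WriteInt64LittleEndian(buf.Slice(4), 123456789L); // a 64-bit length

var sig = BinaryPrimitives.ReadUInt32LittleEndian(buf);
var len = BinaryPrimitives.ReadInt64LittleEndian(buf.Slice(4));
```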
Bond-009
6f387336c0 Use functions from System.Memory instead of self-made ones (#495)
* Use functions from System.Memory instead of self-made ones

* Update SharpCompress.Test.csproj
2019-12-30 15:19:46 +00:00
Adam Hathcock
3b2e273832 Merge branch 'master' into issue-256 2019-10-10 09:27:46 +01:00
Adam Hathcock
9540b01bcc NET Standard 1.3 and 2.0 only (#482)
* Remove NET35, NET45 and NET Standard 1.0

* Update README and memset

* Remove NETCORE build flag

* NET 46 too?

* Update packages and usage
2019-10-10 09:24:41 +01:00
Adam Hathcock
43c839eb89 Create and using PauseEntryRebuilding for adding large numbers of entries 2019-10-09 09:55:16 +01:00
Adam Hathcock
446d6914c1 Merge pull request #483 from Bond-009/nameof
Use nameof for param names
2019-09-17 14:21:04 +01:00
Bond_009
637223aa53 Use nameof for param names 2019-09-17 13:28:44 +02:00
Adam Hathcock
17d5565120 Merge pull request #478 from Bond-009/buffers
Use System.Buffers Nuget package
2019-09-17 10:05:29 +01:00
Bond_009
4b54187b4c Fix build 2019-09-11 21:33:57 +02:00
Bond_009
cfb1421367 Use System.Buffers Nuget package 2019-09-11 20:06:50 +02:00
Adam Hathcock
5072a0f6f5 Merge pull request #471 from adamhathcock/release-024
Bump version and dependencies
2019-08-20 20:36:38 +01:00
Adam Hathcock
357dff1403 Bump version and dependencies 2019-08-20 14:29:47 -05:00
Adam Hathcock
a2bd66ded8 Merge pull request #460 from itn3000/tar-fix-garbled2
fix filename garbling in tar(#414)
2019-06-27 12:16:53 +01:00
itn3000
6bfa3c25a4 add more comments 2019-06-27 20:01:40 +09:00
itn3000
1ea9ab72c1 add comment for subtracting 2019-06-27 19:59:16 +09:00
itn3000
07c42b8725 replace magic number 2019-06-27 10:59:21 +09:00
itn3000
70392c32e2 use Buffer.BlockCopy for performance 2019-06-27 09:47:26 +09:00
itn3000
9b4b2a9f7c consider encoding when processing filenames (#414)
modify the test tar archive because it was not the expected one
(expected "тест.txt" in encoding 866, but the actual name had the upper byte omitted)
2019-06-26 17:34:12 +09:00
Adam Hathcock
d3dd708b58 Merge pull request #457 from DannyBoyk/issue_456_zip_bounded_substreams_data_descriptors
Return a bounded substream when data descriptors are used in seekable zips
2019-06-04 13:42:24 +01:00
Daniel Nash
af264cdc58 Return a bounded substream when data descriptors are used in seekable zips 2019-06-04 08:31:42 -04:00
Adam Hathcock
cfd6df976f Merge pull request #455 from DannyBoyk/issue_454_zip_bad_extra_field
Handle a bad extra field in a local file header in zip files
2019-06-04 09:24:55 +01:00
Daniel Nash
b2bd20b47e Handle a bad extra field in a local file header in zip files 2019-06-03 13:02:28 -04:00
Adam Hathcock
ffea093e95 Merge pull request #453 from Lssikkes/master
Fix for clients failing on just having a 64 bit offset in ZIP64
2019-05-24 19:33:59 +01:00
Leroy Sikkes
78eb8fcf92 Fix for clients that don't correctly support the ZIP64 standard when headers are only referenced from the ZIP64 directory structure 2019-05-24 18:27:49 +02:00
Adam Hathcock
a052956881 Merge pull request #452 from Lssikkes/master
Various fixes for ZIP64 writer (zero byte entries, 32 bit where supported)
2019-05-24 16:17:48 +01:00
Lssikkes
9319ea6992 Updated ZIP64 writer to write 32 bit values to directory entries for better compatibility.
Support for zero byte files without corruption errors from WinRAR/7-zip
2019-05-24 16:14:30 +02:00
Adam Hathcock
4e5b70dbfa Merge pull request #444 from eugeny-trunin/mem-opt
Memory and speed optimization
2019-03-20 15:13:00 +00:00
evgeny
c68eaa8397 Memory and speed optimization 2019-03-20 17:46:57 +03:00
Adam Hathcock
bbb7c85ba7 Merge pull request #442 from turbolocust/master
Fix: ArchiveEncoding was ignored in TarWriterOptions
2019-03-19 08:31:31 +00:00
Matthias Fussenegger
8174359228 Fix: ArchiveEncoding was ignored in TarWriterOptions 2019-03-18 18:25:00 +01:00
Adam Hathcock
acf66c5195 Merge pull request #440 from adamhathcock/release-023
Bump release for 0.23 and update dependencies
2019-02-27 12:57:30 +00:00
Adam Hathcock
880c9fa97a Bump release and update dependencies 2019-02-27 12:55:16 +00:00
Adam Hathcock
e5c111f2be Merge pull request #436 from Numpsy/populate_zip_volume_comment
Changes to populate ZipArchive.ZipVolume.Comment
2019-01-14 08:14:03 +00:00
Richard Webb
4e9cd064dd Unit test to show reading of a Zip volume/archive comment 2019-01-13 21:05:55 +00:00
Richard Webb
12a6d3977e Return the DirectoryEndHeader from SeekableZipHeaderFactory.ReadSeekable so that it can be used by ZipArchive 2018-12-14 22:44:44 +00:00
Adam Hathcock
a95bbaf820 Merge pull request #423 from markfinal/tarsymlink
Tar symlink support
2018-11-05 08:31:48 +00:00
Mark Final
70bafa653b Tar symlink extraction
Extended SharpCompress.Common.ExtractionOptions with a delegate to write symbolic links. If it is null and a symbolic link is encountered, an exception is thrown.
Removed Mono.Posix.NETStandard from the library, but added to the .NET Core 2.1 test application.
Extended the test to implement the delegate.
2018-11-03 09:45:12 +00:00
Mark Final
3f4338489c Removed unnecessary code 2018-11-01 21:57:49 +00:00
Mark Final
d91e58f2cc Refs #248. Refs #132. Added a test case of a tar containing symlinks
This is a source archive of the MoltenVK project from github, which is my use-case for SharpCompress.
I added a test case in the project, which should extract the tar, and validate any symlink targets with what the tar thinks it ought to be.
2018-11-01 21:51:14 +00:00
Mark Final
192b9c1e8b Ref #248. Ref #132. Tar reader support for symlinks for .NET standard 2 and Posix platforms
Extracts linkname from the tar header, and exposes this on IEntry as the LinkTarget (string) property. If an entry is not a symlink, then that property is null.

Uses Mono.Posix.NETStandard nuget to create a symlink. However, this is only applicable to .NET standard 2.0+. So far, unable to find a nuget that works for older versions.

Also, not sure what to do on Windows.
2018-11-01 21:48:51 +00:00
Adam Hathcock
0941239454 Merge pull request #417 from KyotoFox/fix-entrystream-flush
Allow Flush on EntryStream
2018-10-04 12:48:08 +01:00
Knut Ørland
53ad00cdc4 Use soft tabs 2018-10-04 13:13:14 +02:00
Knut Ørland
6dd5da48f7 Added test that calls EntryStream.Flush() 2018-10-04 13:08:53 +02:00
Knut Ørland
efae8328a9 Don't throw an exception when flushing an EntryStream
From Microsoft docs: “In a class derived from Stream that doesn't
support writing, Flush is typically implemented as an empty method to
ensure full compatibility with other Stream types since it's valid to
flush a read-only stream.”
2018-10-04 13:05:36 +02:00
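Following the quoted guidance, the change reduces to a no-op override instead of a throw. The shape of the fix, sketched in isolation (the enclosing class is elided):

```csharp
// Inside the EntryStream wrapper: flushing a read-only stream is valid,
// so Flush is an empty method rather than throwing an exception.
public override void Flush()
{
    // intentionally a no-op for a read-only stream
}
```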
Adam Hathcock
f1facc51de Merge pull request #409 from G-Research/RecogniseEmptyTarArchives
Recognise empty tar archives.
2018-09-25 13:20:59 +01:00
Adam Hathcock
a471ca6a76 Use Cake tool on circle. Update test packages 2018-08-31 09:27:04 +01:00
Elliot Prior
83f6690576 Recognise empty tar archives.
Currently, when ArchiveFactory.Open is called on an empty tar archive, it throws because it is unable to determine the stream type. This fix allows it to recognise empty tar files by checking whether the filename is empty, the size is empty, and the entry type is defined. Adds a test that tries opening an empty archive.
2018-08-16 10:25:47 +01:00
Adam Hathcock
1850ea67f6 Merge pull request #408 from majoreffort/master
Test and fix for #407
2018-07-24 09:42:03 +01:00
majoreffort
2fd6178aa9 Fixed length in TarHeader#WriteStringBytes 2018-07-23 19:58:37 +02:00
majoreffort
ec044e6f42 Added Tar test for 100 char filename issue. 2018-07-23 19:48:01 +02:00
Adam Hathcock
bd96279649 Merge pull request #404 from MattKotsenas/bugfix/idisposable
Enable parallel test execution
2018-07-12 19:53:50 +01:00
Matt Kotsenas
f7ad595945 Enable test parallelization and remove garbage collection workaround
Now that the sources of file locking are fixed, enable test parallelization
and remove the forced garbage collection workaround.

Lastly, remove the `IsLocked` check because it doesn't work in a
parallel test running world - the file may be locked due to another test
running.
2018-07-12 10:33:19 -07:00
Matt Kotsenas
93c0b91de9 Refactor TestSharpCompressWithEmptyStream
Refactor `TestSharpCompressWithEmptyStream` so it asserts that the files
and bytes are the same.
2018-07-12 10:32:08 -07:00
Matt Kotsenas
138038b08f Move RarReaderTests over to use ReaderFactory 2018-07-12 10:32:08 -07:00
- Refactor `RarReaderTests` to use `ReaderFactory`
- Update `ReaderTests.Read` to support Rar tests
2018-07-12 10:32:08 -07:00
Matt Kotsenas
e9a6fed607 FIXUP decouple UseReader from VerifyFiles 2018-07-11 16:53:34 -07:00
Matt Kotsenas
87a1440382 Decouple UseReader from VerifyFiles 2018-07-11 16:49:49 -07:00
Matt Kotsenas
3c2f4ebe9b Combine ForwardOnlyStream and NonSeekableStream
Delete `NonSeekableStream` used in Zip64 tests in favor
of `ForwardOnlyStream` used in Mocks.

Additionally, delete the `ForwardOnlyStream.ReadByte` implementation
as the implementation on the base Stream is sufficient.
2018-07-11 16:42:03 -07:00
Matt Kotsenas
933ffe7828 Remove unused code from ArchiveTests 2018-07-11 16:33:46 -07:00
Matt Kotsenas
7d20ba5243 Simplify RarHeaderTests 2018-07-11 16:21:19 -07:00
Matt Kotsenas
44dc36af48 Update ReaderTests base class to validate Dispose
Update the `ReaderTests` base class to validate that `Dispose` is
called appropriately in both the close and the leave open cases.
2018-07-11 16:21:19 -07:00
Matt Kotsenas
98558c5ba9 Refactor TestStream constructor
Refactor the `TestStream` constructor so by default it defers to
the underlying Stream
2018-07-11 16:21:19 -07:00
Matt Kotsenas
6c25322465 Follow best-practices for Dispose in Volume and ForwardOnlyStream
Update `Volume` and `ForwardOnlyStream` to follow the project's
general pattern and best-practices for `Dispose`
2018-07-11 16:21:19 -07:00
Matt Kotsenas
6d1d62fd32 Delete AppendingStream
`AppendingStream` is unused, so rather than refactor it, just delete it.
2018-07-11 16:21:19 -07:00
Matt Kotsenas
ee4ae661d7 Refactor ListeningStream
Refactor `ListeningStream`:

- Override of `WriteByte` was redundant and removed
- Make `Dispose` delegate to base class
2018-07-11 16:21:19 -07:00
Matt Kotsenas
0473ec1626 Open test archives as read
Update `RarHeaderFactoryTests` and `GZipArchiveTests` to open the test
readers as `FileAccess.Read` and `FileShare.Read` to prevent issues with
multiple test from trying to open exclusive access to files.
2018-07-11 16:21:19 -07:00
Matt Kotsenas
c6cf0d40ee Simplify ReaderTests
The `IEnumerable<string>` version of `ReaderTests` is unused, so delete
it to simplify the code.
2018-07-11 16:21:19 -07:00
Matt Kotsenas
4cd80e96f3 Simplify GZip bad compression test 2018-07-11 16:21:19 -07:00
Matt Kotsenas
16524717ba Fix Stream leak in ArchiveFactory
`ArchiveFactory.Open` has two overloads that take `string` or
`FileInfo` (string delegates to FileInfo). Both of these implementations
open a `Stream` with the default `ReaderOptions`, which leaves the
stream open, resulting in a leak.

The fix is to set `LeaveOpen` to `false` if no options were provided.
Note that if a user was providing options and `LeaveOpen` was set to
`true`, the code did and will still leak.
2018-07-11 16:21:19 -07:00
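A hedged sketch of the fix as described (`ReaderOptions.LeaveOpen` matches the library's option; the surrounding shape is paraphrased, not the actual ArchiveFactory code):

```csharp
public static IArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
{
    // No caller-supplied options means we own the stream: ensure it is
    // closed when the archive is disposed. As noted above, a caller who
    // explicitly passes LeaveOpen = true still keeps the stream open.
    options ??= new ReaderOptions { LeaveOpen = false };
    return Open(fileInfo.OpenRead(), options);
}
```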
Matt Kotsenas
cab1ce3d0c Update sub-streams to uniformly inherit from NonDisposingStream
Update the sub-stream classes to all inherit from `NonDisposingStream`.
This allows them to correctly implement the `Dispose` pattern, and delegate
the actual disposal to `NonDisposingStream`.

In doing so, we need to remove some redundant overrides from
`NonDisposingStream`, otherwise `BufferedSubStream` would use the
overrides inherited from `NonDisposingStream` instead of the ones
inherited from `Stream` (i.e. delegate `ReadByte` to `Read`).
2018-07-11 16:17:49 -07:00
Matt Kotsenas
6c2e5e1164 Cleanup NonDisposingStream for reuse
- Remove the duplicate `GC.SuppressFinalization` call
(called in `System.IO.Stream`)
- Improve the `ThrowOnDispose` error message
2018-07-11 12:19:34 -07:00
Matt Kotsenas
c2bf540057 Close verification streams in TestBase.CompareArchivesByPath 2018-07-11 12:12:30 -07:00
Matt Kotsenas
a35c66e166 Move RewindableStreamTest.cs to the Streams/ folder 2018-07-10 12:07:33 -07:00
Matt Kotsenas
084c5e2686 Rename StreamTests.cs --> LzmaStreamTests.cs 2018-07-10 12:07:32 -07:00
Matt Kotsenas
6ae715b153 Move the TestStream and ForwardOnlyStream to Mocks folder
Move the `TestStream` and `ForwardOnlyStream` to Mocks/ to separate them
from the test classes.
2018-07-10 12:07:32 -07:00
Adam Hathcock
9c8692806a Merge pull request #403 from MattKotsenas/bugfix/parallel-tests
Fix and re-enable tests
2018-07-10 20:01:20 +01:00
Matt Kotsenas
2d87351d2a Add tests back to AppVeyor 2018-07-10 11:52:00 -07:00
Matt Kotsenas
3114afde0e Add workaround for in-use files
The `TestBase` is not always able to delete the scratch folder in
`Dispose()` because sometimes the files are still in use.

This problem appears to be leaked file handles (likely due to incorrect
handling of `IDisposable`). To avoid the problem for now, force a
garbage collection prior to deleting the scratch folder.
2018-07-10 11:49:38 -07:00
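The workaround itself is the standard force-collect pattern run before the delete; a sketch (`scratchPath` is illustrative):

```csharp
// Leaked handles keep scratch files locked; force finalizers to run so
// the underlying file handles close before the directory is deleted.
GC.Collect();
GC.WaitForPendingFinalizers();
GC.Collect();
Directory.Delete(scratchPath, recursive: true);
```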
Matt Kotsenas
7b338511cc Create unique scratch path per test
Tests fail in Visual Studio because they try to reuse the same scratch
working space, and each test is responsible for resetting the space. To
simplify the test code:

1. Make `TestBase` `IDisposable` and have it create the scratch space
2. Remove `ResetScratch()` as it is now handled by the base class
3. Add a unique ID to each scratch space folder to prevent collisions
2018-07-10 11:46:44 -07:00
Adam Hathcock
09c27681e1 Merge pull request #402 from a764578566/master
file search supports LINQ patterns
2018-07-10 13:21:09 +01:00
zhoujr
4ebc1f82b7 file search supports LINQ patterns 2018-07-10 19:58:59 +08:00
Adam Hathcock
4640ca497a Merge pull request #400 from MattKotsenas/feature/avoid-exception-in-readerfactory
Avoid throwing NotSupportedException in ReaderFactory hot path
2018-07-10 08:47:13 +01:00
Matt Kotsenas
bebccaae28 Avoid throwing NotSupportedException in ReaderFactory hot path
`ReaderFactory.Open()` calls `ZipArchive.IsZipFile()` to determine if
the `Stream` is a zip archive, which calls into
`ZipHeaderFactory.ReadHeader()`, which throws a `NotSupportedException`
when the `Stream` is not a zip archive.

To be clear, this exception is caught and `IsZipFile()` returns `false`,
but when called in a hot-path, these exceptions can become expensive.

To address this issue, `ReadHeader` now returns `null` in the default
case instead of throwing. All call sites were already checking for and
handling `null`, so no behavior changes.
2018-07-09 18:44:46 -07:00
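The rewrite follows the usual hot-path pattern: unrecognized input yields `null` for the callers' existing null checks rather than a thrown-and-caught exception. A sketch with illustrative member names (the signature constants are the real ZIP local/central header markers):

```csharp
// Before: the default case threw NotSupportedException, which IsZipFile()
// swallowed - costly when probing many non-zip streams in a hot path.
// After: unknown signatures return null; all call sites already null-check.
private ZipHeader? ReadHeader(uint headerSignature) =>
    headerSignature switch
    {
        0x04034b50 => ReadLocalHeader(),            // local file header
        0x02014b50 => ReadCentralDirectoryHeader(), // central directory header
        // ...other known signatures elided...
        _ => null // was: throw new NotSupportedException()
    };
```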
Adam Hathcock
7ee53373c6 Remove tests as AppVeyor can’t handle them at the moment 2018-07-09 09:05:10 +01:00
Adam Hathcock
d577fe1ac6 Merge pull request #385 from twirpx/master
Fixed EFS flag handling
2018-07-09 08:48:34 +01:00
Adam Hathcock
9f976aaf78 Merge branch 'master' into master 2018-07-09 08:48:26 +01:00
Adam Hathcock
8a7d7e366f Merge pull request #396 from andreas-eriksson/Rar5IsEncrypted
Correctly set IsEncrypted for entries in Rar5.
2018-07-09 08:48:12 +01:00
Adam Hathcock
540ab1c6fa Merge branch 'master' into master 2018-07-09 08:47:32 +01:00
Adam Hathcock
6792afbdb1 Merge branch 'master' into Rar5IsEncrypted 2018-07-09 08:44:32 +01:00
Adam Hathcock
e5a7185671 Mark for 0.22 2018-07-09 08:42:45 +01:00
Adam Hathcock
cdaf453b2d Update dependencies and tests to .NET Core 2.1 2018-07-09 08:39:37 +01:00
Andreas Eriksson
f9cc80e1de Correctly set IsEncrypted for entries in Rar5. 2018-06-29 15:51:40 +02:00
Adam Hathcock
7beff9e83c Merge pull request #395 from adamhathcock/zip-slip-readers
Zip slip for Readers
2018-06-28 11:56:44 +01:00
Adam Hathcock
8f49f1b6f8 Merge remote-tracking branch 'origin/master' into zip-slip-readers 2018-06-28 11:52:43 +01:00
Adam Hathcock
7e336a0247 Slip in new SDK 2018-06-28 11:51:17 +01:00
Adam Hathcock
e37e8bdadc Move path handling for extraction to be common
Reader and Archive now share more extraction logic
2018-06-28 11:46:51 +01:00
Adam Hathcock
40bd61b16b Merge pull request #389 from frankyjuang/patch-1
Fix comment
2018-06-08 08:59:52 +01:00
Juang, Yi-Lin
87fbb45099 Fix comment 2018-06-08 11:27:43 +08:00
twirpx
e822f9a95c Tests fixed to explicitly use encoding 866, because several tests use a file named in Russian 2018-05-30 22:17:27 +05:00
twirpx
8a5a9159e1 Fixed DirectoryEntryHeader Name/Comment decoding in case of EFS flags set 2018-05-30 21:47:31 +05:00
twirpx
73b3c6b419 Merge branch 'master' of https://github.com/adamhathcock/sharpcompress 2018-05-30 20:28:15 +05:00
Adam Hathcock
f9bd7ebdb0 Merge pull request #384 from MrJul/perf-readbyte
Implemented ReadByte/WriteByte on streams to improve performance
2018-05-28 09:21:28 +01:00
Julien Lebosquain
540618c062 Implemented ReadByte/WriteByte on streams to improve performance 2018-05-27 16:31:44 +02:00
Adam Hathcock
9e96dec8c9 Merge pull request #383 from itn3000/add-filename-encoding-example
add example for custom file encoding
2018-05-23 09:14:46 +01:00
itn3000
7b7af612ba add example for custom file encoding 2018-05-23 09:46:36 +09:00
Adam Hathcock
3a747ba87e Update USAGE with new stream handling 2018-05-16 08:51:33 +01:00
Adam Hathcock
87e57e3a9a Mark for 0.21.1 2018-05-15 09:14:56 +01:00
Adam Hathcock
785d0dcebf Merge pull request #381 from adamhathcock/issue-380
Allow forced encoding to override default encoding
2018-05-15 09:13:16 +01:00
Adam Hathcock
2314776f55 Also check for CustomDecoder 2018-05-15 08:28:11 +01:00
Adam Hathcock
473f5d8189 Make GetDecoder use GetEncoding for forced 2018-05-14 16:20:57 +01:00
Adam Hathcock
be971cb6f7 Allow forced encoding to override default encoding 2018-05-14 16:08:31 +01:00
twirpx
149f5e4fb5 Minor fixes 2017-08-22 11:46:32 +05:00
twirpx
1793fc949d Fixed bug: Passing default ReaderOptions when creating ZipReader for solid extraction 2017-08-16 08:57:36 +05:00
374 changed files with 12434 additions and 9724 deletions


@@ -1,15 +0,0 @@
version: 2
jobs:
  build:
    docker:
      - image: microsoft/dotnet:2.0.7-sdk-2.1.105
    steps:
      - checkout
      - run:
          name: Install unzip
          command: |
            apt-get update
            apt-get install -y unzip
      - run:
          name: Build
          command: ./build.sh

.config/dotnet-tools.json (new file, 12 lines)

@@ -0,0 +1,12 @@
{
  "version": 1,
  "isRoot": true,
  "tools": {
    "dotnet-format": {
      "version": "4.1.131201",
      "commands": [
        "dotnet-format"
      ]
    }
  }
}

.editorconfig (new file, 543 lines)

@@ -0,0 +1,543 @@
# Version: 2.0.1 (Using https://semver.org/)
# Updated: 2020-12-11
# See https://github.com/RehanSaeed/EditorConfig/releases for release notes.
# See https://github.com/RehanSaeed/EditorConfig for updates to this file.
# See http://EditorConfig.org for more information about .editorconfig files.
##########################################
# Common Settings
##########################################
# This file is the top-most EditorConfig file
root = true
# All Files
[*]
charset = utf-8
indent_style = space
indent_size = 4
insert_final_newline = true
trim_trailing_whitespace = true
##########################################
# File Extension Settings
##########################################
# Visual Studio Solution Files
[*.sln]
indent_style = tab
# Visual Studio XML Project Files
[*.{csproj,vbproj,vcxproj.filters,proj,projitems,shproj}]
indent_size = 2
# XML Configuration Files
[*.{xml,config,props,targets,nuspec,resx,ruleset,vsixmanifest,vsct}]
indent_size = 2
# JSON Files
[*.{json,json5,webmanifest}]
indent_size = 2
# YAML Files
[*.{yml,yaml}]
indent_size = 2
# Markdown Files
[*.md]
trim_trailing_whitespace = false
# Web Files
[*.{htm,html,js,jsm,ts,tsx,css,sass,scss,less,svg,vue}]
indent_size = 2
# Batch Files
[*.{cmd,bat}]
end_of_line = crlf
# Bash Files
[*.sh]
end_of_line = lf
# Makefiles
[Makefile]
indent_style = tab
##########################################
# Default .NET Code Style Severities
# https://docs.microsoft.com/dotnet/fundamentals/code-analysis/configuration-options#scope
##########################################
[*.{cs,csx,cake,vb,vbx}]
# Default Severity for all .NET Code Style rules below
dotnet_analyzer_diagnostic.severity = warning
##########################################
# File Header (Uncomment to support file headers)
# https://docs.microsoft.com/visualstudio/ide/reference/add-file-header
##########################################
# [*.{cs,csx,cake,vb,vbx}]
# file_header_template = <copyright file="{fileName}" company="PROJECT-AUTHOR">\n© PROJECT-AUTHOR\n</copyright>
# SA1636: File header copyright text should match
# Justification: .editorconfig supports file headers. If this is changed to a value other than "none", a stylecop.json file will need to be added to the project.
# dotnet_diagnostic.SA1636.severity = none
##########################################
# .NET Language Conventions
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions
##########################################
# .NET Code Style Settings
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#net-code-style-settings
[*.{cs,csx,cake,vb,vbx}]
# "this." and "Me." qualifiers
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#this-and-me
#dotnet_style_qualification_for_field = true:warning
#dotnet_style_qualification_for_property = true:warning
#dotnet_style_qualification_for_method = true:warning
#dotnet_style_qualification_for_event = true:warning
# Language keywords instead of framework type names for type references
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#language-keywords
dotnet_style_predefined_type_for_locals_parameters_members = true:warning
dotnet_style_predefined_type_for_member_access = true:warning
# Modifier preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#normalize-modifiers
dotnet_style_require_accessibility_modifiers = always:warning
csharp_preferred_modifier_order = public,private,protected,internal,static,extern,new,virtual,abstract,sealed,override,readonly,unsafe,volatile,async:warning
visual_basic_preferred_modifier_order = Partial,Default,Private,Protected,Public,Friend,NotOverridable,Overridable,MustOverride,Overloads,Overrides,MustInherit,NotInheritable,Static,Shared,Shadows,ReadOnly,WriteOnly,Dim,Const,WithEvents,Widening,Narrowing,Custom,Async:warning
dotnet_style_readonly_field = true:warning
# Parentheses preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#parentheses-preferences
dotnet_style_parentheses_in_arithmetic_binary_operators = always_for_clarity:warning
dotnet_style_parentheses_in_relational_binary_operators = always_for_clarity:warning
dotnet_style_parentheses_in_other_binary_operators = always_for_clarity:warning
dotnet_style_parentheses_in_other_operators = always_for_clarity:suggestion
# Expression-level preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#expression-level-preferences
dotnet_style_object_initializer = true:warning
dotnet_style_collection_initializer = true:warning
dotnet_style_explicit_tuple_names = true:warning
dotnet_style_prefer_inferred_tuple_names = true:warning
dotnet_style_prefer_inferred_anonymous_type_member_names = true:warning
dotnet_style_prefer_auto_properties = true:warning
dotnet_style_prefer_is_null_check_over_reference_equality_method = true:warning
dotnet_style_prefer_conditional_expression_over_assignment = false:suggestion
dotnet_diagnostic.IDE0045.severity = suggestion
dotnet_style_prefer_conditional_expression_over_return = false:suggestion
dotnet_diagnostic.IDE0046.severity = suggestion
dotnet_style_prefer_compound_assignment = true:warning
# Null-checking preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#null-checking-preferences
dotnet_style_coalesce_expression = true:warning
dotnet_style_null_propagation = true:warning
# Parameter preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#parameter-preferences
dotnet_code_quality_unused_parameters = all:warning
# More style options (Undocumented)
# https://github.com/MicrosoftDocs/visualstudio-docs/issues/3641
dotnet_style_operator_placement_when_wrapping = end_of_line
# https://github.com/dotnet/roslyn/pull/40070
dotnet_style_prefer_simplified_interpolation = true:warning
# C# Code Style Settings
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#c-code-style-settings
[*.{cs,csx,cake}]
# Implicit and explicit types
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#implicit-and-explicit-types
csharp_style_var_for_built_in_types = true:warning
csharp_style_var_when_type_is_apparent = true:warning
csharp_style_var_elsewhere = true:warning
# Expression-bodied members
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#expression-bodied-members
csharp_style_expression_bodied_methods = true:warning
csharp_style_expression_bodied_constructors = true:warning
csharp_style_expression_bodied_operators = true:warning
csharp_style_expression_bodied_properties = true:warning
csharp_style_expression_bodied_indexers = true:warning
csharp_style_expression_bodied_accessors = true:warning
csharp_style_expression_bodied_lambdas = true:warning
csharp_style_expression_bodied_local_functions = true:warning
# Pattern matching
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#pattern-matching
csharp_style_pattern_matching_over_is_with_cast_check = true:warning
csharp_style_pattern_matching_over_as_with_null_check = true:warning
# Inlined variable declarations
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#inlined-variable-declarations
csharp_style_inlined_variable_declaration = true:warning
# Expression-level preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#expression-level-preferences
csharp_prefer_simple_default_expression = true:warning
# "Null" checking preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#c-null-checking-preferences
csharp_style_throw_expression = true:warning
csharp_style_conditional_delegate_call = true:warning
# Code block preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#code-block-preferences
csharp_prefer_braces = true:warning
# Unused value preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#unused-value-preferences
csharp_style_unused_value_expression_statement_preference = discard_variable:suggestion
dotnet_diagnostic.IDE0058.severity = suggestion
csharp_style_unused_value_assignment_preference = discard_variable:suggestion
dotnet_diagnostic.IDE0059.severity = suggestion
# Index and range preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#index-and-range-preferences
csharp_style_prefer_index_operator = true:warning
csharp_style_prefer_range_operator = true:warning
# Miscellaneous preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#miscellaneous-preferences
csharp_style_deconstructed_variable_declaration = true:warning
csharp_style_pattern_local_over_anonymous_function = true:warning
csharp_using_directive_placement = outside_namespace:warning
csharp_prefer_static_local_function = true:warning
csharp_prefer_simple_using_statement = true:suggestion
dotnet_diagnostic.IDE0063.severity = suggestion
csharp_style_namespace_declarations = file_scoped
##########################################
# .NET Formatting Conventions
# https://docs.microsoft.com/visualstudio/ide/editorconfig-code-style-settings-reference#formatting-conventions
##########################################
# Organize usings
# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#organize-using-directives
dotnet_sort_system_directives_first = true
# Newline options
# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#new-line-options
csharp_new_line_before_open_brace = all
csharp_new_line_before_else = true
csharp_new_line_before_catch = true
csharp_new_line_before_finally = true
csharp_new_line_before_members_in_object_initializers = true
csharp_new_line_before_members_in_anonymous_types = true
csharp_new_line_between_query_expression_clauses = true
# Indentation options
# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#indentation-options
csharp_indent_case_contents = true
csharp_indent_switch_labels = true
csharp_indent_labels = no_change
csharp_indent_block_contents = true
csharp_indent_braces = false
csharp_indent_case_contents_when_block = false
# Spacing options
# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#spacing-options
csharp_space_after_cast = false
csharp_space_after_keywords_in_control_flow_statements = true
csharp_space_between_parentheses = false
csharp_space_before_colon_in_inheritance_clause = true
csharp_space_after_colon_in_inheritance_clause = true
csharp_space_around_binary_operators = before_and_after
csharp_space_between_method_declaration_parameter_list_parentheses = false
csharp_space_between_method_declaration_empty_parameter_list_parentheses = false
csharp_space_between_method_declaration_name_and_open_parenthesis = false
csharp_space_between_method_call_parameter_list_parentheses = false
csharp_space_between_method_call_empty_parameter_list_parentheses = false
csharp_space_between_method_call_name_and_opening_parenthesis = false
csharp_space_after_comma = true
csharp_space_before_comma = false
csharp_space_after_dot = false
csharp_space_before_dot = false
csharp_space_after_semicolon_in_for_statement = true
csharp_space_before_semicolon_in_for_statement = false
csharp_space_around_declaration_statements = false
csharp_space_before_open_square_brackets = false
csharp_space_between_empty_square_brackets = false
csharp_space_between_square_brackets = false
# Wrapping options
# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#wrap-options
csharp_preserve_single_line_statements = false
csharp_preserve_single_line_blocks = true
##########################################
# .NET Naming Conventions
# https://docs.microsoft.com/visualstudio/ide/editorconfig-naming-conventions
##########################################
[*.{cs,csx,cake,vb,vbx}]
dotnet_diagnostic.CA1000.severity = suggestion
dotnet_diagnostic.CA1001.severity = error
dotnet_diagnostic.CA1018.severity = error
dotnet_diagnostic.CA1051.severity = suggestion
dotnet_diagnostic.CA1068.severity = error
dotnet_diagnostic.CA1069.severity = error
dotnet_diagnostic.CA1304.severity = error
dotnet_diagnostic.CA1305.severity = suggestion
dotnet_diagnostic.CA1309.severity = suggestion
dotnet_diagnostic.CA1310.severity = error
dotnet_diagnostic.CA1707.severity = suggestion
dotnet_diagnostic.CA1708.severity = suggestion
dotnet_diagnostic.CA1711.severity = suggestion
dotnet_diagnostic.CA1716.severity = suggestion
dotnet_diagnostic.CA1720.severity = suggestion
dotnet_diagnostic.CA1725.severity = suggestion
dotnet_diagnostic.CA1805.severity = suggestion
dotnet_diagnostic.CA1816.severity = suggestion
dotnet_diagnostic.CA1822.severity = suggestion
dotnet_diagnostic.CA1825.severity = error
dotnet_diagnostic.CA1826.severity = silent
dotnet_diagnostic.CA1827.severity = error
dotnet_diagnostic.CA1829.severity = suggestion
dotnet_diagnostic.CA1834.severity = error
dotnet_diagnostic.CA1845.severity = suggestion
dotnet_diagnostic.CA1848.severity = suggestion
dotnet_diagnostic.CA2016.severity = suggestion
dotnet_diagnostic.CA2201.severity = error
dotnet_diagnostic.CA2206.severity = error
dotnet_diagnostic.CA2208.severity = error
dotnet_diagnostic.CA2211.severity = error
dotnet_diagnostic.CA2249.severity = error
dotnet_diagnostic.CA2251.severity = error
dotnet_diagnostic.CA2252.severity = none
dotnet_diagnostic.CA2254.severity = suggestion
dotnet_diagnostic.CS0169.severity = error
dotnet_diagnostic.CS0219.severity = error
dotnet_diagnostic.CS1998.severity = error
dotnet_diagnostic.CS8602.severity = error
dotnet_diagnostic.CS8604.severity = error
dotnet_diagnostic.CS8618.severity = error
dotnet_diagnostic.CS0618.severity = error
dotnet_diagnostic.CS4014.severity = error
dotnet_diagnostic.CS8600.severity = error
dotnet_diagnostic.CS8603.severity = error
dotnet_diagnostic.CS8625.severity = error
dotnet_diagnostic.BL0005.severity = suggestion
dotnet_diagnostic.MVC1000.severity = suggestion
dotnet_diagnostic.IDE0055.severity = suggestion # Fix formatting
dotnet_diagnostic.IDE0023.severity = suggestion # use expression body for operators
dotnet_diagnostic.IDE0025.severity = suggestion # use expression body for properties
dotnet_diagnostic.IDE1006.severity = suggestion # Naming rule violation: These words cannot contain lower case characters
dotnet_diagnostic.IDE0072.severity = suggestion # Populate switch - forces population of all cases even when default specified
dotnet_diagnostic.IDE0027.severity = suggestion # Use expression body for accessors
dotnet_diagnostic.IDE0032.severity = suggestion # Use auto property
dotnet_diagnostic.IDE0007.severity = error # Use var
dotnet_diagnostic.IDE0011.severity = error # Use braces on if statements
dotnet_diagnostic.IDE0057.severity = suggestion # substring can be simplified
dotnet_diagnostic.IDE0004.severity = error # redundant cast
dotnet_diagnostic.IDE0010.severity = silent # populate switch
dotnet_diagnostic.IDE0021.severity = silent # expression body for constructors
dotnet_diagnostic.IDE0022.severity = silent # expression body for methods
dotnet_diagnostic.IDE0024.severity = silent # expression body for operators
dotnet_diagnostic.IDE0028.severity = silent # simplify collection initialization
dotnet_diagnostic.IDE0033.severity = error # prefer explicitly provided tuple name
dotnet_diagnostic.IDE0040.severity = error # accessibility modifiers required
dotnet_diagnostic.IDE0041.severity = error # use 'is null' check
dotnet_diagnostic.IDE0042.severity = error # deconstruct variable declaration
dotnet_diagnostic.IDE0044.severity = error # make field readonly when possible
dotnet_diagnostic.IDE0047.severity = suggestion # remove unnecessary parentheses
dotnet_diagnostic.IDE0051.severity = error # unused field
dotnet_diagnostic.IDE0052.severity = error # unused member
dotnet_diagnostic.IDE0060.severity = suggestion # unused parameters
dotnet_diagnostic.IDE0066.severity = suggestion # switch expression
dotnet_diagnostic.IDE0078.severity = suggestion # use pattern matching
dotnet_diagnostic.IDE0090.severity = suggestion # new can be simplified
dotnet_diagnostic.IDE0130.severity = suggestion # namespace folder structure
dotnet_diagnostic.IDE0160.severity = silent # block-scoped namespaces are not required
dotnet_diagnostic.IDE0161.severity = suggestion # Please use file namespaces
##########################################
# Styles
##########################################
# camel_case_style - Define the camelCase style
dotnet_naming_style.camel_case_style.capitalization = camel_case
# pascal_case_style - Define the PascalCase style
dotnet_naming_style.pascal_case_style.capitalization = pascal_case
# constant_case - Define the CONSTANT_CASE style
dotnet_naming_style.constant_case.capitalization = all_upper
dotnet_naming_style.constant_case.word_separator = _
# first_upper_style - The first character must start with an upper-case character
dotnet_naming_style.first_upper_style.capitalization = first_word_upper
# prefix_interface_with_i_style - Interfaces must be PascalCase and the first character of an interface must be an 'I'
dotnet_naming_style.prefix_interface_with_i_style.capitalization = pascal_case
dotnet_naming_style.prefix_interface_with_i_style.required_prefix = I
# prefix_type_parameters_with_t_style - Generic Type Parameters must be PascalCase and the first character must be a 'T'
dotnet_naming_style.prefix_type_parameters_with_t_style.capitalization = pascal_case
dotnet_naming_style.prefix_type_parameters_with_t_style.required_prefix = T
# disallowed_style - Anything that has this style applied is marked as disallowed
dotnet_naming_style.disallowed_style.capitalization = pascal_case
dotnet_naming_style.disallowed_style.required_prefix = ____RULE_VIOLATION____
dotnet_naming_style.disallowed_style.required_suffix = ____RULE_VIOLATION____
# internal_error_style - This style should never occur; if it does, it indicates a bug in this file or in the parser consuming it
dotnet_naming_style.internal_error_style.capitalization = pascal_case
dotnet_naming_style.internal_error_style.required_prefix = ____INTERNAL_ERROR____
dotnet_naming_style.internal_error_style.required_suffix = ____INTERNAL_ERROR____
# underscore_camel_case_style - Private fields must be camelCase and prefixed with an underscore
dotnet_naming_style.underscore_camel_case_style.capitalization = camel_case
dotnet_naming_style.underscore_camel_case_style.required_prefix = _
##########################################
# .NET Design Guideline Field Naming Rules
# Naming rules for fields follow the .NET Framework design guidelines
# https://docs.microsoft.com/dotnet/standard/design-guidelines/index
##########################################
# All public/protected/protected_internal constant fields must be constant_case
# https://docs.microsoft.com/dotnet/standard/design-guidelines/field
dotnet_naming_symbols.public_protected_constant_fields_group.applicable_accessibilities = public, protected, protected_internal
dotnet_naming_symbols.public_protected_constant_fields_group.required_modifiers = const
dotnet_naming_symbols.public_protected_constant_fields_group.applicable_kinds = field
dotnet_naming_rule.public_protected_constant_fields_must_be_pascal_case_rule.symbols = public_protected_constant_fields_group
dotnet_naming_rule.public_protected_constant_fields_must_be_pascal_case_rule.style = constant_case
dotnet_naming_rule.public_protected_constant_fields_must_be_pascal_case_rule.severity = warning
# All public/protected/protected_internal static readonly fields must be constant_case
# https://docs.microsoft.com/dotnet/standard/design-guidelines/field
dotnet_naming_symbols.public_protected_static_readonly_fields_group.applicable_accessibilities = public, protected, protected_internal
dotnet_naming_symbols.public_protected_static_readonly_fields_group.required_modifiers = static, readonly
dotnet_naming_symbols.public_protected_static_readonly_fields_group.applicable_kinds = field
dotnet_naming_rule.public_protected_static_readonly_fields_must_be_pascal_case_rule.symbols = public_protected_static_readonly_fields_group
dotnet_naming_rule.public_protected_static_readonly_fields_must_be_pascal_case_rule.style = constant_case
dotnet_naming_rule.public_protected_static_readonly_fields_must_be_pascal_case_rule.severity = warning
# No other public/protected/protected_internal fields are allowed
# https://docs.microsoft.com/dotnet/standard/design-guidelines/field
dotnet_naming_symbols.other_public_protected_fields_group.applicable_accessibilities = public, protected, protected_internal
dotnet_naming_symbols.other_public_protected_fields_group.applicable_kinds = field
dotnet_naming_rule.other_public_protected_fields_disallowed_rule.symbols = other_public_protected_fields_group
dotnet_naming_rule.other_public_protected_fields_disallowed_rule.style = disallowed_style
dotnet_naming_rule.other_public_protected_fields_disallowed_rule.severity = error
##########################################
# StyleCop Field Naming Rules
# Naming rules for fields follow the StyleCop analyzers
# This does not override any rules using disallowed_style above
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers
##########################################
# All constant fields must be constant_case
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1303.md
dotnet_naming_symbols.stylecop_constant_fields_group.applicable_accessibilities = public, internal, protected_internal, protected, private_protected, private
dotnet_naming_symbols.stylecop_constant_fields_group.required_modifiers = const
dotnet_naming_symbols.stylecop_constant_fields_group.applicable_kinds = field
dotnet_naming_rule.stylecop_constant_fields_must_be_pascal_case_rule.symbols = stylecop_constant_fields_group
dotnet_naming_rule.stylecop_constant_fields_must_be_pascal_case_rule.style = constant_case
dotnet_naming_rule.stylecop_constant_fields_must_be_pascal_case_rule.severity = warning
# All static readonly fields must be constant_case
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1311.md
dotnet_naming_symbols.stylecop_static_readonly_fields_group.applicable_accessibilities = public, internal, protected_internal, protected, private_protected, private
dotnet_naming_symbols.stylecop_static_readonly_fields_group.required_modifiers = static, readonly
dotnet_naming_symbols.stylecop_static_readonly_fields_group.applicable_kinds = field
dotnet_naming_rule.stylecop_static_readonly_fields_must_be_pascal_case_rule.symbols = stylecop_static_readonly_fields_group
dotnet_naming_rule.stylecop_static_readonly_fields_must_be_pascal_case_rule.style = constant_case
dotnet_naming_rule.stylecop_static_readonly_fields_must_be_pascal_case_rule.severity = warning
# No non-private instance fields are allowed
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1401.md
dotnet_naming_symbols.stylecop_fields_must_be_private_group.applicable_accessibilities = public, internal, protected_internal, protected, private_protected
dotnet_naming_symbols.stylecop_fields_must_be_private_group.applicable_kinds = field
dotnet_naming_rule.stylecop_instance_fields_must_be_private_rule.symbols = stylecop_fields_must_be_private_group
dotnet_naming_rule.stylecop_instance_fields_must_be_private_rule.style = disallowed_style
dotnet_naming_rule.stylecop_instance_fields_must_be_private_rule.severity = error
# Private fields must be camelCase
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1306.md
dotnet_naming_symbols.stylecop_private_fields_group.applicable_accessibilities = private
dotnet_naming_symbols.stylecop_private_fields_group.applicable_kinds = field
dotnet_naming_rule.stylecop_private_fields_must_be_camel_case_rule.symbols = stylecop_private_fields_group
dotnet_naming_rule.stylecop_private_fields_must_be_camel_case_rule.style = underscore_camel_case_style
dotnet_naming_rule.stylecop_private_fields_must_be_camel_case_rule.severity = warning
# Local variables must be camelCase
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1312.md
dotnet_naming_symbols.stylecop_local_fields_group.applicable_accessibilities = local
dotnet_naming_symbols.stylecop_local_fields_group.applicable_kinds = local
dotnet_naming_rule.stylecop_local_fields_must_be_camel_case_rule.symbols = stylecop_local_fields_group
dotnet_naming_rule.stylecop_local_fields_must_be_camel_case_rule.style = camel_case_style
dotnet_naming_rule.stylecop_local_fields_must_be_camel_case_rule.severity = warning
# This rule should never fire. However, it's included for at least two purposes:
# First, it helps to understand, reason about, and root-cause certain types of issues, such as bugs in .editorconfig parsers.
# Second, it helps to raise immediate awareness if a new field type is added (as occurred recently in C#).
dotnet_naming_symbols.sanity_check_uncovered_field_case_group.applicable_accessibilities = *
dotnet_naming_symbols.sanity_check_uncovered_field_case_group.applicable_kinds = field
dotnet_naming_rule.sanity_check_uncovered_field_case_rule.symbols = sanity_check_uncovered_field_case_group
dotnet_naming_rule.sanity_check_uncovered_field_case_rule.style = internal_error_style
dotnet_naming_rule.sanity_check_uncovered_field_case_rule.severity = error
##########################################
# Other Naming Rules
##########################################
# All of the following must be PascalCase:
# - Namespaces
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-namespaces
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1300.md
# - Classes and Enumerations
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-classes-structs-and-interfaces
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1300.md
# - Delegates
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-classes-structs-and-interfaces#names-of-common-types
# - Constructors, Properties, Events, Methods
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-type-members
dotnet_naming_symbols.element_group.applicable_kinds = namespace, class, enum, struct, delegate, event, method, property
dotnet_naming_rule.element_rule.symbols = element_group
dotnet_naming_rule.element_rule.style = pascal_case_style
dotnet_naming_rule.element_rule.severity = warning
# Interfaces use PascalCase and are prefixed with uppercase 'I'
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-classes-structs-and-interfaces
dotnet_naming_symbols.interface_group.applicable_kinds = interface
dotnet_naming_rule.interface_rule.symbols = interface_group
dotnet_naming_rule.interface_rule.style = prefix_interface_with_i_style
dotnet_naming_rule.interface_rule.severity = warning
# Generics Type Parameters use PascalCase and are prefixed with uppercase 'T'
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-classes-structs-and-interfaces
dotnet_naming_symbols.type_parameter_group.applicable_kinds = type_parameter
dotnet_naming_rule.type_parameter_rule.symbols = type_parameter_group
dotnet_naming_rule.type_parameter_rule.style = prefix_type_parameters_with_t_style
dotnet_naming_rule.type_parameter_rule.severity = warning
# Function parameters use camelCase
# https://docs.microsoft.com/dotnet/standard/design-guidelines/naming-parameters
dotnet_naming_symbols.parameters_group.applicable_kinds = parameter
dotnet_naming_rule.parameters_rule.symbols = parameters_group
dotnet_naming_rule.parameters_rule.style = camel_case_style
dotnet_naming_rule.parameters_rule.severity = warning
##########################################
# License
##########################################
# The following applies as to the .editorconfig file ONLY, and is
# included below for reference, per the requirements of the license
# corresponding to this .editorconfig file.
# See: https://github.com/RehanSaeed/EditorConfig
#
# MIT License
#
# Copyright (c) 2017-2019 Muhammad Rehan Saeed
# Copyright (c) 2019 Henry Gabryjelski
#
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the
# Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject
# to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
##########################################
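Taken together, the naming rules above pin down a single shape for C# identifiers. A minimal, hypothetical sketch (not code from this repository) of a class that satisfies them:

```C#
public interface IChecksum { }                      // interfaces: PascalCase with 'I' prefix

public class ChecksumCache<TValue> : IChecksum      // types PascalCase; type parameters 'T' prefix
{
    public const int DEFAULT_CAPACITY = 16;         // const fields: CONSTANT_CASE
    private static readonly int MAX_SIZE = 1024;    // static readonly fields: CONSTANT_CASE
    private int _currentSize;                       // private instance fields: _camelCase

    public int CurrentSize => _currentSize;         // members: PascalCase (expression bodies preferred)

    public void Add(int itemWeight)                 // parameters: camelCase
    {
        var localTotal = _currentSize + itemWeight; // locals: camelCase; 'var' preferred
        _currentSize = System.Math.Min(localTotal, MAX_SIZE);
    }
}
```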

.github/dependabot.yml

@@ -0,0 +1,6 @@
version: 2
updates:
- package-ecosystem: "github-actions" # search for actions - there are other options available
directory: "/" # search in .github/workflows under root `/`
schedule:
interval: "weekly" # check for action update every week

.github/workflows/dotnetcore.yml

@@ -0,0 +1,20 @@
name: SharpCompress
on: [push, pull_request]
jobs:
build:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [windows-latest, ubuntu-latest]
steps:
- uses: actions/checkout@v3
- uses: actions/setup-dotnet@v2
with:
dotnet-version: 6.0.x
- run: dotnet run -p build/build.csproj
- uses: actions/upload-artifact@v3
with:
name: ${{ matrix.os }}-sharpcompress.nupkg
path: artifacts/*

.gitignore

@@ -17,3 +17,5 @@ tools
.idea/
.DS_Store
*.snupkg
/tests/TestArchives/6d23a38c-f064-4ef1-ad89-b942396f53b9/Scratch

FORMATS.md

@@ -19,7 +19,6 @@
| Tar.XZ | LZMA2 | Decompress | TarArchive | TarReader | TarWriter (3) |
| GZip (single file) | DEFLATE | Both | GZipArchive | GZipReader | GZipWriter |
| 7Zip (4) | LZMA, LZMA2, BZip2, PPMd, BCJ, BCJ2, Deflate | Decompress | SevenZipArchive | N/A | N/A |
| LZip (single file) (5) | LZip (LZMA) | Both | LZipArchive | LZipReader | LZipWriter |
1. SOLID Rars are only supported in the RarReader API.
2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading/writing is supported but only with seekable streams as the Zip spec doesn't support Zip64 data in post data descriptors. Deflate64 is only supported for reading.
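As a hedged illustration of note 2, a pkware- or WinzipAES-encrypted zip is opened by supplying a password through `ReaderOptions`; the file name and password below are placeholders:

```C#
using System;
using SharpCompress.Archives.Zip;
using SharpCompress.Readers;

using (var archive = ZipArchive.Open("encrypted.zip", new ReaderOptions { Password = "placeholder" }))
{
    foreach (var entry in archive.Entries)
    {
        Console.WriteLine($"{entry.Key}: {entry.Size} bytes"); // entries decrypt transparently while reading
    }
}
```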

README.md

@@ -1,14 +1,11 @@
# SharpCompress
SharpCompress is a compression library in pure C# for .NET 3.5, 4.5, .NET Standard 1.0, 1.3 that can unrar, un7zip, unzip, untar unbzip2 and ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip are implemented.
SharpCompress is a compression library in pure C# for .NET Standard 2.0, 2.1, .NET Core 3.1 and .NET 5.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
AppVeyor Build -
[![Build status](https://ci.appveyor.com/api/projects/status/voxg971oemmvxh1e/branch/master?svg=true)](https://ci.appveyor.com/project/adamhathcock/sharpcompress/branch/master)
Circle CI Build -
[![CircleCI](https://circleci.com/gh/adamhathcock/sharpcompress.svg?style=svg)](https://circleci.com/gh/adamhathcock/sharpcompress)
GitHub Actions Build -
[![GitHubActions](https://github.com/adamhathcock/sharpcompress/workflows/SharpCompress/badge.svg)](https://circleci.com/gh/adamhathcock/sharpcompress)
## Need Help?
@@ -185,6 +182,8 @@ I'm always looking for help or ideas. Please submit code or email with ideas. Un
XZ implementation based on: https://github.com/sambott/XZ.NET by @sambott
XZ BCJ filters support contributed by Louis-Michel Bergeron, on behalf of aDolus Technology Inc. - 2022
7Zip implementation based on: https://code.google.com/p/managed-lzma/
LICENSE

sharpcompress.sln

@@ -13,6 +13,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SharpCompress", "src\SharpC
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SharpCompress.Test", "tests\SharpCompress.Test\SharpCompress.Test.csproj", "{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "build", "build\build.csproj", "{D4D613CB-5E94-47FB-85BE-B8423D20C545}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -27,6 +29,10 @@ Global
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Debug|Any CPU.Build.0 = Debug|Any CPU
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Release|Any CPU.ActiveCfg = Release|Any CPU
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Release|Any CPU.Build.0 = Release|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Debug|Any CPU.Build.0 = Debug|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Release|Any CPU.ActiveCfg = Release|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE

sharpcompress.sln.DotSettings

@@ -122,7 +122,11 @@
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpKeepExistingMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpPlaceEmbeddedOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpRenamePlacementToArrangementMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpUseContinuousIndentInsideBracesMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EAddAccessorOwnerDeclarationBracesMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002ECSharpPlaceAttributeOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateBlankLinesAroundFieldToBlankLinesAroundProperty/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateThisQualifierSettings/@EntryIndexedValue">True</s:Boolean></wpf:ResourceDictionary>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateThisQualifierSettings/@EntryIndexedValue">True</s:Boolean>
<s:String x:Key="/Default/Environment/UnitTesting/UnitTestSessionStore/Sessions/=6af8f80e_002D9fdd_002D4223_002D8e02_002D473db916f9b2/@EntryIndexedValue">&lt;SessionState ContinuousTestingIsOn="False" ContinuousTestingMode="0" FrameworkVersion="{x:Null}" IsLocked="False" Name="All tests from Solution" PlatformMonoPreference="{x:Null}" PlatformType="{x:Null}" xmlns="urn:schemas-jetbrains-com:jetbrains-ut-session" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"&gt;
&lt;Solution /&gt;
&lt;/SessionState&gt;</s:String></wpf:ResourceDictionary>

USAGE.md

@@ -1,8 +1,8 @@
# SharpCompress Usage
## Stream Rules
## Stream Rules (changed with 0.21)
When dealing with Streams, the rule should be that you don't close a stream you didn't create. In effect, this means you should always put a Stream in a using block to dispose of it.
However, the .NET Framework often has classes that will dispose streams by default to make things "easy" like the following:
@@ -23,13 +23,12 @@ using (var reader = new StreamReader(fileStream))
}
```
To deal with the "correct" rules as well as the expectations of users, I've decided on this:
* When writing, leave streams open.
* When reading, close streams
To deal with the "correct" rules as well as the expectations of users, I've decided to always close wrapped streams as of 0.21.
To be explicit though, consider always using the overloads that use `ReaderOptions` or `WriterOptions` and explicitly set `LeaveStreamOpen` the way you want.
If you use the compression stream classes directly and don't want the wrapped stream to be closed, use `NonDisposingStream` as a wrapper to prevent the inner stream from being disposed. The change in 0.21 simplified a lot, even though the usage is a bit more convoluted.
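A minimal sketch of being explicit about stream ownership (the file name is a placeholder; `LeaveStreamOpen = true` keeps the wrapped stream alive after the reader is disposed):

```C#
using (var fileStream = File.OpenRead("archive.zip"))
using (var reader = SharpCompress.Readers.ReaderFactory.Open(fileStream,
    new SharpCompress.Readers.ReaderOptions { LeaveStreamOpen = true }))
{
    while (reader.MoveToNextEntry())
    {
        // process reader.Entry; fileStream stays open after the reader is disposed
    }
}
```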
## Samples
Also, look over the tests for more thorough [examples](https://github.com/adamhathcock/sharpcompress/tree/master/tests/SharpCompress.Test)
@@ -128,3 +127,20 @@ using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, new WriterOption
writer.WriteAll("D:\\temp", "*", SearchOption.AllDirectories);
}
```
### Extract zip which has non-utf8 encoded filename(cp932)
```C#
var opts = new SharpCompress.Readers.ReaderOptions();
var encoding = Encoding.GetEncoding(932);
opts.ArchiveEncoding = new SharpCompress.Common.ArchiveEncoding();
opts.ArchiveEncoding.CustomDecoder = (data, offset, count) =>
{
    // decode only the requested slice of the buffer
    return encoding.GetString(data, offset, count);
};
var tr = SharpCompress.Archives.Zip.ZipArchive.Open("test.zip", opts);
foreach(var entry in tr.Entries)
{
Console.WriteLine($"{entry.Key}");
}
```

appveyor.yml

@@ -1,20 +0,0 @@
version: '{build}'
image: Visual Studio 2017
pull_requests:
do_not_increment_build_number: true
branches:
only:
- master
nuget:
disable_publish_on_pr: true
build_script:
- ps: .\build.ps1
test: off
artifacts:
- path: src\SharpCompress\bin\Release\*.nupkg

build.cake

@@ -1,89 +0,0 @@
var target = Argument("target", "Default");
var tag = Argument("tag", "cake");
Task("Restore")
.Does(() =>
{
DotNetCoreRestore(".");
});
Task("Build")
.IsDependentOn("Restore")
.Does(() =>
{
if (IsRunningOnWindows())
{
MSBuild("./sharpcompress.sln", c =>
{
c.SetConfiguration("Release")
.SetVerbosity(Verbosity.Minimal)
.UseToolVersion(MSBuildToolVersion.VS2017);
});
}
else
{
var settings = new DotNetCoreBuildSettings
{
Framework = "netstandard1.0",
Configuration = "Release",
NoRestore = true
};
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
settings.Framework = "netstandard1.3";
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
settings.Framework = "netstandard2.0";
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
}
});
Task("Test")
.IsDependentOn("Build")
.Does(() =>
{
var files = GetFiles("tests/**/*.csproj");
foreach(var file in files)
{
var settings = new DotNetCoreTestSettings
{
Configuration = "Release",
Framework = "netcoreapp2.0"
};
DotNetCoreTest(file.ToString(), settings);
}
});
Task("Pack")
.IsDependentOn("Build")
.Does(() =>
{
if (IsRunningOnWindows())
{
MSBuild("src/SharpCompress/SharpCompress.csproj", c => c
.SetConfiguration("Release")
.SetVerbosity(Verbosity.Minimal)
.UseToolVersion(MSBuildToolVersion.VS2017)
.WithProperty("NoBuild", "true")
.WithTarget("Pack"));
}
else
{
Information("Skipping Pack as this is not Windows");
}
});
Task("Default")
.IsDependentOn("Restore")
.IsDependentOn("Build")
.IsDependentOn("Test")
.IsDependentOn("Pack");
Task("RunTests")
.IsDependentOn("Restore")
.IsDependentOn("Build")
.IsDependentOn("Test");
RunTarget(target);

build.ps1

@@ -1,228 +0,0 @@
##########################################################################
# This is the Cake bootstrapper script for PowerShell.
# This file was downloaded from https://github.com/cake-build/resources
# Feel free to change this file to fit your needs.
##########################################################################
<#
.SYNOPSIS
This is a Powershell script to bootstrap a Cake build.
.DESCRIPTION
This Powershell script will download NuGet if missing, restore NuGet tools (including Cake)
and execute your Cake build script with the parameters you provide.
.PARAMETER Script
The build script to execute.
.PARAMETER Target
The build script target to run.
.PARAMETER Configuration
The build configuration to use.
.PARAMETER Verbosity
Specifies the amount of information to be displayed.
.PARAMETER Experimental
Tells Cake to use the latest Roslyn release.
.PARAMETER WhatIf
Performs a dry run of the build script.
No tasks will be executed.
.PARAMETER Mono
Tells Cake to use the Mono scripting engine.
.PARAMETER SkipToolPackageRestore
Skips restoring of packages.
.PARAMETER ScriptArgs
Remaining arguments are added here.
.LINK
http://cakebuild.net
#>
[CmdletBinding()]
Param(
[string]$Script = "build.cake",
[string]$Target = "Default",
[ValidateSet("Release", "Debug")]
[string]$Configuration = "Release",
[ValidateSet("Quiet", "Minimal", "Normal", "Verbose", "Diagnostic")]
[string]$Verbosity = "Verbose",
[switch]$Experimental,
[Alias("DryRun","Noop")]
[switch]$WhatIf,
[switch]$Mono,
[switch]$SkipToolPackageRestore,
[Parameter(Position=0,Mandatory=$false,ValueFromRemainingArguments=$true)]
[string[]]$ScriptArgs
)
[Reflection.Assembly]::LoadWithPartialName("System.Security") | Out-Null
function MD5HashFile([string] $filePath)
{
if ([string]::IsNullOrEmpty($filePath) -or !(Test-Path $filePath -PathType Leaf))
{
return $null
}
[System.IO.Stream] $file = $null;
[System.Security.Cryptography.MD5] $md5 = $null;
try
{
$md5 = [System.Security.Cryptography.MD5]::Create()
$file = [System.IO.File]::OpenRead($filePath)
return [System.BitConverter]::ToString($md5.ComputeHash($file))
}
finally
{
if ($file -ne $null)
{
$file.Dispose()
}
}
}
Write-Host "Preparing to run build script..."
if(!$PSScriptRoot){
$PSScriptRoot = Split-Path $MyInvocation.MyCommand.Path -Parent
}
$TOOLS_DIR = Join-Path $PSScriptRoot "tools"
$ADDINS_DIR = Join-Path $TOOLS_DIR "addins"
$MODULES_DIR = Join-Path $TOOLS_DIR "modules"
$NUGET_EXE = Join-Path $TOOLS_DIR "nuget.exe"
$CAKE_EXE = Join-Path $TOOLS_DIR "Cake/Cake.exe"
$NUGET_URL = "https://dist.nuget.org/win-x86-commandline/latest/nuget.exe"
$PACKAGES_CONFIG = Join-Path $TOOLS_DIR "packages.config"
$PACKAGES_CONFIG_MD5 = Join-Path $TOOLS_DIR "packages.config.md5sum"
$ADDINS_PACKAGES_CONFIG = Join-Path $ADDINS_DIR "packages.config"
$MODULES_PACKAGES_CONFIG = Join-Path $MODULES_DIR "packages.config"
# Should we use mono?
$UseMono = "";
if($Mono.IsPresent) {
Write-Verbose -Message "Using the Mono based scripting engine."
$UseMono = "-mono"
}
# Should we use the new Roslyn?
$UseExperimental = "";
if($Experimental.IsPresent -and !($Mono.IsPresent)) {
Write-Verbose -Message "Using experimental version of Roslyn."
$UseExperimental = "-experimental"
}
# Is this a dry run?
$UseDryRun = "";
if($WhatIf.IsPresent) {
$UseDryRun = "-dryrun"
}
# Make sure tools folder exists
if ((Test-Path $PSScriptRoot) -and !(Test-Path $TOOLS_DIR)) {
Write-Verbose -Message "Creating tools directory..."
New-Item -Path $TOOLS_DIR -Type directory | out-null
}
# Make sure that packages.config exist.
if (!(Test-Path $PACKAGES_CONFIG)) {
Write-Verbose -Message "Downloading packages.config..."
try { (New-Object System.Net.WebClient).DownloadFile("http://cakebuild.net/download/bootstrapper/packages", $PACKAGES_CONFIG) } catch {
Throw "Could not download packages.config."
}
}
# Try find NuGet.exe in path if not exists
if (!(Test-Path $NUGET_EXE)) {
Write-Verbose -Message "Trying to find nuget.exe in PATH..."
$existingPaths = $Env:Path -Split ';' | Where-Object { (![string]::IsNullOrEmpty($_)) -and (Test-Path $_ -PathType Container) }
$NUGET_EXE_IN_PATH = Get-ChildItem -Path $existingPaths -Filter "nuget.exe" | Select -First 1
if ($NUGET_EXE_IN_PATH -ne $null -and (Test-Path $NUGET_EXE_IN_PATH.FullName)) {
Write-Verbose -Message "Found in PATH at $($NUGET_EXE_IN_PATH.FullName)."
$NUGET_EXE = $NUGET_EXE_IN_PATH.FullName
}
}
# Try download NuGet.exe if not exists
if (!(Test-Path $NUGET_EXE)) {
Write-Verbose -Message "Downloading NuGet.exe..."
try {
(New-Object System.Net.WebClient).DownloadFile($NUGET_URL, $NUGET_EXE)
} catch {
Throw "Could not download NuGet.exe."
}
}
# Save nuget.exe path to environment to be available to child processes
$ENV:NUGET_EXE = $NUGET_EXE
# Restore tools from NuGet?
if(-Not $SkipToolPackageRestore.IsPresent) {
Push-Location
Set-Location $TOOLS_DIR
# Check for changes in packages.config and remove installed tools if true.
[string] $md5Hash = MD5HashFile($PACKAGES_CONFIG)
if((!(Test-Path $PACKAGES_CONFIG_MD5)) -Or
($md5Hash -ne (Get-Content $PACKAGES_CONFIG_MD5 ))) {
Write-Verbose -Message "Missing or changed package.config hash..."
Remove-Item * -Recurse -Exclude packages.config,nuget.exe
}
Write-Verbose -Message "Restoring tools from NuGet..."
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$TOOLS_DIR`""
if ($LASTEXITCODE -ne 0) {
Throw "An error occured while restoring NuGet tools."
}
else
{
$md5Hash | Out-File $PACKAGES_CONFIG_MD5 -Encoding "ASCII"
}
Write-Verbose -Message ($NuGetOutput | out-string)
Pop-Location
}
# Restore addins from NuGet
if (Test-Path $ADDINS_PACKAGES_CONFIG) {
Push-Location
Set-Location $ADDINS_DIR
Write-Verbose -Message "Restoring addins from NuGet..."
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$ADDINS_DIR`""
if ($LASTEXITCODE -ne 0) {
Throw "An error occured while restoring NuGet addins."
}
Write-Verbose -Message ($NuGetOutput | out-string)
Pop-Location
}
# Restore modules from NuGet
if (Test-Path $MODULES_PACKAGES_CONFIG) {
Push-Location
Set-Location $MODULES_DIR
Write-Verbose -Message "Restoring modules from NuGet..."
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$MODULES_DIR`""
if ($LASTEXITCODE -ne 0) {
Throw "An error occured while restoring NuGet modules."
}
Write-Verbose -Message ($NuGetOutput | out-string)
Pop-Location
}
# Make sure that Cake has been installed.
if (!(Test-Path $CAKE_EXE)) {
Throw "Could not find Cake.exe at $CAKE_EXE"
}
# Start Cake
Write-Host "Running build script..."
Invoke-Expression "& `"$CAKE_EXE`" `"$Script`" -target=`"$Target`" -configuration=`"$Configuration`" -verbosity=`"$Verbosity`" $UseMono $UseDryRun $UseExperimental $ScriptArgs"
exit $LASTEXITCODE

build.sh

@@ -1,42 +0,0 @@
#!/usr/bin/env bash
##########################################################################
# This is the Cake bootstrapper script for Linux and OS X.
# This file was downloaded from https://github.com/cake-build/resources
# Feel free to change this file to fit your needs.
##########################################################################
# Define directories.
SCRIPT_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
TOOLS_DIR=$SCRIPT_DIR/tools
CAKE_VERSION=0.27.1
CAKE_DLL=$TOOLS_DIR/Cake.CoreCLR.$CAKE_VERSION/Cake.dll
# Make sure the tools folder exist.
if [ ! -d "$TOOLS_DIR" ]; then
mkdir "$TOOLS_DIR"
fi
###########################################################################
# INSTALL CAKE
###########################################################################
if [ ! -f "$CAKE_DLL" ]; then
curl -Lsfo Cake.CoreCLR.zip "https://www.nuget.org/api/v2/package/Cake.CoreCLR/$CAKE_VERSION" && unzip -q Cake.CoreCLR.zip -d "$TOOLS_DIR/Cake.CoreCLR.$CAKE_VERSION" && rm -f Cake.CoreCLR.zip
if [ $? -ne 0 ]; then
echo "An error occured while installing Cake."
exit 1
fi
fi
# Make sure that Cake has been installed.
if [ ! -f "$CAKE_DLL" ]; then
echo "Could not find Cake.exe at '$CAKE_DLL'."
exit 1
fi
###########################################################################
# RUN BUILD SCRIPT
###########################################################################
# Start Cake
exec dotnet "$CAKE_DLL" "$@"

build/Program.cs

@@ -0,0 +1,83 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.InteropServices;
using GlobExpressions;
using static Bullseye.Targets;
using static SimpleExec.Command;
const string Clean = "clean";
const string Format = "format";
const string Build = "build";
const string Test = "test";
const string Publish = "publish";
Target(Clean,
ForEach("**/bin", "**/obj"),
dir =>
{
IEnumerable<string> GetDirectories(string d)
{
return Glob.Directories(".", d);
}
void RemoveDirectory(string d)
{
if (Directory.Exists(d))
{
Console.WriteLine(d);
Directory.Delete(d, true);
}
}
foreach (var d in GetDirectories(dir))
{
RemoveDirectory(d);
}
});
Target(Format,
() =>
{
Run("dotnet", "tool restore");
Run("dotnet", "format --check");
});
Target(Build,
DependsOn(Format),
() =>
{
Run("dotnet", "build src/SharpCompress/SharpCompress.csproj -c Release");
});
Target(Test,
DependsOn(Build),
ForEach("net6.0", "net461"),
framework =>
{
IEnumerable<string> GetFiles(string d)
{
return Glob.Files(".", d);
}
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows) && framework == "net461")
{
return;
}
foreach (var file in GetFiles("**/*.Test.csproj"))
{
Run("dotnet", $"test {file} -c Release -f {framework}");
}
});
Target(Publish,
DependsOn(Test),
() =>
{
Run("dotnet", "pack src/SharpCompress/SharpCompress.csproj -c Release -o artifacts/");
});
Target("default", DependsOn(Publish), () => Console.WriteLine("Done!"));
await RunTargetsAndExitAsync(args);
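Because the leftover command-line arguments (`args`) are handed to `RunTargetsAndExitAsync`, individual targets can be selected from the shell; for example, `dotnet run --project build -- test` should run only the `test` target and its dependency chain. (Illustrative invocation, based on Bullseye's documented argument handling.)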

build/build.csproj

@@ -0,0 +1,14 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net6.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Bullseye" Version="4.0.0" />
<PackageReference Include="Glob" Version="1.1.9" />
<PackageReference Include="SimpleExec" Version="10.0.0" />
</ItemGroup>
</Project>

global.json

@@ -0,0 +1,6 @@
{
"sdk": {
"version": "6.0.200",
"rollForward": "latestFeature"
}
}
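(Note: with `rollForward` set to `latestFeature`, any installed 6.0.x SDK at feature band 6.0.2xx or newer satisfies this pin, so contributors are not locked to one exact patch version.)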

src/SharpCompress/Algorithms/Adler32.cs

@@ -0,0 +1,420 @@
// Copyright (c) Six Labors.
// Licensed under the Apache License, Version 2.0.
#if !NETSTANDARD2_0 && !NETSTANDARD2_1 && !NETFRAMEWORK
#define SUPPORTS_RUNTIME_INTRINSICS
#define SUPPORTS_HOTPATH
#endif
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
#if SUPPORTS_RUNTIME_INTRINSICS
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
#endif
#pragma warning disable IDE0007 // Use implicit type
namespace SharpCompress.Algorithms
{
/// <summary>
/// Calculates the 32 bit Adler checksum of a given buffer according to
/// RFC 1950 (ZLIB Compressed Data Format Specification version 3.3).
/// </summary>
internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/blob/main/src/ImageSharp/Compression/Zlib/Adler32.cs
{
/// <summary>
/// Global inlining options. Helps temporarily disable inlining for better profiler output.
/// </summary>
private static class InliningOptions // From https://github.com/SixLabors/ImageSharp/blob/main/src/ImageSharp/Common/Helpers/InliningOptions.cs
{
/// <summary>
/// <see cref="MethodImplOptions.AggressiveInlining"/> regardless of the build conditions.
/// </summary>
public const MethodImplOptions AlwaysInline = MethodImplOptions.AggressiveInlining;
#if PROFILING
public const MethodImplOptions HotPath = MethodImplOptions.NoInlining;
public const MethodImplOptions ShortMethod = MethodImplOptions.NoInlining;
#else
#if SUPPORTS_HOTPATH
public const MethodImplOptions HotPath = MethodImplOptions.AggressiveOptimization;
#else
public const MethodImplOptions HotPath = MethodImplOptions.AggressiveInlining;
#endif
public const MethodImplOptions ShortMethod = MethodImplOptions.AggressiveInlining;
#endif
public const MethodImplOptions ColdPath = MethodImplOptions.NoInlining;
}
#if SUPPORTS_RUNTIME_INTRINSICS
/// <summary>
/// Provides optimized static methods for trigonometric, logarithmic,
/// and other common mathematical functions.
/// </summary>
private static class Numerics // From https://github.com/SixLabors/ImageSharp/blob/main/src/ImageSharp/Common/Helpers/Numerics.cs
{
/// <summary>
/// Reduces elements of the vector into one sum.
/// </summary>
/// <param name="accumulator">The accumulator to reduce.</param>
/// <returns>The sum of all elements.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int ReduceSum(Vector256<int> accumulator)
{
// Add upper lane to lower lane.
Vector128<int> vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper());
// Add odd to even.
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_11_01_01));
// Add high to low.
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_10_11_10));
return Sse2.ConvertToInt32(vsum);
}
/// <summary>
/// Reduces even elements of the vector into one sum.
/// </summary>
/// <param name="accumulator">The accumulator to reduce.</param>
/// <returns>The sum of even elements.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int EvenReduceSum(Vector256<int> accumulator)
{
Vector128<int> vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper()); // add upper lane to lower lane
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_10_11_10)); // add high to low
// Vector128<int>.ToScalar() isn't optimized pre-net5.0 https://github.com/dotnet/runtime/pull/37882
return Sse2.ConvertToInt32(vsum);
}
}
#endif
/// <summary>
/// The default initial seed value of an Adler32 checksum calculation.
/// </summary>
public const uint SeedValue = 1U;
// Largest prime smaller than 65536
private const uint BASE = 65521;
// NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1
private const uint NMAX = 5552;
#if SUPPORTS_RUNTIME_INTRINSICS
private const int MinBufferSize = 64;
private const int BlockSize = 1 << 5;
// The C# compiler emits this as a compile-time constant embedded in the PE file.
private static ReadOnlySpan<byte> Tap1Tap2 => new byte[]
{
32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, // tap1
16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1 // tap2
};
#endif
/// <summary>
/// Calculates the Adler32 checksum with the bytes taken from the span.
/// </summary>
/// <param name="buffer">The readonly span of bytes.</param>
/// <returns>The <see cref="uint"/>.</returns>
[MethodImpl(InliningOptions.ShortMethod)]
public static uint Calculate(ReadOnlySpan<byte> buffer)
=> Calculate(SeedValue, buffer);
/// <summary>
/// Calculates the Adler32 checksum with the bytes taken from the span and seed.
/// </summary>
/// <param name="adler">The input Adler32 value.</param>
/// <param name="buffer">The readonly span of bytes.</param>
/// <returns>The <see cref="uint"/>.</returns>
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
public static uint Calculate(uint adler, ReadOnlySpan<byte> buffer)
{
if (buffer.IsEmpty)
{
return adler;
}
#if SUPPORTS_RUNTIME_INTRINSICS
if (Avx2.IsSupported && buffer.Length >= MinBufferSize)
{
return CalculateAvx2(adler, buffer);
}
if (Ssse3.IsSupported && buffer.Length >= MinBufferSize)
{
return CalculateSse(adler, buffer);
}
return CalculateScalar(adler, buffer);
#else
return CalculateScalar(adler, buffer);
#endif
}
// Based on https://github.com/chromium/chromium/blob/master/third_party/zlib/adler32_simd.c
#if SUPPORTS_RUNTIME_INTRINSICS
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
private static unsafe uint CalculateSse(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
// Process the data in blocks.
uint length = (uint)buffer.Length;
uint blocks = length / BlockSize;
length -= blocks * BlockSize;
fixed (byte* bufferPtr = &MemoryMarshal.GetReference(buffer))
{
fixed (byte* tapPtr = &MemoryMarshal.GetReference(Tap1Tap2))
{
byte* localBufferPtr = bufferPtr;
// _mm_setr_epi8 on x86
Vector128<sbyte> tap1 = Sse2.LoadVector128((sbyte*)tapPtr);
Vector128<sbyte> tap2 = Sse2.LoadVector128((sbyte*)(tapPtr + 0x10));
Vector128<byte> zero = Vector128<byte>.Zero;
var ones = Vector128.Create((short)1);
while (blocks > 0)
{
uint n = NMAX / BlockSize; /* The NMAX constraint. */
if (n > blocks)
{
n = blocks;
}
blocks -= n;
// Process n blocks of data. At most NMAX data bytes can be
// processed before s2 must be reduced modulo BASE.
Vector128<uint> v_ps = Vector128.CreateScalar(s1 * n);
Vector128<uint> v_s2 = Vector128.CreateScalar(s2);
Vector128<uint> v_s1 = Vector128<uint>.Zero;
do
{
// Load 32 input bytes.
Vector128<byte> bytes1 = Sse3.LoadDquVector128(localBufferPtr);
Vector128<byte> bytes2 = Sse3.LoadDquVector128(localBufferPtr + 0x10);
// Add previous block byte sum to v_ps.
v_ps = Sse2.Add(v_ps, v_s1);
// Horizontally add the bytes for s1, multiply-adds the
// bytes by [ 32, 31, 30, ... ] for s2.
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes1, zero).AsUInt32());
Vector128<short> mad1 = Ssse3.MultiplyAddAdjacent(bytes1, tap1);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad1, ones).AsUInt32());
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes2, zero).AsUInt32());
Vector128<short> mad2 = Ssse3.MultiplyAddAdjacent(bytes2, tap2);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad2, ones).AsUInt32());
localBufferPtr += BlockSize;
}
while (--n > 0);
v_s2 = Sse2.Add(v_s2, Sse2.ShiftLeftLogical(v_ps, 5));
// Sum epi32 ints v_s1(s2) and accumulate in s1(s2).
const byte S2301 = 0b1011_0001; // A B C D -> B A D C
const byte S1032 = 0b0100_1110; // A B C D -> C D A B
v_s1 = Sse2.Add(v_s1, Sse2.Shuffle(v_s1, S1032));
s1 += v_s1.ToScalar();
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S2301));
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S1032));
s2 = v_s2.ToScalar();
// Reduce.
s1 %= BASE;
s2 %= BASE;
}
if (length > 0)
{
HandleLeftOver(localBufferPtr, length, ref s1, ref s2);
}
return s1 | (s2 << 16);
}
}
}
// Based on: https://github.com/zlib-ng/zlib-ng/blob/develop/arch/x86/adler32_avx2.c
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
public static unsafe uint CalculateAvx2(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
uint length = (uint)buffer.Length;
fixed (byte* bufferPtr = &MemoryMarshal.GetReference(buffer))
{
byte* localBufferPtr = bufferPtr;
Vector256<byte> zero = Vector256<byte>.Zero;
var dot3v = Vector256.Create((short)1);
var dot2v = Vector256.Create(32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1);
// Process n blocks of data. At most NMAX data bytes can be
// processed before s2 must be reduced modulo BASE.
var vs1 = Vector256.CreateScalar(s1);
var vs2 = Vector256.CreateScalar(s2);
while (length >= 32)
{
int k = length < NMAX ? (int)length : (int)NMAX;
k -= k % 32;
length -= (uint)k;
Vector256<uint> vs10 = vs1;
Vector256<uint> vs3 = Vector256<uint>.Zero;
while (k >= 32)
{
// Load 32 input bytes.
Vector256<byte> block = Avx.LoadVector256(localBufferPtr);
// Sum of abs diff, resulting in 2 x int32's
Vector256<ushort> vs1sad = Avx2.SumAbsoluteDifferences(block, zero);
vs1 = Avx2.Add(vs1, vs1sad.AsUInt32());
vs3 = Avx2.Add(vs3, vs10);
// sum 32 uint8s to 16 shorts.
Vector256<short> vshortsum2 = Avx2.MultiplyAddAdjacent(block, dot2v);
// sum 16 shorts to 8 uint32s.
Vector256<int> vsum2 = Avx2.MultiplyAddAdjacent(vshortsum2, dot3v);
vs2 = Avx2.Add(vsum2.AsUInt32(), vs2);
vs10 = vs1;
localBufferPtr += BlockSize;
k -= 32;
}
// Defer the multiplication with 32 to outside of the loop.
vs3 = Avx2.ShiftLeftLogical(vs3, 5);
vs2 = Avx2.Add(vs2, vs3);
s1 = (uint)Numerics.EvenReduceSum(vs1.AsInt32());
s2 = (uint)Numerics.ReduceSum(vs2.AsInt32());
s1 %= BASE;
s2 %= BASE;
vs1 = Vector256.CreateScalar(s1);
vs2 = Vector256.CreateScalar(s2);
}
if (length > 0)
{
HandleLeftOver(localBufferPtr, length, ref s1, ref s2);
}
return s1 | (s2 << 16);
}
}
private static unsafe void HandleLeftOver(byte* localBufferPtr, uint length, ref uint s1, ref uint s2)
{
if (length >= 16)
{
s2 += s1 += localBufferPtr[0];
s2 += s1 += localBufferPtr[1];
s2 += s1 += localBufferPtr[2];
s2 += s1 += localBufferPtr[3];
s2 += s1 += localBufferPtr[4];
s2 += s1 += localBufferPtr[5];
s2 += s1 += localBufferPtr[6];
s2 += s1 += localBufferPtr[7];
s2 += s1 += localBufferPtr[8];
s2 += s1 += localBufferPtr[9];
s2 += s1 += localBufferPtr[10];
s2 += s1 += localBufferPtr[11];
s2 += s1 += localBufferPtr[12];
s2 += s1 += localBufferPtr[13];
s2 += s1 += localBufferPtr[14];
s2 += s1 += localBufferPtr[15];
localBufferPtr += 16;
length -= 16;
}
while (length-- > 0)
{
s2 += s1 += *localBufferPtr++;
}
if (s1 >= BASE)
{
s1 -= BASE;
}
s2 %= BASE;
}
#endif
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
private static unsafe uint CalculateScalar(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
uint k;
fixed (byte* bufferPtr = buffer)
{
var localBufferPtr = bufferPtr;
uint length = (uint)buffer.Length;
while (length > 0)
{
k = length < NMAX ? length : NMAX;
length -= k;
while (k >= 16)
{
s2 += s1 += localBufferPtr[0];
s2 += s1 += localBufferPtr[1];
s2 += s1 += localBufferPtr[2];
s2 += s1 += localBufferPtr[3];
s2 += s1 += localBufferPtr[4];
s2 += s1 += localBufferPtr[5];
s2 += s1 += localBufferPtr[6];
s2 += s1 += localBufferPtr[7];
s2 += s1 += localBufferPtr[8];
s2 += s1 += localBufferPtr[9];
s2 += s1 += localBufferPtr[10];
s2 += s1 += localBufferPtr[11];
s2 += s1 += localBufferPtr[12];
s2 += s1 += localBufferPtr[13];
s2 += s1 += localBufferPtr[14];
s2 += s1 += localBufferPtr[15];
localBufferPtr += 16;
k -= 16;
}
while (k-- > 0)
{
s2 += s1 += *localBufferPtr++;
}
s1 %= BASE;
s2 %= BASE;
}
return (s2 << 16) | s1;
}
}
}
}
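For intuition, a hedged usage sketch (the class is `internal`, so this would only compile inside the library); the expected value follows from the definition s1 = 1 + sum of bytes and s2 = sum of running s1 values, both modulo 65521:

```C#
var data = System.Text.Encoding.ASCII.GetBytes("abc"); // bytes 97, 98, 99
uint checksum = Adler32.Calculate(data);
// s1 = 1 + 97 + 98 + 99 = 295 (0x0127); s2 = 98 + 196 + 295 = 589 (0x024D)
// checksum = (s2 << 16) | s1 = 0x024D0127
```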

src/SharpCompress/Archives/AbstractArchive.cs

@@ -1,8 +1,9 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives
@@ -14,48 +15,34 @@ namespace SharpCompress.Archives
private readonly LazyReadOnlyCollection<TVolume> lazyVolumes;
private readonly LazyReadOnlyCollection<TEntry> lazyEntries;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionBegin;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionEnd;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionBegin;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionEnd;
public event EventHandler<CompressedBytesReadEventArgs> CompressedBytesRead;
public event EventHandler<FilePartExtractionBeginEventArgs> FilePartExtractionBegin;
public event EventHandler<CompressedBytesReadEventArgs>? CompressedBytesRead;
public event EventHandler<FilePartExtractionBeginEventArgs>? FilePartExtractionBegin;
protected ReaderOptions ReaderOptions { get; }
private bool disposed;
protected SourceStream SrcStream;
#if !NO_FILE
internal AbstractArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerOptions)
internal AbstractArchive(ArchiveType type, SourceStream srcStream)
{
Type = type;
if (!fileInfo.Exists)
{
throw new ArgumentException("File does not exist: " + fileInfo.FullName);
}
ReaderOptions = readerOptions;
readerOptions.LeaveStreamOpen = false;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(fileInfo));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
}
protected abstract IEnumerable<TVolume> LoadVolumes(FileInfo file);
#endif
internal AbstractArchive(ArchiveType type, IEnumerable<Stream> streams, ReaderOptions readerOptions)
{
Type = type;
ReaderOptions = readerOptions;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(streams.Select(CheckStreams)));
ReaderOptions = srcStream.ReaderOptions;
SrcStream = srcStream;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(SrcStream));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
}
#nullable disable
internal AbstractArchive(ArchiveType type)
{
Type = type;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
}
#nullable enable
public ArchiveType Type { get; }
@@ -81,29 +68,29 @@ namespace SharpCompress.Archives
/// <summary>
/// Returns a ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
/// </summary>
public virtual ICollection<TEntry> Entries { get { return lazyEntries; } }
public virtual ICollection<TEntry> Entries => lazyEntries;
/// <summary>
/// Returns a ReadOnlyCollection of all the RarArchiveVolumes across the one or many parts of the RarArchive.
/// </summary>
public ICollection<TVolume> Volumes { get { return lazyVolumes; } }
public ICollection<TVolume> Volumes => lazyVolumes;
/// <summary>
/// The total size of the files compressed in the archive.
/// </summary>
public virtual long TotalSize { get { return Entries.Aggregate(0L, (total, cf) => total + cf.CompressedSize); } }
public virtual long TotalSize => Entries.Aggregate(0L, (total, cf) => total + cf.CompressedSize);
/// <summary>
/// The total size of the files as uncompressed in the archive.
/// </summary>
public virtual long TotalUncompressSize { get { return Entries.Aggregate(0L, (total, cf) => total + cf.Size); } }
public virtual long TotalUncompressSize => Entries.Aggregate(0L, (total, cf) => total + cf.Size);
protected abstract IEnumerable<TVolume> LoadVolumes(IEnumerable<Stream> streams);
protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream srcStream);
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);
IEnumerable<IArchiveEntry> IArchive.Entries { get { return Entries.Cast<IArchiveEntry>(); } }
IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();
IEnumerable<IVolume> IArchive.Volumes { get { return lazyVolumes.Cast<IVolume>(); } }
IEnumerable<IVolume> IArchive.Volumes => lazyVolumes.Cast<IVolume>();
public virtual void Dispose()
{
@@ -111,6 +98,8 @@ namespace SharpCompress.Archives
{
lazyVolumes.ForEach(v => v.Dispose());
lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
if (SrcStream != null)
SrcStream.Dispose();
disposed = true;
}
}
@@ -123,31 +112,29 @@ namespace SharpCompress.Archives
void IExtractionListener.FireCompressedBytesRead(long currentPartCompressedBytes, long compressedReadBytes)
{
CompressedBytesRead?.Invoke(this, new CompressedBytesReadEventArgs
{
CurrentFilePartCompressedBytesRead = currentPartCompressedBytes,
CompressedBytesRead = compressedReadBytes
});
CompressedBytesRead?.Invoke(this, new CompressedBytesReadEventArgs(
currentFilePartCompressedBytesRead: currentPartCompressedBytes,
compressedBytesRead: compressedReadBytes
));
}
void IExtractionListener.FireFilePartExtractionBegin(string name, long size, long compressedSize)
{
FilePartExtractionBegin?.Invoke(this, new FilePartExtractionBeginEventArgs
{
CompressedSize = compressedSize,
Size = size,
Name = name
});
FilePartExtractionBegin?.Invoke(this, new FilePartExtractionBeginEventArgs(
compressedSize: compressedSize,
size: size,
name: name
));
}
/// <summary>
/// Use this method to extract all entries in an archive in order.
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
/// extracted sequentially for the best performance.
///
/// This method will load all entry information from the archive.
///
/// WARNING: this will reuse the underlying stream for the archive. Errors may
/// occur if this is used at the same time as other extraction methods on this instance.
/// </summary>
/// <returns></returns>
@@ -162,7 +149,7 @@ namespace SharpCompress.Archives
/// <summary>
/// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
/// </summary>
public virtual bool IsSolid { get { return false; } }
public virtual bool IsSolid => false;
/// <summary>
/// The archive can find all the parts of the archive needed to fully extract the archive. This forces the parsing of the entire archive.
@@ -176,4 +163,4 @@ namespace SharpCompress.Archives
}
}
}
}
}
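The event args above now take constructor parameters instead of object-initializer assignments, and the events themselves are declared nullable. A consumer-side sketch of subscribing to them; the archive path and handler bodies are illustrative only, not part of this diff:

using System;
using SharpCompress.Archives;

using (IArchive archive = ArchiveFactory.Open("parts.zip"))
{
    // The args expose CompressedBytesRead and CurrentFilePartCompressedBytesRead.
    archive.CompressedBytesRead += (s, e) =>
        Console.WriteLine($"{e.CompressedBytesRead} compressed bytes read so far");
    archive.EntryExtractionBegin += (s, e) => Console.WriteLine("entry extraction starting");
    foreach (IArchiveEntry entry in archive.Entries)
    {
        entry.WriteToDirectory("out");
    }
}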

View File

@@ -1,8 +1,9 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Writers;
@@ -12,29 +13,39 @@ namespace SharpCompress.Archives
where TEntry : IArchiveEntry
where TVolume : IVolume
{
private class RebuildPauseDisposable : IDisposable
{
private readonly AbstractWritableArchive<TEntry, TVolume> archive;
public RebuildPauseDisposable(AbstractWritableArchive<TEntry, TVolume> archive)
{
this.archive = archive;
archive.pauseRebuilding = true;
}
public void Dispose()
{
archive.pauseRebuilding = false;
archive.RebuildModifiedCollection();
}
}
private readonly List<TEntry> newEntries = new List<TEntry>();
private readonly List<TEntry> removedEntries = new List<TEntry>();
private readonly List<TEntry> modifiedEntries = new List<TEntry>();
private bool hasModifications;
private bool pauseRebuilding;
internal AbstractWritableArchive(ArchiveType type)
: base(type)
{
}
internal AbstractWritableArchive(ArchiveType type, Stream stream, ReaderOptions readerFactoryOptions)
: base(type, stream.AsEnumerable(), readerFactoryOptions)
internal AbstractWritableArchive(ArchiveType type, SourceStream srcStream)
: base(type, srcStream)
{
}
#if !NO_FILE
internal AbstractWritableArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerFactoryOptions)
: base(type, fileInfo, readerFactoryOptions)
{
}
#endif
public override ICollection<TEntry> Entries
{
get
@@ -47,8 +58,17 @@ namespace SharpCompress.Archives
}
}
public IDisposable PauseEntryRebuilding()
{
return new RebuildPauseDisposable(this);
}
private void RebuildModifiedCollection()
{
if (pauseRebuilding)
{
return;
}
hasModifications = true;
newEntries.RemoveAll(v => removedEntries.Contains(v));
modifiedEntries.Clear();
@@ -85,8 +105,7 @@ namespace SharpCompress.Archives
public TEntry AddEntry(string key, Stream source, bool closeStream,
long size = 0, DateTime? modified = null)
{
if (key.StartsWith("/")
|| key.StartsWith("\\"))
if (key.Length > 0 && key[0] is '/' or '\\')
{
key = key.Substring(1);
}
@@ -105,7 +124,7 @@ namespace SharpCompress.Archives
foreach (var path in Entries.Select(x => x.Key))
{
var p = path.Replace('/', '\\');
if (p.StartsWith("\\"))
if (p.Length > 0 && p[0] == '\\')
{
p = p.Substring(1);
}
@@ -144,4 +163,4 @@ namespace SharpCompress.Archives
modifiedEntries.Cast<Entry>().ForEach(x => x.Close());
}
}
}
}
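The RebuildPauseDisposable above backs the new public PauseEntryRebuilding API: every AddEntry normally triggers RebuildModifiedCollection, so batching additions inside the pause token defers that work until Dispose, which then rebuilds once. A usage sketch (archive type, entry names, and payloads are illustrative):

using System.IO;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Writers;

using (var archive = ZipArchive.Create())
{
    // Disposing the token resumes rebuilding and performs a single rebuild.
    using (archive.PauseEntryRebuilding())
    {
        for (int i = 0; i < 1000; i++)
        {
            var payload = new MemoryStream(new byte[] { 0x42 });
            archive.AddEntry($"entry{i}.bin", payload, closeStream: true, size: payload.Length);
        }
    }
    using (var output = File.Create("bulk.zip"))
    {
        archive.SaveTo(output, new WriterOptions(CompressionType.Deflate));
    }
}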

View File

@@ -1,17 +1,19 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Rar;
using SharpCompress.Archives.SevenZip;
using SharpCompress.Archives.Tar;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Compressors.LZMA;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives
{
public class ArchiveFactory
public static class ArchiveFactory
{
/// <summary>
/// Opens an Archive for random access
@@ -19,80 +21,57 @@ namespace SharpCompress.Archives
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <returns></returns>
public static IArchive Open(Stream stream, ReaderOptions readerOptions = null)
public static IArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull("stream");
stream.CheckNotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException("Stream should be readable and seekable");
}
readerOptions = readerOptions ?? new ReaderOptions();
if (ZipArchive.IsZipFile(stream, null))
readerOptions ??= new ReaderOptions();
ArchiveType? type;
IsArchive(stream, out type); //test and reset stream position
if (type != null)
{
stream.Seek(0, SeekOrigin.Begin);
return ZipArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (SevenZipArchive.IsSevenZipFile(stream))
{
stream.Seek(0, SeekOrigin.Begin);
return SevenZipArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (GZipArchive.IsGZipFile(stream))
{
stream.Seek(0, SeekOrigin.Begin);
return GZipArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (RarArchive.IsRarFile(stream, readerOptions))
{
stream.Seek(0, SeekOrigin.Begin);
return RarArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (TarArchive.IsTarFile(stream))
{
stream.Seek(0, SeekOrigin.Begin);
return TarArchive.Open(stream, readerOptions);
switch (type.Value)
{
case ArchiveType.Zip:
return ZipArchive.Open(stream, readerOptions);
case ArchiveType.SevenZip:
return SevenZipArchive.Open(stream, readerOptions);
case ArchiveType.GZip:
return GZipArchive.Open(stream, readerOptions);
case ArchiveType.Rar:
return RarArchive.Open(stream, readerOptions);
case ArchiveType.Tar:
return TarArchive.Open(stream, readerOptions);
}
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip, LZip");
}
public static IWritableArchive Create(ArchiveType type)
{
switch (type)
return type switch
{
case ArchiveType.Zip:
{
return ZipArchive.Create();
}
case ArchiveType.Tar:
{
return TarArchive.Create();
}
case ArchiveType.GZip:
{
return GZipArchive.Create();
}
default:
{
throw new NotSupportedException("Cannot create Archives of type: " + type);
}
}
ArchiveType.Zip => ZipArchive.Create(),
ArchiveType.Tar => TarArchive.Create(),
ArchiveType.GZip => GZipArchive.Create(),
_ => throw new NotSupportedException("Cannot create Archives of type: " + type)
};
}
#if !NO_FILE
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
public static IArchive Open(string filePath, ReaderOptions options = null)
public static IArchive Open(string filePath, ReaderOptions? options = null)
{
filePath.CheckNotNullOrEmpty("filePath");
return Open(new FileInfo(filePath), options ?? new ReaderOptions());
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), options);
}
/// <summary>
@@ -100,59 +79,218 @@ namespace SharpCompress.Archives
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
public static IArchive Open(FileInfo fileInfo, ReaderOptions options = null)
public static IArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
{
fileInfo.CheckNotNull("fileInfo");
options = options ?? new ReaderOptions();
using (var stream = fileInfo.OpenRead())
fileInfo.CheckNotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
ArchiveType? type;
using (Stream stream = fileInfo.OpenRead())
{
if (ZipArchive.IsZipFile(stream, null))
IsArchive(stream, out type); //test and reset stream position
if (type != null)
{
stream.Dispose();
return ZipArchive.Open(fileInfo, options);
switch (type.Value)
{
case ArchiveType.Zip:
return ZipArchive.Open(fileInfo, options);
case ArchiveType.SevenZip:
return SevenZipArchive.Open(fileInfo, options);
case ArchiveType.GZip:
return GZipArchive.Open(fileInfo, options);
case ArchiveType.Rar:
return RarArchive.Open(fileInfo, options);
case ArchiveType.Tar:
return TarArchive.Open(fileInfo, options);
}
}
stream.Seek(0, SeekOrigin.Begin);
if (SevenZipArchive.IsSevenZipFile(stream))
{
stream.Dispose();
return SevenZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (GZipArchive.IsGZipFile(stream))
{
stream.Dispose();
return GZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (RarArchive.IsRarFile(stream, options))
{
stream.Dispose();
return RarArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (TarArchive.IsTarFile(stream))
{
stream.Dispose();
return TarArchive.Open(fileInfo, options);
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="options"></param>
public static IArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? options = null)
{
fileInfos.CheckNotNull(nameof(fileInfos));
FileInfo[] files = fileInfos.ToArray();
if (files.Length == 0)
throw new InvalidOperationException("No files to open");
FileInfo fileInfo = files[0];
if (files.Length == 1)
return Open(fileInfo, options);
fileInfo.CheckNotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
ArchiveType? type;
using (Stream stream = fileInfo.OpenRead())
IsArchive(stream, out type); //test and reset stream position
if (type != null)
{
switch (type.Value)
{
case ArchiveType.Zip:
return ZipArchive.Open(files, options);
case ArchiveType.SevenZip:
return SevenZipArchive.Open(files, options);
case ArchiveType.GZip:
return GZipArchive.Open(files, options);
case ArchiveType.Rar:
return RarArchive.Open(files, options);
case ArchiveType.Tar:
return TarArchive.Open(files, options);
}
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
}
/// <summary>
/// Constructor with IEnumerable Stream objects, multi and split support.
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
public static IArchive Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
{
streams.CheckNotNull(nameof(streams));
if (streams.Count() == 0)
throw new InvalidOperationException("No streams");
if (streams.Count() == 1)
return Open(streams.First(), options);
options ??= new ReaderOptions();
ArchiveType? type;
using (Stream stream = streams.First())
IsArchive(stream, out type); //test and reset stream position
if (type != null)
{
switch (type.Value)
{
case ArchiveType.Zip:
return ZipArchive.Open(streams, options);
case ArchiveType.SevenZip:
return SevenZipArchive.Open(streams, options);
case ArchiveType.GZip:
return GZipArchive.Open(streams, options);
case ArchiveType.Rar:
return RarArchive.Open(streams, options);
case ArchiveType.Tar:
return TarArchive.Open(streams, options);
}
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(string sourceArchive, string destinationDirectory,
ExtractionOptions options = null)
ExtractionOptions? options = null)
{
using (IArchive archive = Open(sourceArchive))
using IArchive archive = Open(sourceArchive);
foreach (IArchiveEntry entry in archive.Entries)
{
entry.WriteToDirectory(destinationDirectory, options);
}
}
public static bool IsArchive(string filePath, out ArchiveType? type)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
using (Stream s = File.OpenRead(filePath))
return IsArchive(s, out type);
}
private static bool IsArchive(Stream stream, out ArchiveType? type)
{
type = null;
stream.CheckNotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException("Stream should be readable and seekable");
}
if (ZipArchive.IsZipFile(stream, null))
type = ArchiveType.Zip;
stream.Seek(0, SeekOrigin.Begin);
if (type == null)
{
if (SevenZipArchive.IsSevenZipFile(stream))
type = ArchiveType.SevenZip;
stream.Seek(0, SeekOrigin.Begin);
}
if (type == null)
{
if (GZipArchive.IsGZipFile(stream))
type = ArchiveType.GZip;
stream.Seek(0, SeekOrigin.Begin);
}
if (type == null)
{
if (RarArchive.IsRarFile(stream))
type = ArchiveType.Rar;
stream.Seek(0, SeekOrigin.Begin);
}
if (type == null)
{
if (TarArchive.IsTarFile(stream))
type = ArchiveType.Tar;
stream.Seek(0, SeekOrigin.Begin);
}
if (type == null) //test multipart zip last, since zip signatures can also appear inside other, non-compressed archive types
{
if (ZipArchive.IsZipMulti(stream)) //test the zip (last) file of a multipart zip
type = ArchiveType.Zip;
stream.Seek(0, SeekOrigin.Begin);
}
return type != null;
}
/// <summary>
/// From a passed in archive (zip, rar, 7z, 001), return all parts.
/// </summary>
/// <param name="part1"></param>
/// <returns></returns>
public static IEnumerable<string> GetFileParts(string part1)
{
part1.CheckNotNullOrEmpty(nameof(part1));
return GetFileParts(new FileInfo(part1)).Select(a => a.FullName);
}
/// <summary>
/// From a passed in archive (zip, rar, 7z, 001), return all parts.
/// </summary>
/// <param name="part1"></param>
/// <returns></returns>
public static IEnumerable<FileInfo> GetFileParts(FileInfo part1)
{
part1.CheckNotNull(nameof(part1));
yield return part1;
int i = 1;
FileInfo? part = RarArchiveVolumeFactory.GetFilePart(i++, part1);
if (part != null)
{
yield return part;
while ((part = RarArchiveVolumeFactory.GetFilePart(i++, part1)) != null) //tests split too
yield return part;
}
else
{
i = 1;
while ((part = ZipArchiveVolumeFactory.GetFilePart(i++, part1)) != null) //tests split too
yield return part;
}
}
#endif
}
}
}
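The new public IsArchive and GetFileParts helpers can be combined to probe a file before choosing an Open overload. A sketch; the path is illustrative, and the output depends on which sibling part files actually exist on disk:

using System;
using SharpCompress.Archives;
using SharpCompress.Common;

if (ArchiveFactory.IsArchive(@"c:\data\backup.001", out ArchiveType? type))
{
    Console.WriteLine($"Detected archive type: {type}");
    // Walks sibling files using the RAR and numeric-split naming rules
    // (.part01.rar, .r00, .001, ...) starting from the supplied first part.
    foreach (string part in ArchiveFactory.GetFileParts(@"c:\data\backup.001"))
    {
        Console.WriteLine(part);
    }
}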

View File

@@ -0,0 +1,29 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Readers;
using System.Linq;
using System.Text;
using SharpCompress.Common.Rar.Headers;
using System.Text.RegularExpressions;
namespace SharpCompress.Archives
{
internal abstract class ArchiveVolumeFactory
{
internal static FileInfo? GetFilePart(int index, FileInfo part1) //base the name on the first part
{
FileInfo? item = null;
//split 001, 002 ...
Match m = Regex.Match(part1.Name, @"^(.*\.)([0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(Path.Combine(part1.DirectoryName!, String.Concat(m.Groups[1].Value, (index + 1).ToString().PadLeft(m.Groups[2].Value.Length, '0'))));
if (item != null && item.Exists)
return item;
return null;
}
}
}
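The regex above recognises only trailing-number split parts and keeps the zero padding of the first part's number. The following standalone sketch (illustration only, not library code) mirrors the same naming rule:

using System;
using System.IO;
using System.Text.RegularExpressions;

static string? NextSplitPart(string firstPart, int index)
{
    // "archive.001" with index 1 maps to "archive.002", preserving the padding width.
    Match m = Regex.Match(Path.GetFileName(firstPart), @"^(.*\.)([0-9]+)$", RegexOptions.IgnoreCase);
    if (!m.Success)
    {
        return null;
    }
    string number = (index + 1).ToString().PadLeft(m.Groups[2].Value.Length, '0');
    return Path.Combine(Path.GetDirectoryName(firstPart) ?? "", m.Groups[1].Value + number);
}

Console.WriteLine(NextSplitPart(@"c:\data\archive.001", 1)); // c:\data\archive.002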

View File

@@ -1,9 +1,10 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
using SharpCompress.Writers;
@@ -13,16 +14,14 @@ namespace SharpCompress.Archives.GZip
{
public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
{
#if !NO_FILE
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(string filePath, ReaderOptions readerOptions = null)
public static GZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.CheckNotNullOrEmpty("filePath");
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
@@ -31,22 +30,45 @@ namespace SharpCompress.Archives.GZip
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull("fileInfo");
return new GZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
fileInfo.CheckNotNull(nameof(fileInfo));
return new GZipArchive(new SourceStream(fileInfo, i => ArchiveVolumeFactory.GetFilePart(i, fileInfo), readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? readerOptions = null)
{
fileInfos.CheckNotNull(nameof(fileInfos));
FileInfo[] files = fileInfos.ToArray();
return new GZipArchive(new SourceStream(files[0], i => i < files.Length ? files[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.CheckNotNull(nameof(streams));
Stream[] strms = streams.ToArray();
return new GZipArchive(new SourceStream(strms[0], i => i < strms.Length ? strms[i] : null, readerOptions ?? new ReaderOptions()));
}
#endif
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
public static GZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull("stream");
return new GZipArchive(stream, readerOptions ?? new ReaderOptions());
stream.CheckNotNull(nameof(stream));
return new GZipArchive(new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions()));
}
public static GZipArchive Create()
@@ -54,23 +76,22 @@ namespace SharpCompress.Archives.GZip
return new GZipArchive();
}
#if !NO_FILE
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal GZipArchive(FileInfo fileInfo, ReaderOptions options)
: base(ArchiveType.GZip, fileInfo, options)
internal GZipArchive(SourceStream srcStream)
: base(ArchiveType.GZip, srcStream)
{
}
protected override IEnumerable<GZipVolume> LoadVolumes(FileInfo file)
protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream srcStream)
{
return new GZipVolume(file, ReaderOptions).AsEnumerable();
srcStream.LoadAllParts();
int idx = 0;
return srcStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, idx++));
}
public static bool IsGZipFile(string filePath)
{
return IsGZipFile(new FileInfo(filePath));
@@ -82,10 +103,9 @@ namespace SharpCompress.Archives.GZip
{
return false;
}
using (Stream stream = fileInfo.OpenRead())
{
return IsGZipFile(stream);
}
using Stream stream = fileInfo.OpenRead();
return IsGZipFile(stream);
}
public void SaveTo(string filePath)
@@ -100,12 +120,11 @@ namespace SharpCompress.Archives.GZip
SaveTo(stream, new WriterOptions(CompressionType.GZip));
}
}
#endif
public static bool IsGZipFile(Stream stream)
{
// read the header on the first read
byte[] header = new byte[10];
Span<byte> header = stackalloc byte[10];
// workitem 8501: handle edge case (decompress empty stream)
if (!stream.ReadFully(header))
@@ -121,16 +140,6 @@ namespace SharpCompress.Archives.GZip
return true;
}
/// <summary>
/// Takes multiple seekable Streams for a multi-part archive
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
internal GZipArchive(Stream stream, ReaderOptions options)
: base(ArchiveType.GZip, stream, options)
{
}
internal GZipArchive()
: base(ArchiveType.GZip)
{
@@ -167,11 +176,6 @@ namespace SharpCompress.Archives.GZip
}
}
protected override IEnumerable<GZipVolume> LoadVolumes(IEnumerable<Stream> streams)
{
return new GZipVolume(streams.First(), ReaderOptions).AsEnumerable();
}
protected override IEnumerable<GZipArchiveEntry> LoadEntries(IEnumerable<GZipVolume> volumes)
{
Stream stream = volumes.Single().Stream;
@@ -185,4 +189,4 @@ namespace SharpCompress.Archives.GZip
return GZipReader.Open(stream);
}
}
}
}
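A consumer-side sketch of the reworked GZip entry points; the SourceStream plumbing is internal, so callers still go through Open. File names are illustrative, and WriteToFile comes from the IArchiveEntry extensions shown later in this diff:

using SharpCompress.Archives;
using SharpCompress.Archives.GZip;

if (GZipArchive.IsGZipFile("data.bin.gz"))
{
    using (var archive = GZipArchive.Open("data.bin.gz"))
    {
        // A .gz stream wraps a single entry.
        foreach (var entry in archive.Entries)
        {
            entry.WriteToFile("data.bin");
        }
    }
}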

View File

@@ -15,7 +15,7 @@ namespace SharpCompress.Archives.GZip
public virtual Stream OpenEntryStream()
{
//this is to reset the stream to be read multiple times
var part = Parts.Single() as GZipFilePart;
var part = (GZipFilePart)Parts.Single();
if (part.GetRawStream().Position != part.EntryStartPosition)
{
part.GetRawStream().Position = part.EntryStartPosition;

View File

@@ -1,4 +1,6 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
@@ -6,7 +8,7 @@ using SharpCompress.IO;
namespace SharpCompress.Archives.GZip
{
internal class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArchiveEntry
internal sealed class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArchiveEntry
{
private readonly bool closeStream;
private readonly Stream stream;
@@ -52,7 +54,7 @@ namespace SharpCompress.Archives.GZip
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return new NonDisposingStream(stream);
return NonDisposingStream.Create(stream);
}
internal override void Close()
@@ -63,4 +65,4 @@ namespace SharpCompress.Archives.GZip
}
}
}
}
}

View File

@@ -1,7 +1,6 @@
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives
{
@@ -9,22 +8,17 @@ namespace SharpCompress.Archives
{
public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWriteTo)
{
if (archiveEntry.Archive.Type == ArchiveType.Rar && archiveEntry.Archive.IsSolid)
{
throw new InvalidFormatException("Cannot use Archive random access on SOLID Rar files.");
}
if (archiveEntry.IsDirectory)
{
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
}
var streamListener = archiveEntry.Archive as IArchiveExtractionListener;
var streamListener = (IArchiveExtractionListener)archiveEntry.Archive;
streamListener.EnsureEntriesLoaded();
streamListener.FireEntryExtractionBegin(archiveEntry);
streamListener.FireFilePartExtractionBegin(archiveEntry.Key, archiveEntry.Size, archiveEntry.CompressedSize);
var entryStream = archiveEntry.OpenEntryStream();
if (entryStream == null)
if (entryStream is null)
{
return;
}
@@ -38,84 +32,32 @@ namespace SharpCompress.Archives
streamListener.FireEntryExtractionEnd(archiveEntry);
}
#if !NO_FILE
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(this IArchiveEntry entry, string destinationDirectory,
ExtractionOptions options = null)
ExtractionOptions? options = null)
{
string destinationFileName;
string file = Path.GetFileName(entry.Key);
string fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
options = options ?? new ExtractionOptions()
{
Overwrite = true
};
if (options.ExtractFullPath)
{
string folder = Path.GetDirectoryName(entry.Key);
string destdir = Path.GetFullPath(
Path.Combine(fullDestinationDirectoryPath, folder)
);
if (!Directory.Exists(destdir))
{
if (!destdir.StartsWith(fullDestinationDirectoryPath))
{
throw new ExtractionException("Entry is trying to create a directory outside of the destination directory.");
}
Directory.CreateDirectory(destdir);
}
destinationFileName = Path.Combine(destdir, file);
}
else
{
destinationFileName = Path.Combine(fullDestinationDirectoryPath, file);
}
if (!entry.IsDirectory)
{
destinationFileName = Path.GetFullPath(destinationFileName);
if (!destinationFileName.StartsWith(fullDestinationDirectoryPath))
{
throw new ExtractionException("Entry is trying to write a file outside of the destination directory.");
}
entry.WriteToFile(destinationFileName, options);
}
ExtractionMethods.WriteEntryToDirectory(entry, destinationDirectory, options,
entry.WriteToFile);
}
/// <summary>
/// Extract to specific file
/// </summary>
public static void WriteToFile(this IArchiveEntry entry, string destinationFileName,
ExtractionOptions options = null)
public static void WriteToFile(this IArchiveEntry entry,
string destinationFileName,
ExtractionOptions? options = null)
{
FileMode fm = FileMode.Create;
options = options ?? new ExtractionOptions()
{
Overwrite = true
};
if (!options.Overwrite)
{
fm = FileMode.CreateNew;
}
using (FileStream fs = File.Open(destinationFileName, fm))
{
entry.WriteTo(fs);
}
entry.PreserveExtractionOptions(destinationFileName, options);
ExtractionMethods.WriteEntryToFile(entry, destinationFileName, options,
(x, fm) =>
{
using (FileStream fs = File.Open(destinationFileName, fm))
{
entry.WriteTo(fs);
}
});
}
#endif
}
}
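WriteToDirectory and WriteToFile now delegate the destination path-safety checks to the shared ExtractionMethods helpers, so typical call sites are unchanged. A sketch (archive name and destination paths illustrative):

using SharpCompress.Archives;
using SharpCompress.Common;

using (var archive = ArchiveFactory.Open("release.zip"))
{
    foreach (IArchiveEntry entry in archive.Entries)
    {
        if (!entry.IsDirectory)
        {
            // ExtractFullPath recreates the entry's directory structure;
            // paths escaping the destination still throw ExtractionException.
            entry.WriteToDirectory(@"c:\out",
                new ExtractionOptions { ExtractFullPath = true, Overwrite = true });
        }
    }
}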

View File

@@ -1,26 +1,20 @@
#if !NO_FILE
using System.Linq;
using SharpCompress.Readers;
#endif
using System.Linq;
using SharpCompress.Common;
namespace SharpCompress.Archives
{
public static class IArchiveExtensions
{
#if !NO_FILE
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(this IArchive archive, string destinationDirectory,
ExtractionOptions options = null)
ExtractionOptions? options = null)
{
foreach (IArchiveEntry entry in archive.Entries.Where(x => !x.IsDirectory))
{
entry.WriteToDirectory(destinationDirectory, options);
}
}
#endif
}
}

View File

@@ -11,5 +11,11 @@ namespace SharpCompress.Archives
IArchiveEntry AddEntry(string key, Stream source, bool closeStream, long size = 0, DateTime? modified = null);
void SaveTo(Stream stream, WriterOptions options);
/// <summary>
/// Use this to pause entry rebuilding when adding large collections of entries. Dispose when complete. A using statement is recommended.
/// </summary>
/// <returns>IDisposable to resume entry rebuilding</returns>
IDisposable PauseEntryRebuilding();
}
}

View File

@@ -1,6 +1,4 @@
#if !NO_FILE
using System;
#endif
using System;
using System.IO;
using SharpCompress.Writers;
@@ -8,8 +6,6 @@ namespace SharpCompress.Archives
{
public static class IWritableArchiveExtensions
{
#if !NO_FILE
public static void AddEntry(this IWritableArchive writableArchive,
string entryPath, string filePath)
{
@@ -39,15 +35,14 @@ namespace SharpCompress.Archives
this IWritableArchive writableArchive,
string filePath, string searchPattern = "*.*", SearchOption searchOption = SearchOption.AllDirectories)
{
#if NET35
foreach (var path in Directory.GetFiles(filePath, searchPattern, searchOption))
#else
foreach (var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption))
#endif
using (writableArchive.PauseEntryRebuilding())
{
var fileInfo = new FileInfo(path);
writableArchive.AddEntry(path.Substring(filePath.Length), fileInfo.OpenRead(), true, fileInfo.Length,
fileInfo.LastWriteTime);
foreach (var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption))
{
var fileInfo = new FileInfo(path);
writableArchive.AddEntry(path.Substring(filePath.Length), fileInfo.OpenRead(), true, fileInfo.Length,
fileInfo.LastWriteTime);
}
}
}
public static IArchiveEntry AddEntry(this IWritableArchive writableArchive, string key, FileInfo fileInfo)
@@ -58,6 +53,5 @@ namespace SharpCompress.Archives
}
return writableArchive.AddEntry(key, fileInfo.OpenRead(), true, fileInfo.Length, fileInfo.LastWriteTime);
}
#endif
}
}
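AddAllFromDirectory now wraps the whole enumeration in PauseEntryRebuilding, so the entry collection is rebuilt once rather than once per file. A usage sketch (source and output paths illustrative):

using System.IO;
using SharpCompress.Archives;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
using SharpCompress.Writers;

using (var archive = TarArchive.Create())
{
    // Entries are batched; the collection is rebuilt once when the pause token is disposed.
    archive.AddAllFromDirectory(@"c:\staging");
    using (var output = File.Create(@"c:\out\staging.tar"))
    {
        archive.SaveTo(output, new WriterOptions(CompressionType.None));
    }
}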

View File

@@ -1,7 +1,6 @@
#if !NO_FILE
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
@@ -14,11 +13,11 @@ namespace SharpCompress.Archives.Rar
/// </summary>
internal class FileInfoRarArchiveVolume : RarVolume
{
internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options)
: base(StreamingMode.Seekable, fileInfo.OpenRead(), FixOptions(options))
internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options, int index = 0)
: base(StreamingMode.Seekable, fileInfo.OpenRead(), FixOptions(options), index)
{
FileInfo = fileInfo;
FileParts = GetVolumeFileParts().ToReadOnly();
FileParts = GetVolumeFileParts().ToArray().ToReadOnly();
}
private static ReaderOptions FixOptions(ReaderOptions options)
@@ -43,4 +42,3 @@ namespace SharpCompress.Archives.Rar
}
}
}
#endif

View File

@@ -1,14 +1,12 @@
#if !NO_FILE
using System.IO;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Archives.Rar
{
internal class FileInfoRarFilePart : SeekableFilePart
internal sealed class FileInfoRarFilePart : SeekableFilePart
{
internal FileInfoRarFilePart(FileInfoRarArchiveVolume volume, string password, MarkHeader mh, FileHeader fh, FileInfo fi)
: base(mh, fh, volume.Stream, password)
internal FileInfoRarFilePart(FileInfoRarArchiveVolume volume, string? password, MarkHeader mh, FileHeader fh, FileInfo fi)
: base(mh, fh, volume.Index, volume.Stream, password)
{
FileInfo = fi;
}
@@ -25,4 +23,3 @@ namespace SharpCompress.Archives.Rar
}
}
}
#endif

View File

@@ -5,52 +5,49 @@ using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressors.Rar;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Rar;
namespace SharpCompress.Archives.Rar
{
public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
{
internal Lazy<IRarUnpack> UnpackV2017 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV2017.Unpack());
internal Lazy<IRarUnpack> UnpackV1 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV1.Unpack());
#if !NO_FILE
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal RarArchive(FileInfo fileInfo, ReaderOptions options)
: base(ArchiveType.Rar, fileInfo, options)
{
}
protected override IEnumerable<RarVolume> LoadVolumes(FileInfo file)
{
return RarArchiveVolumeFactory.GetParts(file, ReaderOptions);
}
#endif
/// <summary>
/// Takes multiple seekable Streams for a multi-part archive
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
internal RarArchive(IEnumerable<Stream> streams, ReaderOptions options)
: base(ArchiveType.Rar, streams, options)
internal RarArchive(SourceStream srcStream)
: base(ArchiveType.Rar, srcStream)
{
}
protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes)
{
return RarArchiveEntryFactory.GetEntries(this, volumes);
return RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions);
}
protected override IEnumerable<RarVolume> LoadVolumes(IEnumerable<Stream> streams)
protected override IEnumerable<RarVolume> LoadVolumes(SourceStream srcStream)
{
return RarArchiveVolumeFactory.GetParts(streams, ReaderOptions);
base.SrcStream.LoadAllParts(); //request all streams
Stream[] streams = base.SrcStream.Streams.ToArray();
int idx = 0;
if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions)) //test part 2 - true = multipart not split
{
base.SrcStream.IsVolumes = true;
streams[1].Position = 0;
base.SrcStream.Position = 0;
return srcStream.Streams.Select(a => new StreamRarArchiveVolume(a, ReaderOptions, idx++));
}
else //split mode or single file
return new StreamRarArchiveVolume(base.SrcStream, ReaderOptions, idx++).AsEnumerable();
}
protected override IReader CreateReaderForSolidExtraction()
@@ -62,19 +59,20 @@ namespace SharpCompress.Archives.Rar
public override bool IsSolid => Volumes.First().IsSolidArchive;
public virtual int MinVersion => Volumes.First().MinVersion;
public virtual int MaxVersion => Volumes.First().MaxVersion;
#region Creation
#if !NO_FILE
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
public static RarArchive Open(string filePath, ReaderOptions options = null)
public static RarArchive Open(string filePath, ReaderOptions? options = null)
{
filePath.CheckNotNullOrEmpty("filePath");
return new RarArchive(new FileInfo(filePath), options ?? new ReaderOptions());
filePath.CheckNotNullOrEmpty(nameof(filePath));
FileInfo fileInfo = new FileInfo(filePath);
return new RarArchive(new SourceStream(fileInfo, i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo), options ?? new ReaderOptions()));
}
/// <summary>
@@ -82,36 +80,48 @@ namespace SharpCompress.Archives.Rar
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
public static RarArchive Open(FileInfo fileInfo, ReaderOptions options = null)
public static RarArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
{
fileInfo.CheckNotNull("fileInfo");
return new RarArchive(fileInfo, options ?? new ReaderOptions());
fileInfo.CheckNotNull(nameof(fileInfo));
return new RarArchive(new SourceStream(fileInfo, i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo), options ?? new ReaderOptions()));
}
#endif
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
public static RarArchive Open(Stream stream, ReaderOptions options = null)
public static RarArchive Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull("stream");
return Open(stream.AsEnumerable(), options ?? new ReaderOptions());
stream.CheckNotNull(nameof(stream));
return new RarArchive(new SourceStream(stream, i => null, options ?? new ReaderOptions()));
}
/// <summary>
/// Takes multiple seekable Streams for a multi-part archive
/// Constructor with all file parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions options = null)
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static RarArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? readerOptions = null)
{
streams.CheckNotNull("streams");
return new RarArchive(streams, options ?? new ReaderOptions());
fileInfos.CheckNotNull(nameof(fileInfos));
FileInfo[] files = fileInfos.ToArray();
return new RarArchive(new SourceStream(files[0], i => i < files.Length ? files[i] : null, readerOptions ?? new ReaderOptions()));
}
#if !NO_FILE
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.CheckNotNull(nameof(streams));
Stream[] strms = streams.ToArray();
return new RarArchive(new SourceStream(strms[0], i => i < strms.Length ? strms[i] : null, readerOptions ?? new ReaderOptions()));
}
public static bool IsRarFile(string filePath)
{
return IsRarFile(new FileInfo(filePath));
@@ -128,9 +138,8 @@ namespace SharpCompress.Archives.Rar
return IsRarFile(stream);
}
}
#endif
public static bool IsRarFile(Stream stream, ReaderOptions options = null)
public static bool IsRarFile(Stream stream, ReaderOptions? options = null)
{
try
{
@@ -145,4 +154,4 @@ namespace SharpCompress.Archives.Rar
#endregion
}
}
}
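With the SourceStream rework, Open(string) and Open(FileInfo) locate further volumes through RarArchiveVolumeFactory.GetFilePart, so passing the first part is enough; the IEnumerable overloads are for explicitly supplied parts. A sketch (file names and paths illustrative):

using System;
using System.Linq;
using SharpCompress.Archives;
using SharpCompress.Archives.Rar;

// Part two, three, ... are located automatically from the first part's name.
using (var archive = RarArchive.Open(@"c:\data\movie.part01.rar"))
{
    Console.WriteLine($"RAR versions {archive.MinVersion}-{archive.MaxVersion}, solid: {archive.IsSolid}");
    foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
    {
        entry.WriteToDirectory(@"c:\out"); // for solid archives use ExtractAllEntries instead
    }
}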

View File

@@ -6,6 +6,7 @@ using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressors.Rar;
using SharpCompress.Readers;
namespace SharpCompress.Archives.Rar
{
@@ -13,11 +14,14 @@ namespace SharpCompress.Archives.Rar
{
private readonly ICollection<RarFilePart> parts;
private readonly RarArchive archive;
private readonly ReaderOptions readerOptions;
internal RarArchiveEntry(RarArchive archive, IEnumerable<RarFilePart> parts)
internal RarArchiveEntry(RarArchive archive, IEnumerable<RarFilePart> parts, ReaderOptions readerOptions)
{
this.parts = parts.ToList();
this.archive = archive;
this.readerOptions = readerOptions;
this.IsSolid = this.FileHeader.IsSolid;
}
public override CompressionType CompressionType => CompressionType.Rar;
@@ -57,33 +61,29 @@ namespace SharpCompress.Archives.Rar
public Stream OpenEntryStream()
{
if (archive.IsSolid)
{
throw new InvalidOperationException("Use ExtractAllEntries to extract SOLID archives.");
}
if (IsRarV3)
{
return new RarStream(archive.UnpackV1.Value, FileHeader, new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive));
}
return new RarStream(archive.UnpackV2017.Value, FileHeader, new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive));
}
public bool IsComplete
{
get
{
return parts.Select(fp => fp.FileHeader).Any(fh => !fh.IsSplitAfter);
}
public bool IsComplete
{
get
{
var headers = parts.Select(x => x.FileHeader);
return !headers.First().IsSplitBefore && !headers.Last().IsSplitAfter;
}
}
private void CheckIncomplete()
{
if (!IsComplete)
if (!readerOptions.DisableCheckIncomplete && !IsComplete)
{
throw new IncompleteArchiveException("ArchiveEntry is incomplete and cannot perform this operation.");
}
}
}
}
}
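CheckIncomplete now honours a DisableCheckIncomplete reader option, which lets callers attempt extraction from a set with a missing volume. A hedged sketch, assuming the option is a settable property on ReaderOptions as its use above implies (file names illustrative):

using SharpCompress.Archives;
using SharpCompress.Archives.Rar;
using SharpCompress.Readers;

var options = new ReaderOptions { DisableCheckIncomplete = true };
using (var archive = RarArchive.Open(@"c:\data\damaged.part01.rar", options))
{
    foreach (var entry in archive.Entries)
    {
        // Without the flag, an entry spanning a missing volume throws
        // IncompleteArchiveException before extraction starts.
        entry.WriteToDirectory(@"c:\salvage");
    }
}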

View File

@@ -1,5 +1,6 @@
using System.Collections.Generic;
using SharpCompress.Common.Rar;
using SharpCompress.Readers;
namespace SharpCompress.Archives.Rar
{
@@ -36,11 +37,12 @@ namespace SharpCompress.Archives.Rar
}
internal static IEnumerable<RarArchiveEntry> GetEntries(RarArchive archive,
IEnumerable<RarVolume> rarParts)
IEnumerable<RarVolume> rarParts,
ReaderOptions readerOptions)
{
foreach (var groupedParts in GetMatchedFileParts(rarParts))
{
yield return new RarArchiveEntry(archive, groupedParts);
yield return new RarArchiveEntry(archive, groupedParts, readerOptions);
}
}
}

View File

@@ -1,147 +1,40 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Readers;
#if !NO_FILE
using System.Linq;
using System.Text;
using SharpCompress.Common.Rar.Headers;
#endif
using System.Text.RegularExpressions;
namespace SharpCompress.Archives.Rar
{
internal static class RarArchiveVolumeFactory
{
internal static IEnumerable<RarVolume> GetParts(IEnumerable<Stream> streams, ReaderOptions options)
internal static FileInfo? GetFilePart(int index, FileInfo part1) //base the name on the first part
{
foreach (Stream s in streams)
{
if (!s.CanRead || !s.CanSeek)
{
throw new ArgumentException("Stream is not readable and seekable");
}
StreamRarArchiveVolume part = new StreamRarArchiveVolume(s, options);
yield return part;
}
}
FileInfo? item = null;
#if !NO_FILE
internal static IEnumerable<RarVolume> GetParts(FileInfo fileInfo, ReaderOptions options)
{
FileInfoRarArchiveVolume part = new FileInfoRarArchiveVolume(fileInfo, options);
yield return part;
ArchiveHeader ah = part.ArchiveHeader;
if (!ah.IsVolume)
{
yield break; //if file isn't volume then there is no reason to look
}
fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart);
//we use fileinfo because rar is dumb and looks at file names rather than archive info for another volume
while (fileInfo != null && fileInfo.Exists)
{
part = new FileInfoRarArchiveVolume(fileInfo, options);
fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart);
yield return part;
}
}
private static FileInfo GetNextFileInfo(ArchiveHeader ah, FileInfoRarFilePart currentFilePart)
{
if (currentFilePart == null)
{
return null;
}
bool oldNumbering = ah.OldNumberingFormat
|| currentFilePart.MarkHeader.OldNumberingFormat;
if (oldNumbering)
{
return FindNextFileWithOldNumbering(currentFilePart.FileInfo);
}
//new style rar - ..part1 | /part01 | part001 ....
Match m = Regex.Match(part1.Name, @"^(.*\.part)([0-9]+)(\.rar)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(Path.Combine(part1.DirectoryName!, String.Concat(m.Groups[1].Value, (index + 1).ToString().PadLeft(m.Groups[2].Value.Length, '0'), m.Groups[3].Value)));
else
{
return FindNextFileWithNewNumbering(currentFilePart.FileInfo);
//old style - ...rar, .r00, .r01 ...
m = Regex.Match(part1.Name, @"^(.*\.r)(ar|[0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(Path.Combine(part1.DirectoryName!, String.Concat(m.Groups[1].Value, index == 0 ? "ar" : (index - 1).ToString().PadLeft(m.Groups[2].Value.Length, '0'))));
else //split .001, .002 ....
return ArchiveVolumeFactory.GetFilePart(index, part1);
}
if (item != null && item.Exists)
return item;
return null; //no more items
}
private static FileInfo FindNextFileWithOldNumbering(FileInfo currentFileInfo)
{
// .rar, .r00, .r01, ...
string extension = currentFileInfo.Extension;
StringBuilder buffer = new StringBuilder(currentFileInfo.FullName.Length);
buffer.Append(currentFileInfo.FullName.Substring(0,
currentFileInfo.FullName.Length - extension.Length));
if (string.Compare(extension, ".rar", StringComparison.OrdinalIgnoreCase) == 0)
{
buffer.Append(".r00");
}
else
{
int num = 0;
if (int.TryParse(extension.Substring(2, 2), out num))
{
num++;
buffer.Append(".r");
if (num < 10)
{
buffer.Append('0');
}
buffer.Append(num);
}
else
{
ThrowInvalidFileName(currentFileInfo);
}
}
return new FileInfo(buffer.ToString());
}
private static FileInfo FindNextFileWithNewNumbering(FileInfo currentFileInfo)
{
// part1.rar, part2.rar, ...
string extension = currentFileInfo.Extension;
if (string.Compare(extension, ".rar", StringComparison.OrdinalIgnoreCase) != 0)
{
throw new ArgumentException("Invalid extension, expected 'rar': " + currentFileInfo.FullName);
}
int startIndex = currentFileInfo.FullName.LastIndexOf(".part");
if (startIndex < 0)
{
ThrowInvalidFileName(currentFileInfo);
}
StringBuilder buffer = new StringBuilder(currentFileInfo.FullName.Length);
buffer.Append(currentFileInfo.FullName, 0, startIndex);
int num = 0;
string numString = currentFileInfo.FullName.Substring(startIndex + 5,
currentFileInfo.FullName.IndexOf('.', startIndex + 5) -
startIndex - 5);
buffer.Append(".part");
if (int.TryParse(numString, out num))
{
num++;
for (int i = 0; i < numString.Length - num.ToString().Length; i++)
{
buffer.Append('0');
}
buffer.Append(num);
}
else
{
ThrowInvalidFileName(currentFileInfo);
}
buffer.Append(".rar");
return new FileInfo(buffer.ToString());
}
private static void ThrowInvalidFileName(FileInfo fileInfo)
{
throw new ArgumentException("Filename invalid or next archive could not be found:"
+ fileInfo.FullName);
}
#endif
}
}
}

View File

@@ -1,4 +1,4 @@
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
@@ -7,10 +7,10 @@ namespace SharpCompress.Archives.Rar
internal class SeekableFilePart : RarFilePart
{
private readonly Stream stream;
private readonly string password;
private readonly string? password;
internal SeekableFilePart(MarkHeader mh, FileHeader fh, Stream stream, string password)
: base(mh, fh)
internal SeekableFilePart(MarkHeader mh, FileHeader fh, int index, Stream stream, string? password)
: base(mh, fh, index)
{
this.stream = stream;
this.password = password;
@@ -19,15 +19,13 @@ namespace SharpCompress.Archives.Rar
internal override Stream GetCompressedStream()
{
stream.Position = FileHeader.DataStartPosition;
#if !NO_CRYPTO
if (FileHeader.R4Salt != null)
{
return new RarCryptoWrapper(stream, password, FileHeader.R4Salt);
return new RarCryptoWrapper(stream, password!, FileHeader.R4Salt);
}
#endif
return stream;
}
internal override string FilePartName => "Unknown Stream - File Entry: " + FileHeader.FileName;
}
}
}

View File

@@ -1,4 +1,4 @@
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
@@ -9,8 +9,8 @@ namespace SharpCompress.Archives.Rar
{
internal class StreamRarArchiveVolume : RarVolume
{
internal StreamRarArchiveVolume(Stream stream, ReaderOptions options)
: base(StreamingMode.Seekable, stream, options)
internal StreamRarArchiveVolume(Stream stream, ReaderOptions options, int index = 0)
: base(StreamingMode.Seekable, stream, options, index)
{
}
@@ -21,7 +21,7 @@ namespace SharpCompress.Archives.Rar
internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader)
{
return new SeekableFilePart(markHeader, fileHeader, Stream, ReaderOptions.Password);
return new SeekableFilePart(markHeader, fileHeader, this.Index, Stream, ReaderOptions.Password);
}
}
}
}

View File

@@ -1,4 +1,6 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -13,8 +15,6 @@ namespace SharpCompress.Archives.SevenZip
public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVolume>
{
private ArchiveDatabase database;
#if !NO_FILE
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
@@ -34,9 +34,33 @@ namespace SharpCompress.Archives.SevenZip
public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
{
fileInfo.CheckNotNull("fileInfo");
return new SevenZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
return new SevenZipArchive(new SourceStream(fileInfo, i => ArchiveVolumeFactory.GetFilePart(i, fileInfo), readerOptions ?? new ReaderOptions()));
}
#endif
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions readerOptions = null)
{
fileInfos.CheckNotNull(nameof(fileInfos));
FileInfo[] files = fileInfos.ToArray();
return new SevenZipArchive(new SourceStream(files[0], i => i < files.Length ? files[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(IEnumerable<Stream> streams, ReaderOptions readerOptions = null)
{
streams.CheckNotNull(nameof(streams));
Stream[] strms = streams.ToArray();
return new SevenZipArchive(new SourceStream(strms[0], i => i < strms.Length ? strms[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
@@ -45,18 +69,24 @@ namespace SharpCompress.Archives.SevenZip
public static SevenZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
{
stream.CheckNotNull("stream");
return new SevenZipArchive(stream, readerOptions ?? new ReaderOptions());
return new SevenZipArchive(new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions()));
}
#if !NO_FILE
internal SevenZipArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.SevenZip, fileInfo, readerOptions)
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal SevenZipArchive(SourceStream srcStream)
: base(ArchiveType.SevenZip, srcStream)
{
}
protected override IEnumerable<SevenZipVolume> LoadVolumes(FileInfo file)
protected override IEnumerable<SevenZipVolume> LoadVolumes(SourceStream srcStream)
{
return new SevenZipVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
base.SrcStream.LoadAllParts(); //request all streams
int idx = 0;
return new SevenZipVolume(srcStream, ReaderOptions, idx++).AsEnumerable(); //simple single volume or split, multivolume not supported
}
public static bool IsSevenZipFile(string filePath)
@@ -75,45 +105,38 @@ namespace SharpCompress.Archives.SevenZip
return IsSevenZipFile(stream);
}
}
#endif
internal SevenZipArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.SevenZip, stream.AsEnumerable(), readerOptions)
{
}
internal SevenZipArchive()
: base(ArchiveType.SevenZip)
{
}
protected override IEnumerable<SevenZipVolume> LoadVolumes(IEnumerable<Stream> streams)
{
foreach (Stream s in streams)
{
if (!s.CanRead || !s.CanSeek)
{
throw new ArgumentException("Stream is not readable and seekable");
}
SevenZipVolume volume = new SevenZipVolume(s, ReaderOptions);
yield return volume;
}
}
protected override IEnumerable<SevenZipArchiveEntry> LoadEntries(IEnumerable<SevenZipVolume> volumes)
{
var stream = volumes.Single().Stream;
LoadFactory(stream);
var entries = new SevenZipArchiveEntry[database._files.Count];
for (int i = 0; i < database._files.Count; i++)
{
var file = database._files[i];
yield return new SevenZipArchiveEntry(this, new SevenZipFilePart(stream, database, i, file, ReaderOptions.ArchiveEncoding));
entries[i] = new SevenZipArchiveEntry(this, new SevenZipFilePart(stream, database, i, file, ReaderOptions.ArchiveEncoding));
}
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
{
var isSolid = false;
foreach (var entry in group)
{
entry.IsSolid = isSolid;
isSolid = true; //mark others in this group as solid - same as rar behaviour.
}
}
return entries;
}
private void LoadFactory(Stream stream)
{
if (database == null)
if (database is null)
{
stream.Position = 0;
var reader = new ArchiveReader();
@@ -134,13 +157,13 @@ namespace SharpCompress.Archives.SevenZip
}
}
private static readonly byte[] SIGNATURE = {(byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C};
private static ReadOnlySpan<byte> SIGNATURE => new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };
private static bool SignatureMatch(Stream stream)
{
BinaryReader reader = new BinaryReader(stream);
byte[] signatureBytes = reader.ReadBytes(6);
return signatureBytes.BinaryEquals(SIGNATURE);
ReadOnlySpan<byte> signatureBytes = reader.ReadBytes(6);
return signatureBytes.SequenceEqual(SIGNATURE);
}
protected override IReader CreateReaderForSolidExtraction()
@@ -159,7 +182,7 @@ namespace SharpCompress.Archives.SevenZip
}
}
private class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
private sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
{
private readonly SevenZipArchive archive;
private CFolder currentFolder;
@@ -185,7 +208,7 @@ namespace SharpCompress.Archives.SevenZip
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
{
currentFolder = group.Key;
if (group.Key == null)
if (group.Key is null)
{
currentStream = Stream.Null;
}
@@ -206,7 +229,7 @@ namespace SharpCompress.Archives.SevenZip
return CreateEntryStream(new ReadOnlySubStream(currentStream, currentItem.Size));
}
}
private class PasswordProvider : IPasswordProvider
{
private readonly string _password;
@@ -214,7 +237,6 @@ namespace SharpCompress.Archives.SevenZip
public PasswordProvider(string password)
{
_password = password;
}
public string CryptoGetTextPassword()
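Entries within one 7z folder are decoded as a solid group (note the IsSolid marking above), so sequential extraction via ExtractAllEntries is the efficient path rather than per-entry random access. A sketch (archive name and destination illustrative):

using SharpCompress.Archives.SevenZip;
using SharpCompress.Common;
using SharpCompress.Readers;

using (var archive = SevenZipArchive.Open("bundle.7z"))
using (IReader reader = archive.ExtractAllEntries())
{
    while (reader.MoveToNextEntry())
    {
        if (!reader.Entry.IsDirectory)
        {
            reader.WriteEntryToDirectory(@"c:\out",
                new ExtractionOptions { ExtractFullPath = true, Overwrite = true });
        }
    }
}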

View File

@@ -1,4 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -15,16 +15,14 @@ namespace SharpCompress.Archives.Tar
{
public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
{
#if !NO_FILE
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(string filePath, ReaderOptions readerOptions = null)
public static TarArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.CheckNotNullOrEmpty("filePath");
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
@@ -33,26 +31,47 @@ namespace SharpCompress.Archives.Tar
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
public static TarArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull("fileInfo");
return new TarArchive(fileInfo, readerOptions ?? new ReaderOptions());
fileInfo.CheckNotNull(nameof(fileInfo));
return new TarArchive(new SourceStream(fileInfo, i => ArchiveVolumeFactory.GetFilePart(i, fileInfo), readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? readerOptions = null)
{
fileInfos.CheckNotNull(nameof(fileInfos));
FileInfo[] files = fileInfos.ToArray();
return new TarArchive(new SourceStream(files[0], i => i < files.Length ? files[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.CheckNotNull(nameof(streams));
Stream[] strms = streams.ToArray();
return new TarArchive(new SourceStream(strms[0], i => i < strms.Length ? strms[i] : null, readerOptions ?? new ReaderOptions()));
}
#endif
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(Stream stream, ReaderOptions readerOptions = null)
public static TarArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull("stream");
return new TarArchive(stream, readerOptions ?? new ReaderOptions());
stream.CheckNotNull(nameof(stream));
return new TarArchive(new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions()));
}
#if !NO_FILE
public static bool IsTarFile(string filePath)
{
return IsTarFile(new FileInfo(filePath));
@@ -69,15 +88,15 @@ namespace SharpCompress.Archives.Tar
return IsTarFile(stream);
}
}
#endif
public static bool IsTarFile(Stream stream)
{
try
{
TarHeader tar = new TarHeader(new ArchiveEncoding());
tar.Read(new BinaryReader(stream));
return tar.Name.Length > 0 && Enum.IsDefined(typeof(EntryType), tar.EntryType);
TarHeader tarHeader = new TarHeader(new ArchiveEncoding());
bool readSucceeded = tarHeader.Read(new BinaryReader(stream));
bool isEmptyArchive = tarHeader.Name.Length == 0 && tarHeader.Size == 0 && Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;
}
catch
{
@@ -85,31 +104,20 @@ namespace SharpCompress.Archives.Tar
return false;
}
#if !NO_FILE
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
internal TarArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.Tar, fileInfo, readerOptions)
protected override IEnumerable<TarVolume> LoadVolumes(SourceStream srcStream)
{
base.SrcStream.LoadAllParts(); //request all streams
int idx = 0;
return new TarVolume(srcStream, ReaderOptions, idx++).AsEnumerable(); //simple single volume or split, multivolume not supported
}
protected override IEnumerable<TarVolume> LoadVolumes(FileInfo file)
{
return new TarVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
}
#endif
/// <summary>
/// Takes multiple seekable Streams for a multi-part archive
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
internal TarArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.Tar, stream, readerOptions)
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal TarArchive(SourceStream srcStream)
: base(ArchiveType.Tar, srcStream)
{
}
@@ -118,16 +126,11 @@ namespace SharpCompress.Archives.Tar
{
}
protected override IEnumerable<TarVolume> LoadVolumes(IEnumerable<Stream> streams)
{
return new TarVolume(streams.First(), ReaderOptions).AsEnumerable();
}
protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
{
Stream stream = volumes.Single().Stream;
TarHeader previousHeader = null;
foreach (TarHeader header in TarHeaderFactory.ReadHeader(StreamingMode.Seekable, stream, ReaderOptions.ArchiveEncoding))
TarHeader? previousHeader = null;
foreach (TarHeader? header in TarHeaderFactory.ReadHeader(StreamingMode.Seekable, stream, ReaderOptions.ArchiveEncoding))
{
if (header != null)
{
@@ -202,4 +205,4 @@ namespace SharpCompress.Archives.Tar
return TarReader.Open(stream);
}
}
}
}
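
A minimal usage sketch of the new multi-part overloads (file names are hypothetical; Open(IEnumerable&lt;FileInfo&gt;) is the overload added above, and the other two work the same way):

using System.Collections.Generic;
using System.IO;
using SharpCompress.Archives.Tar;

// Parts of a tar that was split into sequential volumes (hypothetical names).
var parts = new List<FileInfo>
{
    new FileInfo("backup.tar.001"),
    new FileInfo("backup.tar.002"),
};
using (TarArchive archive = TarArchive.Open(parts))
{
    foreach (var entry in archive.Entries)
    {
        // Entries that cross a part boundary are read through the SourceStream transparently.
        System.Console.WriteLine(entry.Key);
    }
}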


@@ -1,4 +1,6 @@
using System;
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
@@ -6,7 +8,7 @@ using SharpCompress.IO;
namespace SharpCompress.Archives.Tar
{
internal class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiveEntry
internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiveEntry
{
private readonly bool closeStream;
private readonly Stream stream;
@@ -51,7 +53,7 @@ namespace SharpCompress.Archives.Tar
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return new NonDisposingStream(stream);
return NonDisposingStream.Create(stream);
}
internal override void Close()
@@ -62,4 +64,4 @@ namespace SharpCompress.Archives.Tar
}
}
}
}
}
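
The switch from new NonDisposingStream(stream) to NonDisposingStream.Create(stream) implies a factory that avoids wrapping an already-wrapped stream. The factory body is not part of this diff; a plausible sketch of the guard, stated as an assumption:

public static NonDisposingStream Create(Stream stream)
{
    // Assumed behaviour: reuse an existing wrapper rather than nesting
    // NonDisposingStream inside NonDisposingStream.
    if (stream is NonDisposingStream nonDisposing)
    {
        return nonDisposing;
    }
    return new NonDisposingStream(stream);
}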


@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -6,6 +6,7 @@ using SharpCompress.Common;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Deflate;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Zip;
using SharpCompress.Writers;
@@ -15,7 +16,9 @@ namespace SharpCompress.Archives.Zip
{
public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
{
#nullable disable
private readonly SeekableZipHeaderFactory headerFactory;
#nullable enable
/// <summary>
/// Gets or sets the compression level applied to files added to the archive,
@@ -23,16 +26,25 @@ namespace SharpCompress.Archives.Zip
/// </summary>
public CompressionLevel DeflateCompressionLevel { get; set; }
#if !NO_FILE
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal ZipArchive(SourceStream srcStream)
: base(ArchiveType.Zip, srcStream)
{
headerFactory = new SeekableZipHeaderFactory(srcStream.ReaderOptions.Password, srcStream.ReaderOptions.ArchiveEncoding);
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(string filePath, ReaderOptions readerOptions = null)
public static ZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.CheckNotNullOrEmpty("filePath");
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
@@ -41,32 +53,53 @@ namespace SharpCompress.Archives.Zip
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull("fileInfo");
return new ZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
fileInfo.CheckNotNull(nameof(fileInfo));
return new ZipArchive(new SourceStream(fileInfo, i => ZipArchiveVolumeFactory.GetFilePart(i, fileInfo), readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? readerOptions = null)
{
fileInfos.CheckNotNull(nameof(fileInfos));
FileInfo[] files = fileInfos.ToArray();
return new ZipArchive(new SourceStream(files[0], i => i < files.Length ? files[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.CheckNotNull(nameof(streams));
Stream[] strms = streams.ToArray();
return new ZipArchive(new SourceStream(strms[0], i => i < strms.Length ? strms[i] : null, readerOptions ?? new ReaderOptions()));
}
#endif
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
public static ZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull("stream");
return new ZipArchive(stream, readerOptions ?? new ReaderOptions());
stream.CheckNotNull(nameof(stream));
return new ZipArchive(new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions()));
}
#if !NO_FILE
public static bool IsZipFile(string filePath, string password = null)
public static bool IsZipFile(string filePath, string? password = null)
{
return IsZipFile(new FileInfo(filePath), password);
}
public static bool IsZipFile(FileInfo fileInfo, string password = null)
public static bool IsZipFile(FileInfo fileInfo, string? password = null)
{
if (!fileInfo.Exists)
{
@@ -77,16 +110,14 @@ namespace SharpCompress.Archives.Zip
return IsZipFile(stream, password);
}
}
#endif
public static bool IsZipFile(Stream stream, string password = null)
public static bool IsZipFile(Stream stream, string? password = null)
{
StreamingZipHeaderFactory headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding());
try
{
ZipHeader header =
headerFactory.ReadStreamHeader(stream).FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header == null)
ZipHeader? header = headerFactory.ReadStreamHeader(stream).FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
return false;
}
@@ -102,70 +133,98 @@ namespace SharpCompress.Archives.Zip
}
}
#if !NO_FILE
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
internal ZipArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.Zip, fileInfo, readerOptions)
public static bool IsZipMulti(Stream stream, string? password = null)
{
headerFactory = new SeekableZipHeaderFactory(readerOptions.Password, readerOptions.ArchiveEncoding);
StreamingZipHeaderFactory headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding());
try
{
ZipHeader? header = headerFactory.ReadStreamHeader(stream).FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
if (stream.CanSeek) //could be multipart. Test for central directory - might not be z64 safe
{
SeekableZipHeaderFactory z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
var x = z.ReadSeekableHeader(stream).FirstOrDefault();
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
}
else
return false;
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
protected override IEnumerable<ZipVolume> LoadVolumes(FileInfo file)
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream srcStream)
{
return new ZipVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
base.SrcStream.LoadAllParts(); //request all streams
base.SrcStream.Position = 0;
List<Stream> streams = base.SrcStream.Streams.ToList();
int idx = 0;
if (streams.Count > 1) //test part 2 - true = multipart not split
{
streams[1].Position += 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
bool isZip = IsZipFile(streams[1], ReaderOptions.Password);
streams[1].Position -= 4;
if (isZip)
{
base.SrcStream.IsVolumes = true;
var tmp = streams[0]; //arrives as zip, z01 ...; swap the zip to the end
streams.RemoveAt(0);
streams.Add(tmp);
//streams[0].Position = 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
return streams.Select(a => new ZipVolume(a, ReaderOptions, idx++));
}
}
//split mode or single file
return new ZipVolume(base.SrcStream, ReaderOptions, idx++).AsEnumerable();
}
#endif
internal ZipArchive()
: base(ArchiveType.Zip)
{
}
/// <summary>
/// Takes multiple seekable Streams for a multi-part archive
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
internal ZipArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.Zip, stream, readerOptions)
{
headerFactory = new SeekableZipHeaderFactory(readerOptions.Password, readerOptions.ArchiveEncoding);
}
protected override IEnumerable<ZipVolume> LoadVolumes(IEnumerable<Stream> streams)
{
return new ZipVolume(streams.First(), ReaderOptions).AsEnumerable();
}
protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
{
var volume = volumes.Single();
Stream stream = volume.Stream;
foreach (ZipHeader h in headerFactory.ReadSeekableHeader(stream))
var vols = volumes.ToArray();
foreach (ZipHeader h in headerFactory.ReadSeekableHeader(vols.Last().Stream))
{
if (h != null)
{
switch (h.ZipHeaderType)
{
case ZipHeaderType.DirectoryEntry:
{
DirectoryEntryHeader deh = (DirectoryEntryHeader)h;
Stream s;
if (deh.RelativeOffsetOfEntryHeader + deh.CompressedSize > vols[deh.DiskNumberStart].Stream.Length)
{
yield return new ZipArchiveEntry(this,
new SeekableZipFilePart(headerFactory,
h as DirectoryEntryHeader,
stream));
var v = vols.Skip(deh.DiskNumberStart).ToArray();
s = new SourceStream(v[0].Stream, i => i < v.Length ? v[i].Stream : null, new ReaderOptions() { LeaveStreamOpen = true });
}
break;
else
s = vols[deh.DiskNumberStart].Stream;
yield return new ZipArchiveEntry(this, new SeekableZipFilePart(headerFactory, deh, s));
}
break;
case ZipHeaderType.DirectoryEnd:
{
byte[] bytes = (h as DirectoryEndHeader).Comment;
volume.Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
yield break;
}
{
byte[] bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
volumes.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
yield break;
}
}
}
}
@@ -211,4 +270,4 @@ namespace SharpCompress.Archives.Zip
return ZipReader.Open(stream, ReaderOptions);
}
}
}
}
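
Taken together, these changes let ZipArchive reassemble both split sets (.001/.002) and multi-volume sets (.z01/.z02 with the .zip last). A usage sketch with hypothetical file names; opening the first part is enough because the volume factory locates the siblings:

using System.IO;
using SharpCompress.Archives.Zip;

using (var archive = ZipArchive.Open(new FileInfo("archive.zip")))
{
    foreach (var entry in archive.Entries)
    {
        System.Console.WriteLine(entry.Key);
    }
}

IsZipMulti appears intended as a pre-check: it reports true for a stream that has no local file headers but does have a central directory, i.e. the last volume of a multi-part set.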


@@ -6,7 +6,7 @@ namespace SharpCompress.Archives.Zip
{
public class ZipArchiveEntry : ZipEntry, IArchiveEntry
{
internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart part)
internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart? part)
: base(part)
{
Archive = archive;
@@ -25,6 +25,6 @@ namespace SharpCompress.Archives.Zip
#endregion
public string Comment => (Parts.Single() as SeekableZipFilePart).Comment;
public string? Comment => ((SeekableZipFilePart)Parts.Single()).Comment;
}
}


@@ -0,0 +1,34 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Readers;
using System.Linq;
using System.Text;
using SharpCompress.Common.Rar.Headers;
using System.Text.RegularExpressions;
namespace SharpCompress.Archives.Zip
{
internal static class ZipArchiveVolumeFactory
{
internal static FileInfo? GetFilePart(int index, FileInfo part1) //base the name on the first part
{
FileInfo? item = null;
//load files with zip/zipx first. Swapped to end once loaded in ZipArchive
//new style .zip, z01.. | .zipx, zx01 - if the numbers go beyond 99 then they use 100 ...1000 etc
Match m = Regex.Match(part1.Name, @"^(.*\.)(zipx?|zx?[0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(Path.Combine(part1.DirectoryName!, String.Concat(m.Groups[1].Value, Regex.Replace(m.Groups[2].Value, @"[^xz]", ""), index.ToString().PadLeft(2, '0'))));
else //split - 001, 002 ...
return ArchiveVolumeFactory.GetFilePart(index, part1);
if (item != null && item.Exists)
return item;
return null; //no more items
}
}
}
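
A worked example of the names this regex produces (assuming the part index counts up from the first part and the files exist on disk):

// part1 = "archive.zip"  -> "archive.z01", "archive.z02", ...  ("zip"  -> "z"  + zero-padded index)
// part1 = "archive.zipx" -> "archive.zx01", "archive.zx02", ... ("zipx" -> "zx" + zero-padded index)
// any other name falls through to ArchiveVolumeFactory, presumably the
// split-style "archive.001", "archive.002", ... scheme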


@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
@@ -53,7 +53,7 @@ namespace SharpCompress.Archives.Zip
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return new NonDisposingStream(stream);
return NonDisposingStream.Create(stream);
}
internal override void Close()
@@ -65,4 +65,4 @@ namespace SharpCompress.Archives.Zip
}
}
}
}
}


@@ -5,7 +5,6 @@ using System.Runtime.CompilerServices;
[assembly: AssemblyTitle("SharpCompress")]
[assembly: AssemblyProduct("SharpCompress")]
[assembly: InternalsVisibleTo("SharpCompress.Test" + SharpCompress.AssemblyInfo.PublicKeySuffix)]
[assembly: InternalsVisibleTo("SharpCompress.Test.Portable" + SharpCompress.AssemblyInfo.PublicKeySuffix)]
[assembly: CLSCompliant(true)]
namespace SharpCompress


@@ -1,119 +0,0 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#if NETCORE
using System.Runtime.CompilerServices;
using System.Threading;
namespace SharpCompress.Buffers
{
/// <summary>
/// Provides a resource pool that enables reusing instances of type <see cref="T:T[]"/>.
/// </summary>
/// <remarks>
/// <para>
/// Renting and returning buffers with an <see cref="ArrayPool{T}"/> can increase performance
/// in situations where arrays are created and destroyed frequently, resulting in significant
/// memory pressure on the garbage collector.
/// </para>
/// <para>
/// This class is thread-safe. All members may be used by multiple threads concurrently.
/// </para>
/// </remarks>
internal abstract class ArrayPool<T>
{
/// <summary>The lazily-initialized shared pool instance.</summary>
private static ArrayPool<T> s_sharedInstance = null;
/// <summary>
/// Retrieves a shared <see cref="ArrayPool{T}"/> instance.
/// </summary>
/// <remarks>
/// The shared pool provides a default implementation of <see cref="ArrayPool{T}"/>
/// that's intended for general applicability. It maintains arrays of multiple sizes, and
/// may hand back a larger array than was actually requested, but will never hand back a smaller
/// array than was requested. Renting a buffer from it with <see cref="Rent"/> will result in an
/// existing buffer being taken from the pool if an appropriate buffer is available or in a new
/// buffer being allocated if one is not available.
/// </remarks>
public static ArrayPool<T> Shared
{
[MethodImpl(MethodImplOptions.AggressiveInlining)]
get { return Volatile.Read(ref s_sharedInstance) ?? EnsureSharedCreated(); }
}
/// <summary>Ensures that <see cref="s_sharedInstance"/> has been initialized to a pool and returns it.</summary>
[MethodImpl(MethodImplOptions.NoInlining)]
private static ArrayPool<T> EnsureSharedCreated()
{
Interlocked.CompareExchange(ref s_sharedInstance, Create(), null);
return s_sharedInstance;
}
/// <summary>
/// Creates a new <see cref="ArrayPool{T}"/> instance using default configuration options.
/// </summary>
/// <returns>A new <see cref="ArrayPool{T}"/> instance.</returns>
public static ArrayPool<T> Create()
{
return new DefaultArrayPool<T>();
}
/// <summary>
/// Creates a new <see cref="ArrayPool{T}"/> instance using custom configuration options.
/// </summary>
/// <param name="maxArrayLength">The maximum length of array instances that may be stored in the pool.</param>
/// <param name="maxArraysPerBucket">
/// The maximum number of array instances that may be stored in each bucket in the pool. The pool
/// groups arrays of similar lengths into buckets for faster access.
/// </param>
/// <returns>A new <see cref="ArrayPool{T}"/> instance with the specified configuration options.</returns>
/// <remarks>
/// The created pool will group arrays into buckets, with no more than <paramref name="maxArraysPerBucket"/>
/// in each bucket and with those arrays not exceeding <paramref name="maxArrayLength"/> in length.
/// </remarks>
public static ArrayPool<T> Create(int maxArrayLength, int maxArraysPerBucket)
{
return new DefaultArrayPool<T>(maxArrayLength, maxArraysPerBucket);
}
/// <summary>
/// Retrieves a buffer that is at least the requested length.
/// </summary>
/// <param name="minimumLength">The minimum length of the array needed.</param>
/// <returns>
/// An <see cref="T:T[]"/> that is at least <paramref name="minimumLength"/> in length.
/// </returns>
/// <remarks>
/// This buffer is loaned to the caller and should be returned to the same pool via
/// <see cref="Return"/> so that it may be reused in subsequent usage of <see cref="Rent"/>.
/// It is not a fatal error to not return a rented buffer, but failure to do so may lead to
/// decreased application performance, as the pool may need to create a new buffer to replace
/// the one lost.
/// </remarks>
public abstract T[] Rent(int minimumLength);
/// <summary>
/// Returns to the pool an array that was previously obtained via <see cref="Rent"/> on the same
/// <see cref="ArrayPool{T}"/> instance.
/// </summary>
/// <param name="array">
/// The buffer previously obtained from <see cref="Rent"/> to return to the pool.
/// </param>
/// <param name="clearArray">
/// If <c>true</c> and if the pool will store the buffer to enable subsequent reuse, <see cref="Return"/>
/// will clear <paramref name="array"/> of its contents so that a subsequent consumer via <see cref="Rent"/>
/// will not see the previous consumer's content. If <c>false</c> or if the pool will release the buffer,
/// the array's contents are left unchanged.
/// </param>
/// <remarks>
/// Once a buffer has been returned to the pool, the caller gives up all ownership of the buffer
/// and must not use it. The reference returned from a given call to <see cref="Rent"/> must only be
/// returned via <see cref="Return"/> once. The default <see cref="ArrayPool{T}"/>
/// may hold onto the returned buffer in order to rent it again, or it may release the returned buffer
/// if it's determined that the pool already has enough buffers stored.
/// </remarks>
public abstract void Return(T[] array, bool clearArray = false);
}
}
#endif
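
With this backported pool deleted, the equivalent functionality comes from the BCL. A sketch of the replacement pattern, assuming the project now targets frameworks where System.Buffers is available:

using System.Buffers;

byte[] buffer = ArrayPool<byte>.Shared.Rent(4096); // may return a larger array than requested
try
{
    // ... use buffer[0..4096) ...
}
finally
{
    // Return exactly once and never touch the buffer afterwards.
    ArrayPool<byte>.Shared.Return(buffer);
}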


@@ -1,144 +0,0 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#if NETCORE
using System;
namespace SharpCompress.Buffers
{
internal sealed partial class DefaultArrayPool<T> : ArrayPool<T>
{
/// <summary>The default maximum length of each array in the pool (2^20).</summary>
private const int DefaultMaxArrayLength = 1024 * 1024;
/// <summary>The default maximum number of arrays per bucket that are available for rent.</summary>
private const int DefaultMaxNumberOfArraysPerBucket = 50;
/// <summary>Lazily-allocated empty array used when arrays of length 0 are requested.</summary>
private static T[] s_emptyArray; // we support contracts earlier than those with Array.Empty<T>()
private readonly Bucket[] _buckets;
internal DefaultArrayPool() : this(DefaultMaxArrayLength, DefaultMaxNumberOfArraysPerBucket)
{
}
internal DefaultArrayPool(int maxArrayLength, int maxArraysPerBucket)
{
if (maxArrayLength <= 0)
{
throw new ArgumentOutOfRangeException(nameof(maxArrayLength));
}
if (maxArraysPerBucket <= 0)
{
throw new ArgumentOutOfRangeException(nameof(maxArraysPerBucket));
}
// Our bucketing algorithm has a min length of 2^4 and a max length of 2^30.
// Constrain the actual max used to those values.
const int MinimumArrayLength = 0x10, MaximumArrayLength = 0x40000000;
if (maxArrayLength > MaximumArrayLength)
{
maxArrayLength = MaximumArrayLength;
}
else if (maxArrayLength < MinimumArrayLength)
{
maxArrayLength = MinimumArrayLength;
}
// Create the buckets.
int poolId = Id;
int maxBuckets = Utilities.SelectBucketIndex(maxArrayLength);
var buckets = new Bucket[maxBuckets + 1];
for (int i = 0; i < buckets.Length; i++)
{
buckets[i] = new Bucket(Utilities.GetMaxSizeForBucket(i), maxArraysPerBucket, poolId);
}
_buckets = buckets;
}
/// <summary>Gets an ID for the pool to use with events.</summary>
private int Id => GetHashCode();
public override T[] Rent(int minimumLength)
{
// Arrays can't be smaller than zero. We allow requesting zero-length arrays (even though
// pooling such an array isn't valuable) as it's a valid length array, and we want the pool
// to be usable in general instead of using `new`, even for computed lengths.
if (minimumLength < 0)
{
throw new ArgumentOutOfRangeException(nameof(minimumLength));
}
else if (minimumLength == 0)
{
// No need for events with the empty array. Our pool is effectively infinite
// and we'll never allocate for rents and never store for returns.
return s_emptyArray ?? (s_emptyArray = new T[0]);
}
T[] buffer = null;
int index = Utilities.SelectBucketIndex(minimumLength);
if (index < _buckets.Length)
{
// Search for an array starting at the 'index' bucket. If the bucket is empty, bump up to the
// next higher bucket and try that one, but only try at most a few buckets.
const int MaxBucketsToTry = 2;
int i = index;
do
{
// Attempt to rent from the bucket. If we get a buffer from it, return it.
buffer = _buckets[i].Rent();
if (buffer != null)
{
return buffer;
}
}
while (++i < _buckets.Length && i != index + MaxBucketsToTry);
// The pool was exhausted for this buffer size. Allocate a new buffer with a size corresponding
// to the appropriate bucket.
buffer = new T[_buckets[index]._bufferLength];
}
else
{
// The request was for a size too large for the pool. Allocate an array of exactly the requested length.
// When it's returned to the pool, we'll simply throw it away.
buffer = new T[minimumLength];
}
return buffer;
}
public override void Return(T[] array, bool clearArray = false)
{
if (array == null)
{
throw new ArgumentNullException(nameof(array));
}
else if (array.Length == 0)
{
// Ignore empty arrays. When a zero-length array is rented, we return a singleton
// rather than actually taking a buffer out of the lowest bucket.
return;
}
// Determine with what bucket this array length is associated
int bucket = Utilities.SelectBucketIndex(array.Length);
// If we can tell that the buffer was allocated, drop it. Otherwise, check if we have space in the pool
if (bucket < _buckets.Length)
{
// Clear the array if the user requests
if (clearArray)
{
Array.Clear(array, 0, array.Length);
}
// Return the buffer to its bucket. In the future, we might consider having Return return false
// instead of dropping a bucket, in which case we could try to return to a lower-sized bucket,
// just as how in Rent we allow renting from a higher-sized bucket.
_buckets[bucket].Return(array);
}
}
}
}
#endif


@@ -1,111 +0,0 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#if NETCORE
using System;
using System.Diagnostics;
using System.Threading;
namespace SharpCompress.Buffers
{
internal sealed partial class DefaultArrayPool<T> : ArrayPool<T>
{
/// <summary>Provides a thread-safe bucket containing buffers that can be Rent'd and Return'd.</summary>
private sealed class Bucket
{
internal readonly int _bufferLength;
private readonly T[][] _buffers;
private readonly int _poolId;
private SpinLock _lock; // do not make this readonly; it's a mutable struct
private int _index;
/// <summary>
/// Creates the pool with numberOfBuffers arrays where each buffer is of bufferLength length.
/// </summary>
internal Bucket(int bufferLength, int numberOfBuffers, int poolId)
{
_lock = new SpinLock(Debugger.IsAttached); // only enable thread tracking if debugger is attached; it adds non-trivial overheads to Enter/Exit
_buffers = new T[numberOfBuffers][];
_bufferLength = bufferLength;
_poolId = poolId;
}
/// <summary>Gets an ID for the bucket to use with events.</summary>
internal int Id => GetHashCode();
/// <summary>Takes an array from the bucket. If the bucket is empty, returns null.</summary>
internal T[] Rent()
{
T[][] buffers = _buffers;
T[] buffer = null;
// While holding the lock, grab whatever is at the next available index and
// update the index. We do as little work as possible while holding the spin
// lock to minimize contention with other threads. The try/finally is
// necessary to properly handle thread aborts on platforms which have them.
bool lockTaken = false, allocateBuffer = false;
try
{
_lock.Enter(ref lockTaken);
if (_index < buffers.Length)
{
buffer = buffers[_index];
buffers[_index++] = null;
allocateBuffer = buffer == null;
}
}
finally
{
if (lockTaken) _lock.Exit(false);
}
// While we were holding the lock, we grabbed whatever was at the next available index, if
// there was one. If we tried and if we got back null, that means we hadn't yet allocated
// for that slot, in which case we should do so now.
if (allocateBuffer)
{
buffer = new T[_bufferLength];
}
return buffer;
}
/// <summary>
/// Attempts to return the buffer to the bucket. If successful, the buffer will be stored
/// in the bucket and true will be returned; otherwise, the buffer won't be stored, and false
/// will be returned.
/// </summary>
internal void Return(T[] array)
{
// Check to see if the buffer is the correct size for this bucket
if (array.Length != _bufferLength)
{
throw new ArgumentException("Buffer not from pool", nameof(array));
}
// While holding the spin lock, if there's room available in the bucket,
// put the buffer into the next available slot. Otherwise, we just drop it.
// The try/finally is necessary to properly handle thread aborts on platforms
// which have them.
bool lockTaken = false;
try
{
_lock.Enter(ref lockTaken);
if (_index != 0)
{
_buffers[--_index] = array;
}
}
finally
{
if (lockTaken) _lock.Exit(false);
}
}
}
}
}
#endif


@@ -1,38 +0,0 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#if NETCORE
using System.Diagnostics;
using System.Runtime.CompilerServices;
namespace SharpCompress.Buffers
{
internal static class Utilities
{
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static int SelectBucketIndex(int bufferSize)
{
Debug.Assert(bufferSize > 0);
uint bitsRemaining = ((uint)bufferSize - 1) >> 4;
int poolIndex = 0;
if (bitsRemaining > 0xFFFF) { bitsRemaining >>= 16; poolIndex = 16; }
if (bitsRemaining > 0xFF) { bitsRemaining >>= 8; poolIndex += 8; }
if (bitsRemaining > 0xF) { bitsRemaining >>= 4; poolIndex += 4; }
if (bitsRemaining > 0x3) { bitsRemaining >>= 2; poolIndex += 2; }
if (bitsRemaining > 0x1) { bitsRemaining >>= 1; poolIndex += 1; }
return poolIndex + (int)bitsRemaining;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static int GetMaxSizeForBucket(int binIndex)
{
int maxSize = 16 << binIndex;
Debug.Assert(maxSize >= 0);
return maxSize;
}
}
}
#endif
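
SelectBucketIndex is a five-step bit scan: the bucket index equals the number of bits needed to represent (bufferSize - 1) >> 4, so bucket 0 holds 16-byte arrays and each bucket doubles the size. A worked example:

// bufferSize = 3500:
//   (3500 - 1) >> 4 = 218, which needs 8 bits -> bucket index 8
//   GetMaxSizeForBucket(8) = 16 << 8 = 4096, the smallest bucket size >= 3500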


@@ -18,21 +18,25 @@ namespace SharpCompress.Common
/// <summary>
/// Set this encoding when you want to force it for all encoding operations.
/// </summary>
public Encoding Forced { get; set; }
public Encoding? Forced { get; set; }
/// <summary>
/// Set this when you want to use a custom method for all decoding operations.
/// </summary>
/// <returns>string Func(bytes, index, length)</returns>
public Func<byte[], int, int, string> CustomDecoder { get; set; }
public Func<byte[], int, int, string>? CustomDecoder { get; set; }
public ArchiveEncoding()
: this(Encoding.Default, Encoding.Default)
{
Default = Encoding.UTF8;
Password = Encoding.UTF8;
}
public ArchiveEncoding(Encoding def, Encoding password)
{
Default = def;
Password = password;
}
#if NETSTANDARD1_3 || NETSTANDARD2_0
#if !NETFRAMEWORK
static ArchiveEncoding()
{
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
@@ -44,21 +48,16 @@ namespace SharpCompress.Common
return Decode(bytes, 0, bytes.Length);
}
public string Decode437(byte[] bytes)
{
#if NETSTANDARD1_0
return Decode(bytes, 0, bytes.Length);
#else
var extendedAsciiEncoding = Encoding.GetEncoding(437);
return extendedAsciiEncoding.GetString(bytes, 0, bytes.Length);
#endif
}
public string Decode(byte[] bytes, int start, int length)
{
return GetDecoder().Invoke(bytes, start, length);
}
public string DecodeUTF8(byte[] bytes)
{
return Encoding.UTF8.GetString(bytes, 0, bytes.Length);
}
public byte[] Encode(string str)
{
return GetEncoding().GetBytes(str);
@@ -71,7 +70,7 @@ namespace SharpCompress.Common
public Func<byte[], int, int, string> GetDecoder()
{
return CustomDecoder ?? ((bytes, index, count) => (Default ?? Encoding.UTF8).GetString(bytes, index, count));
return CustomDecoder ?? ((bytes, index, count) => GetEncoding().GetString(bytes, index, count));
}
}
}
}
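
Because GetDecoder now routes through GetEncoding() instead of a hard-coded UTF-8 fallback, callers that need a specific code page can set the encoding properties or supply a custom decoder. A sketch, assuming GetEncoding() honours Forced as its name suggests (code page 932 is only an example; the static constructor above registers CodePagesEncodingProvider so non-Unicode pages resolve on modern targets):

using System.Text;
using SharpCompress.Common;

var encoding = new ArchiveEncoding
{
    Forced = Encoding.GetEncoding(932) // e.g. Shift-JIS entry names
};
// Or bypass Encoding entirely with a custom decoder:
encoding.CustomDecoder = (bytes, index, count) =>
    Encoding.UTF8.GetString(bytes, index, count);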


@@ -2,16 +2,22 @@
namespace SharpCompress.Common
{
public class CompressedBytesReadEventArgs : EventArgs
public sealed class CompressedBytesReadEventArgs : EventArgs
{
public CompressedBytesReadEventArgs(long compressedBytesRead, long currentFilePartCompressedBytesRead)
{
CompressedBytesRead = compressedBytesRead;
CurrentFilePartCompressedBytesRead = currentFilePartCompressedBytesRead;
}
/// <summary>
/// Compressed bytes read for the current entry
/// </summary>
public long CompressedBytesRead { get; internal set; }
public long CompressedBytesRead { get; }
/// <summary>
/// Current file part read for Multipart files (e.g. Rar)
/// </summary>
public long CurrentFilePartCompressedBytesRead { get; internal set; }
public long CurrentFilePartCompressedBytesRead { get; }
}
}


@@ -1,5 +1,6 @@
using System;
using System;
using System.Collections.Generic;
using System.Linq;
namespace SharpCompress.Common
{
@@ -15,6 +16,11 @@ namespace SharpCompress.Common
/// </summary>
public abstract string Key { get; }
/// <summary>
/// The target of a symlink entry internal to the Archive. Will be null if not a symlink.
/// </summary>
public abstract string? LinkTarget { get; }
/// <summary>
/// The compressed file size
/// </summary>
@@ -56,7 +62,7 @@ namespace SharpCompress.Common
public abstract bool IsEncrypted { get; }
/// <summary>
/// Entry is password protected and encrypted and cannot be extracted.
/// Entry is directory.
/// </summary>
public abstract bool IsDirectory { get; }
@@ -65,14 +71,14 @@ namespace SharpCompress.Common
/// </summary>
public abstract bool IsSplitAfter { get; }
public int VolumeIndexFirst => this.Parts?.FirstOrDefault()?.Index ?? 0;
public int VolumeIndexLast => this.Parts?.LastOrDefault()?.Index ?? 0;
/// <inheritdoc/>
public override string ToString()
{
return Key;
}
public override string ToString() => Key;
internal abstract IEnumerable<FilePart> Parts { get; }
internal bool IsSolid { get; set; }
public bool IsSolid { get; set; }
internal virtual void Close()
{
@@ -83,4 +89,4 @@ namespace SharpCompress.Common
/// </summary>
public virtual int? Attrib => throw new NotImplementedException();
}
}
}
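
The new VolumeIndexFirst/VolumeIndexLast expose which parts of a multi-volume set an entry occupies. A usage sketch (archive is any archive opened from a multi-part set):

foreach (var entry in archive.Entries)
{
    if (entry.VolumeIndexFirst != entry.VolumeIndexLast)
    {
        System.Console.WriteLine(
            $"{entry.Key} spans volumes {entry.VolumeIndexFirst}..{entry.VolumeIndexLast}");
    }
}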


@@ -49,7 +49,6 @@ namespace SharpCompress.Common
public override void Flush()
{
throw new NotSupportedException();
}
public override long Length => _stream.Length;
@@ -66,6 +65,16 @@ namespace SharpCompress.Common
return read;
}
public override int ReadByte()
{
int value = _stream.ReadByte();
if (value == -1)
{
_completed = true;
}
return value;
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException();


@@ -0,0 +1,104 @@
using System;
using System.IO;
namespace SharpCompress.Common
{
internal static class ExtractionMethods
{
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteEntryToDirectory(IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Action<string, ExtractionOptions?> write)
{
string destinationFileName;
string fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
//check for trailing slash.
if (fullDestinationDirectoryPath[fullDestinationDirectoryPath.Length - 1] != Path.DirectorySeparatorChar)
{
fullDestinationDirectoryPath += Path.DirectorySeparatorChar;
}
if (!Directory.Exists(fullDestinationDirectoryPath))
{
throw new ExtractionException($"Directory does not exist to extract to: {fullDestinationDirectoryPath}");
}
options ??= new ExtractionOptions()
{
Overwrite = true
};
string file = Path.GetFileName(entry.Key);
if (options.ExtractFullPath)
{
string folder = Path.GetDirectoryName(entry.Key)!;
string destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));
if (!Directory.Exists(destdir))
{
if (!destdir.StartsWith(fullDestinationDirectoryPath, StringComparison.Ordinal))
{
throw new ExtractionException("Entry is trying to create a directory outside of the destination directory.");
}
Directory.CreateDirectory(destdir);
}
destinationFileName = Path.Combine(destdir, file);
}
else
{
destinationFileName = Path.Combine(fullDestinationDirectoryPath, file);
}
if (!entry.IsDirectory)
{
destinationFileName = Path.GetFullPath(destinationFileName);
if (!destinationFileName.StartsWith(fullDestinationDirectoryPath, StringComparison.Ordinal))
{
throw new ExtractionException("Entry is trying to write a file outside of the destination directory.");
}
write(destinationFileName, options);
}
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
{
Directory.CreateDirectory(destinationFileName);
}
}
public static void WriteEntryToFile(IEntry entry, string destinationFileName,
ExtractionOptions? options,
Action<string, FileMode> openAndWrite)
{
if (entry.LinkTarget != null)
{
if (options?.WriteSymbolicLink is null)
{
throw new ExtractionException("Entry is a symbolic link but ExtractionOptions.WriteSymbolicLink delegate is null");
}
options.WriteSymbolicLink(destinationFileName, entry.LinkTarget);
}
else
{
FileMode fm = FileMode.Create;
options ??= new ExtractionOptions()
{
Overwrite = true
};
if (!options.Overwrite)
{
fm = FileMode.CreateNew;
}
openAndWrite(destinationFileName, fm);
entry.PreserveExtractionOptions(destinationFileName, options);
}
}
}
}
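
The two StartsWith checks are what stop a crafted entry from escaping the destination directory (the "Zip Slip" pattern). A worked example of the file-level check:

// destinationDirectory = "/tmp/out", options.ExtractFullPath = true
// entry.Key = "../../etc/passwd"
// destinationFileName resolves via Path.GetFullPath to "/etc/passwd",
// which does not start with "/tmp/out/", so an ExtractionException is thrown.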


@@ -0,0 +1,40 @@
using System;
namespace SharpCompress.Common
{
public class ExtractionOptions
{
/// <summary>
/// overwrite target if it exists
/// </summary>
public bool Overwrite { get; set; }
/// <summary>
/// extract with internal directory structure
/// </summary>
public bool ExtractFullPath { get; set; }
/// <summary>
/// preserve file time
/// </summary>
public bool PreserveFileTime { get; set; }
/// <summary>
/// preserve windows file attributes
/// </summary>
public bool PreserveAttributes { get; set; }
/// <summary>
/// Delegate for writing symbolic links to disk.
/// sourcePath is where the symlink is created.
/// targetPath is what the symlink refers to.
/// </summary>
public delegate void SymbolicLinkWriterDelegate(string sourcePath, string targetPath);
public SymbolicLinkWriterDelegate WriteSymbolicLink =
(sourcePath, targetPath) =>
{
Console.WriteLine($"Could not write symlink {sourcePath} -> {targetPath}, for more information please see https://github.com/dotnet/runtime/issues/24271");
};
}
}
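
On .NET 6 and later the delegate can simply point at the runtime's own symlink API; older targets need platform-specific code, which is presumably why the default handler only logs. A sketch assuming a .NET 6+ consumer:

var options = new ExtractionOptions
{
    ExtractFullPath = true,
    Overwrite = true,
    // File.CreateSymbolicLink exists from .NET 6 onwards.
    WriteSymbolicLink = (sourcePath, targetPath) =>
        System.IO.File.CreateSymbolicLink(sourcePath, targetPath)
};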


@@ -1,4 +1,4 @@
using System.IO;
using System.IO;
namespace SharpCompress.Common
{
@@ -10,11 +10,12 @@ namespace SharpCompress.Common
}
internal ArchiveEncoding ArchiveEncoding { get; }
internal abstract string FilePartName { get; }
public int Index { get; set; }
internal abstract Stream GetCompressedStream();
internal abstract Stream GetRawStream();
internal abstract Stream? GetRawStream();
internal bool Skipped { get; set; }
}
}
}


@@ -2,21 +2,28 @@
namespace SharpCompress.Common
{
public class FilePartExtractionBeginEventArgs : EventArgs
public sealed class FilePartExtractionBeginEventArgs : EventArgs
{
public FilePartExtractionBeginEventArgs(string name, long size, long compressedSize)
{
Name = name;
Size = size;
CompressedSize = compressedSize;
}
/// <summary>
/// File name for the part for the current entry
/// </summary>
public string Name { get; internal set; }
public string Name { get; }
/// <summary>
/// Uncompressed size of the current entry in the part
/// </summary>
public long Size { get; internal set; }
public long Size { get; }
/// <summary>
/// Compressed size of the current entry in the part
/// </summary>
public long CompressedSize { get; internal set; }
public long CompressedSize { get; }
}
}


@@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace SharpCompress.Common.GZip
{
@@ -16,13 +15,15 @@ namespace SharpCompress.Common.GZip
public override CompressionType CompressionType => CompressionType.GZip;
public override long Crc => 0;
public override long Crc => _filePart.Crc ?? 0;
public override string Key => _filePart.FilePartName;
public override string? LinkTarget => null;
public override long CompressedSize => 0;
public override long Size => 0;
public override long Size => _filePart.UncompressedSize ?? 0;
public override DateTime? LastModifiedTime => _filePart.DateModified;


@@ -1,32 +1,40 @@
using System;
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Converters;
using System.Text;
namespace SharpCompress.Common.GZip
{
internal class GZipFilePart : FilePart
internal sealed class GZipFilePart : FilePart
{
private string _name;
private string? _name;
private readonly Stream _stream;
internal GZipFilePart(Stream stream, ArchiveEncoding archiveEncoding)
: base(archiveEncoding)
: base(archiveEncoding)
{
ReadAndValidateGzipHeader(stream);
EntryStartPosition = stream.Position;
_stream = stream;
ReadAndValidateGzipHeader();
if (stream.CanSeek)
{
long position = stream.Position;
stream.Position = stream.Length - 8;
ReadTrailer();
stream.Position = position;
}
EntryStartPosition = stream.Position;
}
internal long EntryStartPosition { get; }
internal DateTime? DateModified { get; private set; }
internal uint? Crc { get; private set; }
internal uint? UncompressedSize { get; private set; }
internal override string FilePartName => _name;
internal override string FilePartName => _name!;
internal override Stream GetCompressedStream()
{
@@ -38,11 +46,21 @@ namespace SharpCompress.Common.GZip
return _stream;
}
private void ReadAndValidateGzipHeader(Stream stream)
private void ReadTrailer()
{
// Read and potentially verify the GZIP trailer: CRC32 and size mod 2^32
Span<byte> trailer = stackalloc byte[8];
int n = _stream.Read(trailer);
Crc = BinaryPrimitives.ReadUInt32LittleEndian(trailer);
UncompressedSize = BinaryPrimitives.ReadUInt32LittleEndian(trailer.Slice(4));
}
private void ReadAndValidateGzipHeader()
{
// read the header on the first read
byte[] header = new byte[10];
int n = stream.Read(header, 0, header.Length);
Span<byte> header = stackalloc byte[10];
int n = _stream.Read(header);
// workitem 8501: handle edge case (decompress empty stream)
if (n == 0)
@@ -60,17 +78,17 @@ namespace SharpCompress.Common.GZip
throw new ZlibException("Bad GZIP header.");
}
Int32 timet = DataConverter.LittleEndian.GetInt32(header, 4);
int timet = BinaryPrimitives.ReadInt32LittleEndian(header.Slice(4));
DateModified = TarHeader.EPOCH.AddSeconds(timet);
if ((header[3] & 0x04) == 0x04)
{
// read and discard extra field
n = stream.Read(header, 0, 2); // 2-byte length field
n = _stream.Read(header.Slice(0, 2)); // 2-byte length field
Int16 extraLength = (Int16)(header[0] + header[1] * 256);
short extraLength = (short)(header[0] + header[1] * 256);
byte[] extra = new byte[extraLength];
if (!stream.ReadFully(extra))
if (!_stream.ReadFully(extra))
{
throw new ZlibException("Unexpected end-of-file reading GZIP header.");
}
@@ -78,27 +96,27 @@ namespace SharpCompress.Common.GZip
}
if ((header[3] & 0x08) == 0x08)
{
_name = ReadZeroTerminatedString(stream);
_name = ReadZeroTerminatedString(_stream);
}
if ((header[3] & 0x10) == 0x010)
{
ReadZeroTerminatedString(stream);
ReadZeroTerminatedString(_stream);
}
if ((header[3] & 0x02) == 0x02)
{
stream.ReadByte(); // CRC16, ignore
_stream.ReadByte(); // CRC16, ignore
}
}
private string ReadZeroTerminatedString(Stream stream)
{
byte[] buf1 = new byte[1];
Span<byte> buf1 = stackalloc byte[1];
var list = new List<byte>();
bool done = false;
do
{
// workitem 7740
int n = stream.Read(buf1, 0, 1);
int n = stream.Read(buf1);
if (n != 1)
{
throw new ZlibException("Unexpected EOF reading GZIP header.");
@@ -117,4 +135,4 @@ namespace SharpCompress.Common.GZip
return ArchiveEncoding.Decode(buffer);
}
}
}
}
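
Per RFC 1952 the last eight bytes of a gzip member are CRC32 and ISIZE, both little-endian, which is exactly what ReadTrailer decodes:

// ... compressed blocks ... | CRC32 (4 bytes, LE) | ISIZE (4 bytes, LE)
// ISIZE is the uncompressed length mod 2^32, so the Size reported above
// wraps around for payloads of 4 GiB or more.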


@@ -1,25 +1,23 @@
using System.IO;
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common.GZip
{
public class GZipVolume : Volume
{
public GZipVolume(Stream stream, ReaderOptions options)
: base(stream, options)
public GZipVolume(Stream stream, ReaderOptions options, int index = 0)
: base(stream, options, index)
{
}
#if !NO_FILE
public GZipVolume(FileInfo fileInfo, ReaderOptions options)
: base(fileInfo.OpenRead(), options)
{
options.LeaveStreamOpen = false;
}
#endif
public override bool IsFirstVolume => true;
public override bool IsMultiVolume => true;
}
}
}

View File

@@ -1,7 +1,4 @@
#if !NO_FILE
using System.IO;
using SharpCompress.Readers;
using System.IO;
namespace SharpCompress.Common
{
@@ -48,4 +45,3 @@ namespace SharpCompress.Common
}
}
}
#endif


@@ -1,4 +1,5 @@
using System;
using System;
using System.Collections.Generic;
namespace SharpCompress.Common
{
@@ -10,12 +11,16 @@ namespace SharpCompress.Common
long Crc { get; }
DateTime? CreatedTime { get; }
string Key { get; }
string? LinkTarget { get; }
bool IsDirectory { get; }
bool IsEncrypted { get; }
bool IsSplitAfter { get; }
bool IsSolid { get; }
int VolumeIndexFirst { get; }
int VolumeIndexLast { get; }
DateTime? LastAccessedTime { get; }
DateTime? LastModifiedTime { get; }
long Size { get; }
int? Attrib { get; }
}
}
}


@@ -1,12 +1,11 @@
using System;
#if !NO_FILE
using System.IO;
#endif
using System;
namespace SharpCompress.Common
{
public interface IVolume : IDisposable
{
int Index { get; }
string FileName { get; }
}
}
}


@@ -4,11 +4,13 @@ namespace SharpCompress.Common.Rar.Headers
{
internal class AvHeader : RarHeader
{
public AvHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Av)
public AvHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Av)
{
if (IsRar5)
if (IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");
}
}
protected override void ReadFinish(MarkingBinaryReader reader)


@@ -1,10 +1,12 @@
using SharpCompress.IO;
#nullable disable
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
internal class ArchiveCryptHeader : RarHeader
{
private const int CRYPT_VERSION = 0; // Supported encryption version.
private const int SIZE_SALT50 = 16;
private const int SIZE_SALT30 = 8;
@@ -13,14 +15,14 @@ namespace SharpCompress.Common.Rar.Headers
private const int SIZE_PSWCHECK_CSUM = 4;
private const int CRYPT5_KDF_LG2_COUNT = 15; // LOG2 of PDKDF2 iteration count.
private const int CRYPT5_KDF_LG2_COUNT_MAX = 24; // LOG2 of maximum accepted iteration count.
private bool _usePswCheck;
private uint _lg2Count; // Log2 of PBKDF2 repetition count.
private byte[] _salt;
private byte[] _pswCheck;
private byte[] _pswCheckCsm;
public ArchiveCryptHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Crypt)
{
@@ -33,12 +35,12 @@ namespace SharpCompress.Common.Rar.Headers
{
//error?
return;
}
}
var encryptionFlags = reader.ReadRarVIntUInt32();
_usePswCheck = FlagUtility.HasFlag(encryptionFlags, EncryptionFlagsV5.CHFL_CRYPT_PSWCHECK);
_lg2Count = reader.ReadRarVIntByte(1);
//UsePswCheck = HasHeaderFlag(EncryptionFlagsV5.CHFL_CRYPT_PSWCHECK);
if (_lg2Count > CRYPT5_KDF_LG2_COUNT_MAX)
{


@@ -2,16 +2,16 @@ using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
internal class ArchiveHeader : RarHeader
internal sealed class ArchiveHeader : RarHeader
{
public ArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Archive)
public ArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Archive)
{
}
protected override void ReadFinish(MarkingBinaryReader reader)
{
if (IsRar5)
if (IsRar5)
{
Flags = reader.ReadRarVIntUInt16();
if (HasFlag(ArchiveFlagsV5.HAS_VOLUME_NUMBER))
@@ -22,8 +22,8 @@ namespace SharpCompress.Common.Rar.Headers
//if (ExtraSize != 0) {
// ReadLocator(reader);
//}
}
else
}
else
{
Flags = HeaderFlags;
HighPosAv = reader.ReadInt16();
@@ -35,26 +35,33 @@ namespace SharpCompress.Common.Rar.Headers
}
}
private void ReadLocator(MarkingBinaryReader reader) {
private void ReadLocator(MarkingBinaryReader reader)
{
var size = reader.ReadRarVIntUInt16();
var type = reader.ReadRarVIntUInt16();
if (type != 1) throw new InvalidFormatException("expected locator record");
if (type != 1)
{
throw new InvalidFormatException("expected locator record");
}
var flags = reader.ReadRarVIntUInt16();
const ushort hasQuickOpenOffset = 0x01;
const ushort hasRecoveryOffset = 0x02;
ulong quickOpenOffset = 0;
if ((flags & hasQuickOpenOffset) == hasQuickOpenOffset) {
if ((flags & hasQuickOpenOffset) == hasQuickOpenOffset)
{
quickOpenOffset = reader.ReadRarVInt();
}
ulong recoveryOffset = 0;
if ((flags & hasRecoveryOffset) == hasRecoveryOffset) {
if ((flags & hasRecoveryOffset) == hasRecoveryOffset)
{
recoveryOffset = reader.ReadRarVInt();
}
}
private ushort Flags { get; set; }
private ushort Flags { get; set; }
private bool HasFlag(ushort flag)
private bool HasFlag(ushort flag)
{
return (Flags & flag) == flag;
}
@@ -74,7 +81,7 @@ namespace SharpCompress.Common.Rar.Headers
public bool IsVolume => HasFlag(IsRar5 ? ArchiveFlagsV5.VOLUME : ArchiveFlagsV4.VOLUME);
// RAR5: Volume number field is present. True for all volumes except first.
public bool IsFirstVolume => IsRar5 ? VolumeNumber == null : HasFlag(ArchiveFlagsV4.FIRST_VOLUME);
public bool IsFirstVolume => IsRar5 ? VolumeNumber is null : HasFlag(ArchiveFlagsV4.FIRST_VOLUME);
public bool IsSolid => HasFlag(IsRar5 ? ArchiveFlagsV5.SOLID : ArchiveFlagsV4.SOLID);
}


@@ -5,9 +5,12 @@ namespace SharpCompress.Common.Rar.Headers
internal class CommentHeader : RarHeader
{
protected CommentHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Comment)
{
if (IsRar5) throw new InvalidFormatException("unexpected rar5 record");
: base(header, reader, HeaderType.Comment)
{
if (IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");
}
}
protected override void ReadFinish(MarkingBinaryReader reader)


@@ -4,14 +4,14 @@ namespace SharpCompress.Common.Rar.Headers
{
internal class EndArchiveHeader : RarHeader
{
public EndArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.EndArchive)
public EndArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.EndArchive)
{
}
protected override void ReadFinish(MarkingBinaryReader reader)
{
if (IsRar5)
if (IsRar5)
{
Flags = reader.ReadRarVIntUInt16();
}
@@ -31,7 +31,7 @@ namespace SharpCompress.Common.Rar.Headers
private ushort Flags { get; set; }
private bool HasFlag(ushort flag)
private bool HasFlag(ushort flag)
{
return (Flags & flag) == flag;
}


@@ -1,3 +1,5 @@
#nullable disable
#if !Rar2017_64bit
using nint = System.Int32;
using nuint = System.UInt32;
@@ -19,18 +21,18 @@ namespace SharpCompress.Common.Rar.Headers
{
private uint _fileCrc;
public FileHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType)
: base(header, reader, headerType)
public FileHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType)
: base(header, reader, headerType)
{
}
protected override void ReadFinish(MarkingBinaryReader reader)
protected override void ReadFinish(MarkingBinaryReader reader)
{
if (IsRar5)
if (IsRar5)
{
ReadFromReaderV5(reader);
}
else
}
else
{
ReadFromReaderV4(reader);
}
@@ -47,11 +49,13 @@ namespace SharpCompress.Common.Rar.Headers
FileAttributes = reader.ReadRarVIntUInt32();
if (HasFlag(FileFlagsV5.HAS_MOD_TIME)) {
if (HasFlag(FileFlagsV5.HAS_MOD_TIME))
{
FileLastModifiedTime = Utility.UnixTimeToDateTime(reader.ReadUInt32());
}
if (HasFlag(FileFlagsV5.HAS_CRC32)) {
if (HasFlag(FileFlagsV5.HAS_CRC32))
{
FileCrc = reader.ReadUInt32();
}
@@ -63,7 +67,7 @@ namespace SharpCompress.Common.Rar.Headers
// but it was already used in RAR 1.5 and Unpack needs to distinguish
// them.
CompressionAlgorithm = (byte)((compressionInfo & 0x3f) + 50);
// 7th bit (0x0040) defines the solid flag. If it is set, RAR continues to use the compression dictionary left after processing preceding files.
// It can be set only for file headers and is never set for service headers.
IsSolid = (compressionInfo & 0x40) == 0x40;
@@ -72,7 +76,7 @@ namespace SharpCompress.Common.Rar.Headers
CompressionMethod = (byte)((compressionInfo >> 7) & 0x7);
// Bits 11 - 14 (0x3c00) define the minimum size of dictionary size required to extract data. Value 0 means 128 KB, 1 - 256 KB, ..., 14 - 2048 MB, 15 - 4096 MB.
WindowSize = IsDirectory ? 0 : ((size_t)0x20000) << ((compressionInfo>>10) & 0xf);
WindowSize = IsDirectory ? 0 : ((size_t)0x20000) << ((compressionInfo >> 10) & 0xf);
HostOs = reader.ReadRarVIntByte();
@@ -99,65 +103,73 @@ namespace SharpCompress.Common.Rar.Headers
FileName = ConvertPathV5(Encoding.UTF8.GetString(b, 0, b.Length));
// extra size seems to be redundant since we know the total header size
if (ExtraSize != RemainingHeaderBytes(reader))
if (ExtraSize != RemainingHeaderBytes(reader))
{
throw new InvalidFormatException("rar5 header size / extra size inconsistency");
}
while (RemainingHeaderBytes(reader) > 0) {
isEncryptedRar5 = false;
while (RemainingHeaderBytes(reader) > 0)
{
var size = reader.ReadRarVIntUInt16();
int n = RemainingHeaderBytes(reader);
var type = reader.ReadRarVIntUInt16();
switch (type) {
//TODO
// case 1: // file encryption
// {
// var version = reader.ReadRarVIntByte();
// if (version != 0) throw new InvalidFormatException("unknown encryption algorithm "+ version);
//
// }
// break;
// case 2: // file hash
// {
//
// }
// break;
switch (type)
{
//TODO
case 1: // file encryption
{
isEncryptedRar5 = true;
//var version = reader.ReadRarVIntByte();
//if (version != 0) throw new InvalidFormatException("unknown encryption algorithm " + version);
}
break;
// case 2: // file hash
// {
//
// }
// break;
case 3: // file time
{
ushort flags = reader.ReadRarVIntUInt16();
var isWindowsTime = (flags & 1) == 0;
if ((flags & 0x2) == 0x2) {
if ((flags & 0x2) == 0x2)
{
FileLastModifiedTime = ReadExtendedTimeV5(reader, isWindowsTime);
}
if ((flags & 0x4) == 0x4) {
if ((flags & 0x4) == 0x4)
{
FileCreatedTime = ReadExtendedTimeV5(reader, isWindowsTime);
}
if ((flags & 0x8) == 0x8) {
if ((flags & 0x8) == 0x8)
{
FileLastAccessedTime = ReadExtendedTimeV5(reader, isWindowsTime);
}
}
break;
//TODO
// case 4: // file version
// {
//
// }
// break;
// case 5: // file system redirection
// {
//
// }
// break;
// case 6: // unix owner
// {
//
// }
// break;
// case 7: // service data
// {
//
// }
// break;
//TODO
// case 4: // file version
// {
//
// }
// break;
// case 5: // file system redirection
// {
//
// }
// break;
// case 6: // unix owner
// {
//
// }
// break;
// case 7: // service data
// {
//
// }
// break;
default:
// skip unknown record types to allow new record types to be added in the future
@@ -166,25 +178,26 @@ namespace SharpCompress.Common.Rar.Headers
// drain any trailing bytes of extra record
int did = n - RemainingHeaderBytes(reader);
int drain = size - did;
if (drain > 0)
if (drain > 0)
{
reader.ReadBytes(drain);
}
}
if (AdditionalDataSize != 0) {
if (AdditionalDataSize != 0)
{
CompressedSize = AdditionalDataSize;
}
}
private static DateTime ReadExtendedTimeV5(MarkingBinaryReader reader, bool isWindowsTime)
private static DateTime ReadExtendedTimeV5(MarkingBinaryReader reader, bool isWindowsTime)
{
if (isWindowsTime)
if (isWindowsTime)
{
return DateTime.FromFileTime(reader.ReadInt64());
}
else
}
else
{
return Utility.UnixTimeToDateTime(reader.ReadUInt32());
}
@@ -192,17 +205,12 @@ namespace SharpCompress.Common.Rar.Headers
private static string ConvertPathV5(string path)
{
#if NO_FILE
// not sure what to do here
throw new NotImplementedException("TODO");
#else
if (Path.DirectorySeparatorChar == '\\')
{
// replace embedded \\ with valid filename char
return path.Replace('\\', '-').Replace('/', '\\');
}
}
return path;
#endif
}
@@ -358,9 +366,6 @@ namespace SharpCompress.Common.Rar.Headers
private static string ConvertPathV4(string path)
{
#if NO_FILE
return path.Replace('\\', '/');
#else
if (Path.DirectorySeparatorChar == '/')
{
return path.Replace('\\', '/');
@@ -370,7 +375,6 @@ namespace SharpCompress.Common.Rar.Headers
return path.Replace('/', '\\');
}
return path;
#endif
}
public override string ToString()
@@ -380,20 +384,22 @@ namespace SharpCompress.Common.Rar.Headers
private ushort Flags { get; set; }
private bool HasFlag(ushort flag)
private bool HasFlag(ushort flag)
{
return (Flags & flag) == flag;
}
internal uint FileCrc
{
get {
if (IsRar5 && !HasFlag(FileFlagsV5.HAS_CRC32)) {
//!!! rar5:
internal uint FileCrc
{
get
{
if (IsRar5 && !HasFlag(FileFlagsV5.HAS_CRC32))
{
//!!! rar5:
throw new InvalidOperationException("TODO rar5");
}
return _fileCrc;
}
return _fileCrc;
}
private set => _fileCrc = value;
}
@@ -413,7 +419,7 @@ namespace SharpCompress.Common.Rar.Headers
//case 29: // rar 3.x compression
//case 50: // RAR 5.0 compression algorithm.
internal byte CompressionAlgorithm { get; private set; }
public bool IsSolid { get; private set; }
// unused for UnpackV1 implementation (limitation)
@@ -431,13 +437,14 @@ namespace SharpCompress.Common.Rar.Headers
internal long DataStartPosition { get; set; }
public Stream PackedStream { get; set; }
public bool IsSplitBefore => IsRar5 ? HasHeaderFlag(HeaderFlagsV5.SPLIT_BEFORE) : HasFlag(FileFlagsV4.SPLIT_BEFORE);
public bool IsSplitAfter => IsRar5 ? HasHeaderFlag(HeaderFlagsV5.SPLIT_AFTER) : HasFlag(FileFlagsV4.SPLIT_AFTER);
public bool IsDirectory => HasFlag(IsRar5 ? FileFlagsV5.DIRECTORY : FileFlagsV4.DIRECTORY);
//!!! TODO rar5
public bool IsEncrypted => HasFlag(FileFlagsV4.PASSWORD);
private bool isEncryptedRar5 = false;
public bool IsEncrypted => IsRar5 ? isEncryptedRar5 : HasFlag(FileFlagsV4.PASSWORD);
internal DateTime? FileLastModifiedTime { get; private set; }
internal DateTime? FileCreatedTime { get; private set; }


@@ -42,10 +42,10 @@ namespace SharpCompress.Common.Rar.Headers
}
internal static class EncryptionFlagsV5
{
{
// RAR 5.0 archive encryption header specific flags.
public const uint CHFL_CRYPT_PSWCHECK = 0x01; // Password check data is present.
public const uint FHEXTRA_CRYPT_PSWCHECK = 0x01; // Password check data is present.
public const uint FHEXTRA_CRYPT_HASHMAC = 0x02;
}

View File

@@ -1,6 +1,6 @@
namespace SharpCompress.Common.Rar.Headers
{
internal interface IRarHeader
internal interface IRarHeader
{
HeaderType HeaderType { get; }
}


@@ -11,71 +11,98 @@ namespace SharpCompress.Common.Rar.Headers
public bool IsRar5 { get; }
private MarkHeader(bool isRar5)
{
private MarkHeader(bool isRar5)
{
IsRar5 = isRar5;
}
public HeaderType HeaderType => HeaderType.Mark;
private static byte GetByte(Stream stream)
private static byte GetByte(Stream stream)
{
var b = stream.ReadByte();
if (b != -1)
{
return (byte)b;
}
throw new EndOfStreamException();
}
public static MarkHeader Read(Stream stream, bool leaveStreamOpen, bool lookForHeader)
{
int maxScanIndex = lookForHeader ? MAX_SFX_SIZE : 0;
try
{
int start = -1;
var b = GetByte(stream); start++;
while (start <= maxScanIndex)
{
// Rar old signature: 52 45 7E 5E
// Rar4 signature: 52 61 72 21 1A 07 00
// Rar5 signature: 52 61 72 21 1A 07 01 00
if (b == 0x52)
{
b = GetByte(stream); start++;
if (b == 0x61)
{
b = GetByte(stream); start++;
if (b != 0x72)
{
continue;
}
b = GetByte(stream); start++;
if (b != 0x21)
{
continue;
}
b = GetByte(stream); start++;
if (b != 0x1a)
{
continue;
}
b = GetByte(stream); start++;
if (b != 0x07)
{
continue;
}
b = GetByte(stream); start++;
if (b == 1)
{
b = GetByte(stream); start++;
if (b != 0)
{
continue;
}
return new MarkHeader(true); // Rar5
}
else if (b == 0)
{
return new MarkHeader(false); // Rar4
}
}
else if (b == 0x45)
{
b = GetByte(stream); start++;
if (b != 0x7e)
{
continue;
}
b = GetByte(stream); start++;
if (b != 0x5e)
{
continue;
}
throw new InvalidFormatException("Rar format version pre-4 is unsupported.");
}
}
else
{
b = GetByte(stream); start++;
}
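The byte-at-a-time scan above lets Read skip an SFX stub of up to MAX_SFX_SIZE bytes before the signature. The three signatures it distinguishes can also be checked against a buffer directly; a simplified sketch (RarSignature and Match are illustrative names, not part of the library):

using System;

static class RarSignature
{
    private static readonly byte[] Rar4 = { 0x52, 0x61, 0x72, 0x21, 0x1A, 0x07, 0x00 };
    private static readonly byte[] Rar5 = { 0x52, 0x61, 0x72, 0x21, 0x1A, 0x07, 0x01, 0x00 };

    // Returns 5 for a Rar5 signature, 4 for Rar4, 0 for no match at offset 0.
    public static int Match(ReadOnlySpan<byte> data) =>
        data.StartsWith(Rar5) ? 5 :
        data.StartsWith(Rar4) ? 4 : 0;
}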

View File

@@ -2,23 +2,23 @@
namespace SharpCompress.Common.Rar.Headers
{
internal class NewSubHeaderType : IEquatable<NewSubHeaderType>
internal sealed class NewSubHeaderType : IEquatable<NewSubHeaderType>
{
internal static readonly NewSubHeaderType SUBHEAD_TYPE_CMT = new('C', 'M', 'T');
//internal static final NewSubHeaderType SUBHEAD_TYPE_ACL = new (new byte[]{'A','C','L'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_STREAM = new (new byte[]{'S','T','M'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_UOWNER = new (new byte[]{'U','O','W'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_AV = new (new byte[]{'A','V'});
internal static readonly NewSubHeaderType SUBHEAD_TYPE_RR = new('R', 'R');
//internal static final NewSubHeaderType SUBHEAD_TYPE_OS2EA = new (new byte[]{'E','A','2'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_BEOSEA = new (new byte[]{'E','A','B','E'});
private readonly byte[] _bytes;
@@ -37,19 +37,13 @@ namespace SharpCompress.Common.Rar.Headers
{
return false;
}
for (int i = 0; i < bytes.Length; ++i)
{
if (_bytes[i] != bytes[i])
{
return false;
}
}
return true;
return _bytes.AsSpan().SequenceEqual(bytes);
}
public bool Equals(NewSubHeaderType other)
public bool Equals(NewSubHeaderType? other)
{
return Equals(other._bytes);
return other is not null && Equals(other._bytes);
}
}
}
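The rewritten Equals above replaces the manual byte loop with MemoryExtensions.SequenceEqual, which compares length and contents in one call (and is vectorized for byte spans). Usage in isolation:

using System;

byte[] a = { (byte)'C', (byte)'M', (byte)'T' };
byte[] b = { (byte)'C', (byte)'M', (byte)'T' };
bool same = a.AsSpan().SequenceEqual(b); // true: equal length and equal contents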

View File

@@ -3,12 +3,15 @@
namespace SharpCompress.Common.Rar.Headers
{
// ProtectHeader is part of the Recovery Record feature
internal class ProtectHeader : RarHeader
internal sealed class ProtectHeader : RarHeader
{
public ProtectHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Protect)
{
if (IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");
}
}
protected override void ReadFinish(MarkingBinaryReader reader)
@@ -23,6 +26,6 @@ namespace SharpCompress.Common.Rar.Headers
internal byte Version { get; private set; }
internal ushort RecSectors { get; private set; }
internal uint TotalBlocks { get; private set; }
internal byte[] Mark { get; private set; }
internal byte[]? Mark { get; private set; }
}
}

View File

@@ -11,7 +11,7 @@ namespace SharpCompress.Common.Rar.Headers
private readonly HeaderType _headerType;
private readonly bool _isRar5;
internal static RarHeader TryReadBase(RarCrcBinaryReader reader, bool isRar5, ArchiveEncoding archiveEncoding)
internal static RarHeader? TryReadBase(RarCrcBinaryReader reader, bool isRar5, ArchiveEncoding archiveEncoding)
{
try
{
@@ -23,12 +23,12 @@ namespace SharpCompress.Common.Rar.Headers
}
}
private RarHeader(RarCrcBinaryReader reader, bool isRar5, ArchiveEncoding archiveEncoding)
{
_headerType = HeaderType.Null;
_isRar5 = isRar5;
ArchiveEncoding = archiveEncoding;
if (IsRar5)
{
HeaderCrc = reader.ReadUInt32();
reader.ResetCrc();
@@ -45,7 +45,9 @@ namespace SharpCompress.Common.Rar.Headers
{
AdditionalDataSize = (long)reader.ReadRarVInt();
}
}
else
{
reader.Mark();
HeaderCrc = reader.ReadUInt16();
reader.ResetCrc();
@@ -59,7 +61,8 @@ namespace SharpCompress.Common.Rar.Headers
}
}
protected RarHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType)
{
_headerType = headerType;
_isRar5 = header.IsRar5;
HeaderCrc = header.HeaderCrc;
@@ -80,7 +83,8 @@ namespace SharpCompress.Common.Rar.Headers
VerifyHeaderCrc(reader.GetCrc32());
}
protected int RemainingHeaderBytes(MarkingBinaryReader reader)
{
return checked(HeaderSize - (int)reader.CurrentReadByteCount);
}
@@ -108,7 +112,7 @@ namespace SharpCompress.Common.Rar.Headers
protected ushort HeaderFlags { get; }
protected bool HasHeaderFlag(ushort flag)
{
return (HeaderFlags & flag) == flag;
}
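ReadRarVInt, used above for the RAR5 header sizes, reads RAR5's variable-length integer encoding: each byte contributes its low 7 bits (least-significant group first) and its high bit says whether another byte follows. A minimal decoder along those lines (a sketch based on the published RAR5 format notes, not the library's exact implementation):

using System.IO;

static ulong ReadVInt(Stream s)
{
    ulong value = 0;
    for (int shift = 0; shift < 64; shift += 7)
    {
        int b = s.ReadByte();
        if (b < 0)
        {
            throw new EndOfStreamException();
        }
        value |= (ulong)(b & 0x7F) << shift;
        if ((b & 0x80) == 0) // high bit clear: this was the last byte
        {
            break;
        }
    }
    return value;
}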

View File

@@ -1,4 +1,3 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.IO;
@@ -26,7 +25,7 @@ namespace SharpCompress.Common.Rar.Headers
_isRar5 = markHeader.IsRar5;
yield return markHeader;
RarHeader header;
RarHeader? header;
while ((header = TryReadNextHeader(stream)) != null)
{
yield return header;
@@ -39,28 +38,24 @@ namespace SharpCompress.Common.Rar.Headers
}
}
private RarHeader TryReadNextHeader(Stream stream)
private RarHeader? TryReadNextHeader(Stream stream)
{
RarCrcBinaryReader reader;
if (!IsEncrypted)
{
reader = new RarCrcBinaryReader(stream);
}
else
{
#if !NO_CRYPTO
if (Options.Password == null)
if (Options.Password is null)
{
throw new CryptographicException("Encrypted Rar archive has no password specified.");
}
reader = new RarCryptoBinaryReader(stream, Options.Password);
#else
throw new CryptographicException("Rar encryption unsupported on this platform");
#endif
}
var header = RarHeader.TryReadBase(reader, _isRar5, Options.ArchiveEncoding);
if (header == null)
if (header is null)
{
return null;
}
@@ -70,7 +65,7 @@ namespace SharpCompress.Common.Rar.Headers
case HeaderCodeV.RAR4_ARCHIVE_HEADER:
{
var ah = new ArchiveHeader(header, reader);
if (ah.IsEncrypted == true)
{
//!!! rar5 we don't know yet
IsEncrypted = true;
@@ -132,17 +127,13 @@ namespace SharpCompress.Common.Rar.Headers
case StreamingMode.Streaming:
{
var ms = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
if (fh.R4Salt == null)
if (fh.R4Salt is null)
{
fh.PackedStream = ms;
}
else
{
#if !NO_CRYPTO
fh.PackedStream = new RarCryptoWrapper(ms, Options.Password, fh.R4Salt);
#else
throw new NotSupportedException("RarCrypto not supported");
#endif
fh.PackedStream = new RarCryptoWrapper(ms, Options.Password!, fh.R4Salt);
}
}
break;
@@ -159,11 +150,11 @@ namespace SharpCompress.Common.Rar.Headers
return new EndArchiveHeader(header, reader);
}
case HeaderCodeV.RAR5_ARCHIVE_ENCRYPTION_HEADER:
{
var ch = new ArchiveCryptHeader(header, reader);
IsEncrypted = true;
return ch;
}
default:
{
throw new InvalidFormatException("Unknown Rar Header: " + header.HeaderCode);
@@ -171,21 +162,26 @@ namespace SharpCompress.Common.Rar.Headers
}
}
private void SkipData(FileHeader fh, RarCrcBinaryReader reader)
{
switch (StreamingMode)
{
case StreamingMode.Seekable:
{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:
{
//skip the data because it's useless?
reader.BaseStream.Skip(fh.CompressedSize);
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
}
}

View File

@@ -5,9 +5,12 @@ namespace SharpCompress.Common.Rar.Headers
internal class SignHeader : RarHeader
{
protected SignHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Sign)
{
if (IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");
}
}
protected override void ReadFinish(MarkingBinaryReader reader)

View File

@@ -1,10 +1,9 @@
#if !NO_CRYPTO
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Common.Rar
{
internal class RarCryptoBinaryReader : RarCrcBinaryReader
internal sealed class RarCryptoBinaryReader : RarCrcBinaryReader
{
private RarRijndael _rijndael;
private byte[] _salt;
@@ -20,7 +19,9 @@ namespace SharpCompress.Common.Rar
// coderb: not sure why this was being done at this logical point
//SkipQueue();
byte[] salt = ReadBytes(8);
InitializeAes(salt);
_salt = salt;
_rijndael = RarRijndael.InitializeFrom(_password, salt);
}
// track read count ourselves rather than using the underlying stream since we buffer
@@ -40,12 +41,6 @@ namespace SharpCompress.Common.Rar
private bool UseEncryption => _salt != null;
internal void InitializeAes(byte[] salt)
{
_salt = salt;
_rijndael = RarRijndael.InitializeFrom(_password, salt);
}
public override byte ReadByte()
{
if (UseEncryption)
@@ -82,7 +77,9 @@ namespace SharpCompress.Common.Rar
byte[] cipherText = ReadBytesNoCrc(16);
var readBytes = _rijndael.ProcessBlock(cipherText);
foreach (var readByte in readBytes)
{
_data.Enqueue(readByte);
}
}
}
@@ -111,5 +108,4 @@ namespace SharpCompress.Common.Rar
ClearQueue();
}
}
}
#endif
}
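RarCryptoBinaryReader above decrypts the underlying stream one 16-byte block at a time and parks the plaintext in a Queue<byte>, handing bytes out as the reader asks for them. The buffering pattern in isolation (BlockBuffer and readAndDecryptBlock are illustrative names, not SharpCompress types):

using System;
using System.Collections.Generic;

sealed class BlockBuffer
{
    private readonly Queue<byte> _data = new Queue<byte>();

    public byte NextByte(Func<byte[]> readAndDecryptBlock)
    {
        if (_data.Count == 0)
        {
            foreach (var b in readAndDecryptBlock()) // refills one 16-byte block
            {
                _data.Enqueue(b);
            }
        }
        return _data.Dequeue();
    }
}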

View File

@@ -1,12 +1,10 @@
#if !NO_CRYPTO
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Common.Rar
{
internal class RarCryptoWrapper : Stream
internal sealed class RarCryptoWrapper : Stream
{
private readonly Stream _actualStream;
private readonly byte[] _salt;
@@ -37,7 +35,7 @@ namespace SharpCompress.Common.Rar
public override int Read(byte[] buffer, int offset, int count)
{
if (_salt == null)
if (_salt is null)
{
return _actualStream.Read(buffer, offset, count);
}
@@ -52,20 +50,23 @@ namespace SharpCompress.Common.Rar
if (sizeToRead > 0)
{
int alignedSize = sizeToRead + ((~sizeToRead + 1) & 0xf);
Span<byte> cipherText = stackalloc byte[RarRijndael.CRYPTO_BLOCK_SIZE];
for (int i = 0; i < alignedSize / 16; i++)
{
//long ax = System.currentTimeMillis();
byte[] cipherText = new byte[RarRijndael.CRYPTO_BLOCK_SIZE];
_actualStream.Read(cipherText, 0, RarRijndael.CRYPTO_BLOCK_SIZE);
_actualStream.Read(cipherText);
var readBytes = _rijndael.ProcessBlock(cipherText);
foreach (var readByte in readBytes)
{
_data.Enqueue(readByte);
}
}
for (int i = 0; i < count; i++)
{
buffer[offset + i] = _data.Dequeue();
}
}
return count;
}
@@ -90,10 +91,9 @@ namespace SharpCompress.Common.Rar
if (_rijndael != null)
{
_rijndael.Dispose();
_rijndael = null;
_rijndael = null!;
}
base.Dispose(disposing);
}
}
}
#endif
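A note on the arithmetic above: sizeToRead + ((~sizeToRead + 1) & 0xf) rounds the read size up to the next multiple of the 16-byte AES block, because ~n + 1 is the two's-complement negation of n. The same computation in a more explicit form (AlignTo16 is a hypothetical helper name):

static int AlignTo16(int n) => n + ((~n + 1) & 0xF); // same as n + ((16 - n % 16) % 16)
// AlignTo16(1) == 16, AlignTo16(16) == 16, AlignTo16(17) == 32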

View File

@@ -10,8 +10,8 @@ namespace SharpCompress.Common.Rar
/// <summary>
/// As the V2017 port isn't complete, add this check to use the legacy Rar code.
/// </summary>
internal bool IsRarV3 => FileHeader.CompressionAlgorithm == 29 || FileHeader.CompressionAlgorithm == 36;
internal bool IsRarV3 => FileHeader.CompressionAlgorithm == 20 || FileHeader.CompressionAlgorithm == 26 || FileHeader.CompressionAlgorithm == 29 || FileHeader.CompressionAlgorithm == 36; //Nanook - Added 20+26 as Test arc from WinRar2.8 (algo 20) was failing with 2017 code
/// <summary>
/// The File's 32 bit CRC Hash
/// </summary>
@@ -22,6 +22,8 @@ namespace SharpCompress.Common.Rar
/// </summary>
public override string Key => FileHeader.FileName;
public override string? LinkTarget => null;
/// <summary>
/// The entry last modified time in the archive, if recorded
/// </summary>

View File

@@ -1,4 +1,4 @@
using System.IO;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Common.Rar
@@ -8,20 +8,21 @@ namespace SharpCompress.Common.Rar
/// </summary>
internal abstract class RarFilePart : FilePart
{
internal RarFilePart(MarkHeader mh, FileHeader fh)
internal RarFilePart(MarkHeader mh, FileHeader fh, int index)
: base(fh.ArchiveEncoding)
{
MarkHeader = mh;
FileHeader = fh;
Index = index;
}
internal MarkHeader MarkHeader { get; }
internal FileHeader FileHeader { get; }
internal override Stream GetRawStream()
internal override Stream? GetRawStream()
{
return null;
}
}
}

View File

@@ -1,7 +1,6 @@
#if !NO_CRYPTO
#nullable disable
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using SharpCompress.Crypto;
@@ -23,52 +22,45 @@ namespace SharpCompress.Common.Rar
_salt = salt;
}
private byte[] ComputeHash(byte[] input)
{
var sha = SHA1.Create();
return sha.ComputeHash(input);
}
private void Initialize()
{
_rijndael = new RijndaelEngine();
_aesInitializationVector = new byte[CRYPTO_BLOCK_SIZE];
int rawLength = 2 * _password.Length;
byte[] rawPassword = new byte[rawLength + 8];
byte[] passwordBytes = Encoding.UTF8.GetBytes(_password);
for (int i = 0; i < _password.Length; i++)
{
rawPassword[i * 2] = passwordBytes[i];
rawPassword[i * 2 + 1] = 0;
}
for (int i = 0; i < _salt.Length; i++)
{
rawPassword[i + rawLength] = _salt[i];
}
const int noOfRounds = (1 << 18);
const int iblock = 3;
byte[] digest;
byte[] data = new byte[(rawPassword.Length + iblock) * noOfRounds];
//TODO slow code below, find ways to optimize
for (int i = 0; i < noOfRounds; i++)
{
rawPassword.CopyTo(data, i * (rawPassword.Length + iblock));
data[i * (rawPassword.Length + iblock) + rawPassword.Length + 0] = (byte)i;
data[i * (rawPassword.Length + iblock) + rawPassword.Length + 1] = (byte)(i >> 8);
data[i * (rawPassword.Length + iblock) + rawPassword.Length + 2] = (byte)(i >> CRYPTO_BLOCK_SIZE);
if (i % (noOfRounds / CRYPTO_BLOCK_SIZE) == 0)
{
digest = SHA1.Create().ComputeHash(data, 0, (i + 1) * (rawPassword.Length + iblock));
_aesInitializationVector[i / (noOfRounds / CRYPTO_BLOCK_SIZE)] = digest[19];
}
}
digest = SHA1.Create().ComputeHash(data);
//slow code ends
byte[] aesKey = new byte[CRYPTO_BLOCK_SIZE];
@@ -76,11 +68,11 @@ namespace SharpCompress.Common.Rar
{
for (int j = 0; j < 4; j++)
{
aesKey[i * 4 + j] = (byte)
(((digest[i * 4] * 0x1000000) & 0xff000000 |
(uint)((digest[i * 4 + 1] * 0x10000) & 0xff0000) |
(uint)((digest[i * 4 + 2] * 0x100) & 0xff00) |
(uint)(digest[i * 4 + 3] & 0xff)) >> (j * 8));
}
}
@@ -95,22 +87,23 @@ namespace SharpCompress.Common.Rar
return rijndael;
}
public byte[] ProcessBlock(byte[] cipherText)
public byte[] ProcessBlock(ReadOnlySpan<byte> cipherText)
{
var plainText = new byte[CRYPTO_BLOCK_SIZE];
var decryptedBytes = new List<byte>();
_rijndael.ProcessBlock(cipherText, 0, plainText, 0);
Span<byte> plainText = stackalloc byte[CRYPTO_BLOCK_SIZE]; // 16 bytes
byte[] decryptedBytes = new byte[CRYPTO_BLOCK_SIZE];
_rijndael.ProcessBlock(cipherText, plainText);
for (int j = 0; j < plainText.Length; j++)
for (int j = 0; j < CRYPTO_BLOCK_SIZE; j++)
{
decryptedBytes.Add((byte) (plainText[j] ^ _aesInitializationVector[j%16])); //32:114, 33:101
decryptedBytes[j] = (byte)(plainText[j] ^ _aesInitializationVector[j % 16]); //32:114, 33:101
}
for (int j = 0; j < _aesInitializationVector.Length; j++)
{
_aesInitializationVector[j] = cipherText[j];
}
return decryptedBytes.ToArray();
return decryptedBytes;
}
public void Dispose()
@@ -118,4 +111,3 @@ namespace SharpCompress.Common.Rar
}
}
}
#endif
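The derivation above is the RAR3 scheme: SHA-1 over (password bytes ‖ salt ‖ 3-byte counter) accumulated for 2^18 rounds, with one IV byte sampled from the running digest every 2^18/16 rounds, and the AES key packed from the final digest with each 4-byte group byte-reversed. A compressed sketch of the same idea, for illustration only (Rar3KdfSketch is a hypothetical name; SHA1.HashData needs .NET 5+; the real code above zero-extends UTF-8 password bytes, which matches UTF-16LE only for ASCII passwords):

using System;
using System.Security.Cryptography;
using System.Text;

static class Rar3KdfSketch
{
    public static (byte[] Key, byte[] Iv) Derive(string password, byte[] salt)
    {
        const int rounds = 1 << 18;
        byte[] pw = Encoding.Unicode.GetBytes(password);      // UTF-16LE password bytes
        byte[] block = new byte[pw.Length + salt.Length + 3]; // pw ‖ salt ‖ 3-byte counter
        pw.CopyTo(block, 0);
        salt.CopyTo(block, pw.Length);

        byte[] data = new byte[block.Length * rounds];
        byte[] iv = new byte[16];
        for (int i = 0; i < rounds; i++)
        {
            block[^3] = (byte)i;
            block[^2] = (byte)(i >> 8);
            block[^1] = (byte)(i >> 16);
            block.CopyTo(data, i * block.Length);
            if (i % (rounds / 16) == 0)
            {
                // One IV byte per sixteenth of the rounds, from the digest of the data so far.
                byte[] d = SHA1.HashData(data.AsSpan(0, (i + 1) * block.Length));
                iv[i / (rounds / 16)] = d[19];
            }
        }
        byte[] digest = SHA1.HashData(data);
        byte[] key = new byte[16];
        for (int g = 0; g < 4; g++)       // first 16 digest bytes, each 4-byte group reversed
        {
            for (int j = 0; j < 4; j++)
            {
                key[g * 4 + j] = digest[g * 4 + (3 - j)];
            }
        }
        return (key, iv);
    }
}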

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -14,14 +14,17 @@ namespace SharpCompress.Common.Rar
public abstract class RarVolume : Volume
{
private readonly RarHeaderFactory _headerFactory;
internal int _maxCompressionAlgorithm;
internal RarVolume(StreamingMode mode, Stream stream, ReaderOptions options)
: base(stream, options)
internal RarVolume(StreamingMode mode, Stream stream, ReaderOptions options, int index = 0)
: base(stream, options, index)
{
_headerFactory = new RarHeaderFactory(mode, options);
}
#nullable disable
internal ArchiveHeader ArchiveHeader { get; private set; }
#nullable enable
internal StreamingMode Mode => _headerFactory.StreamingMode;
@@ -31,26 +34,28 @@ namespace SharpCompress.Common.Rar
internal IEnumerable<RarFilePart> GetVolumeFileParts()
{
MarkHeader lastMarkHeader = null;
MarkHeader? lastMarkHeader = null;
foreach (var header in _headerFactory.ReadHeaders(Stream))
{
switch (header.HeaderType)
{
case HeaderType.Mark:
{
lastMarkHeader = header as MarkHeader;
}
{
lastMarkHeader = (MarkHeader)header;
}
break;
case HeaderType.Archive:
{
ArchiveHeader = header as ArchiveHeader;
}
{
ArchiveHeader = (ArchiveHeader)header;
}
break;
case HeaderType.File:
{
var fh = header as FileHeader;
yield return CreateFilePart(lastMarkHeader, fh);
}
{
var fh = (FileHeader)header;
if (_maxCompressionAlgorithm < fh.CompressionAlgorithm)
_maxCompressionAlgorithm = fh.CompressionAlgorithm;
yield return CreateFilePart(lastMarkHeader!, fh);
}
break;
}
}
@@ -58,7 +63,7 @@ namespace SharpCompress.Common.Rar
private void EnsureArchiveHeaderLoaded()
{
if (ArchiveHeader == null)
if (ArchiveHeader is null)
{
if (Mode == StreamingMode.Streaming)
{
@@ -108,5 +113,37 @@ namespace SharpCompress.Common.Rar
return ArchiveHeader.IsSolid;
}
}
public int MinVersion
{
get
{
EnsureArchiveHeaderLoaded();
if (_maxCompressionAlgorithm >= 50)
return 5; //5-6
else if (_maxCompressionAlgorithm >= 29)
return 3; //3-4
else if (_maxCompressionAlgorithm >= 20)
return 2; //2
else
return 1;
}
}
public int MaxVersion
{
get
{
EnsureArchiveHeaderLoaded();
if (_maxCompressionAlgorithm >= 50)
return 6; //5-6
else if (_maxCompressionAlgorithm >= 29)
return 4; //3-4
else if (_maxCompressionAlgorithm >= 20)
return 2; //2
else
return 1;
}
}
}
}
}
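The Min/MaxVersion properties above map the highest compression-algorithm id seen in the volume's file headers to a WinRAR version range: 50+ means RAR 5-6, 29+ means RAR 3-4, 20+ means RAR 2, anything lower means RAR 1. The same mapping as a small standalone helper (illustrative; relational patterns need C# 9):

static (int Min, int Max) RarVersionRange(int maxCompressionAlgorithm) => maxCompressionAlgorithm switch
{
    >= 50 => (5, 6), // RAR 5.0 compression
    >= 29 => (3, 4), // rar 3.x compression
    >= 20 => (2, 2),
    _ => (1, 1),
};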

View File

@@ -3,15 +3,16 @@ using SharpCompress.Readers;
namespace SharpCompress.Common
{
public class ReaderExtractionEventArgs<T> : EventArgs
public sealed class ReaderExtractionEventArgs<T> : EventArgs
{
internal ReaderExtractionEventArgs(T entry, ReaderProgress readerProgress = null)
internal ReaderExtractionEventArgs(T entry, ReaderProgress? readerProgress = null)
{
Item = entry;
ReaderProgress = readerProgress;
}
public T Item { get; }
public ReaderProgress ReaderProgress { get; }
public ReaderProgress? ReaderProgress { get; }
}
}

View File

@@ -1,4 +1,6 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Compressors.LZMA;
@@ -22,7 +24,7 @@ namespace SharpCompress.Common.SevenZip
internal List<long> _packStreamStartPositions = new List<long>();
internal List<int> _folderStartFileIndex = new List<int>();
internal List<int> _fileIndexToFolderIndexMap = new List<int>();
internal IPasswordProvider PasswordProvider { get; }
public ArchiveDatabase(IPasswordProvider passwordProvider)
@@ -35,7 +37,7 @@ namespace SharpCompress.Common.SevenZip
_packSizes.Clear();
_packCrCs.Clear();
_folders.Clear();
_numUnpackStreamsVector = null;
_numUnpackStreamsVector = null!;
_files.Clear();
_packStreamStartPositions.Clear();
@@ -87,7 +89,7 @@ namespace SharpCompress.Common.SevenZip
{
// v3.13 incorrectly worked with empty folders
// v4.07: Loop for skipping empty folders
for (; ; )
{
if (folderIndex >= _folders.Count)
{
@@ -96,7 +98,7 @@ namespace SharpCompress.Common.SevenZip
_folderStartFileIndex.Add(i); // check it
if (_numUnpackStreamsVector[folderIndex] != 0)
if (_numUnpackStreamsVector![folderIndex] != 0)
{
break;
}
@@ -114,7 +116,7 @@ namespace SharpCompress.Common.SevenZip
indexInFolder++;
if (indexInFolder >= _numUnpackStreamsVector[folderIndex])
if (indexInFolder >= _numUnpackStreamsVector![folderIndex])
{
folderIndex++;
indexInFolder = 0;
@@ -152,13 +154,14 @@ namespace SharpCompress.Common.SevenZip
{
int packStreamIndex = folder._firstPackStreamId;
long folderStartPackPos = GetFolderStreamPos(folder, 0);
List<long> packSizes = new List<long>();
for (int j = 0; j < folder._packStreams.Count; j++)
int count = folder._packStreams.Count;
long[] packSizes = new long[count];
for (int j = 0; j < count; j++)
{
packSizes.Add(_packSizes[packStreamIndex + j]);
packSizes[j] = _packSizes[packStreamIndex + j];
}
return DecoderStreamHelper.CreateDecoderStream(stream, folderStartPackPos, packSizes.ToArray(), folder, pw);
return DecoderStreamHelper.CreateDecoderStream(stream, folderStartPackPos, packSizes, folder, pw);
}
private long GetFolderPackStreamSize(int folderIndex, int streamIndex)
@@ -179,4 +182,4 @@ namespace SharpCompress.Common.SevenZip
return 0;
}
}
}

View File

@@ -1,4 +1,6 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
@@ -88,7 +90,7 @@ namespace SharpCompress.Common.SevenZip
private void WaitAttribute(BlockType attribute)
{
for (; ; )
{
BlockType? type = ReadId();
if (type == attribute)
@@ -450,7 +452,7 @@ namespace SharpCompress.Common.SevenZip
#endif
BlockType? type;
for (; ; )
{
type = ReadId();
if (type == BlockType.End)
@@ -465,7 +467,7 @@ namespace SharpCompress.Common.SevenZip
SkipData();
}
if (packCrCs == null)
if (packCrCs is null)
{
packCrCs = new List<uint?>(numPackStreams);
for (int i = 0; i < numPackStreams; i++)
@@ -506,7 +508,7 @@ namespace SharpCompress.Common.SevenZip
int index = 0;
for (int i = 0; i < numFolders; i++)
{
var f = new CFolder { _firstPackStreamId = index };
folders.Add(f);
GetNextFolderItem(f);
index += f._packStreams.Count;
@@ -537,7 +539,7 @@ namespace SharpCompress.Common.SevenZip
#endif
}
for (; ; )
{
BlockType? type = ReadId();
if (type == BlockType.End)
@@ -578,7 +580,7 @@ namespace SharpCompress.Common.SevenZip
numUnpackStreamsInFolders = null;
BlockType? type;
for (; ; )
{
type = ReadId();
if (type == BlockType.NumUnpackStream)
@@ -600,7 +602,7 @@ namespace SharpCompress.Common.SevenZip
#endif
continue;
}
if (type == BlockType.Crc || type == BlockType.Size)
if (type is BlockType.Crc or BlockType.Size)
{
break;
}
@@ -611,7 +613,7 @@ namespace SharpCompress.Common.SevenZip
SkipData();
}
if (numUnpackStreamsInFolders == null)
if (numUnpackStreamsInFolders is null)
{
numUnpackStreamsInFolders = new List<int>(folders.Count);
for (int i = 0; i < folders.Count; i++)
@@ -670,7 +672,7 @@ namespace SharpCompress.Common.SevenZip
digests = null;
for (; ; )
{
if (type == BlockType.Crc)
{
@@ -703,7 +705,7 @@ namespace SharpCompress.Common.SevenZip
}
else if (type == BlockType.End)
{
if (digests == null)
if (digests is null)
{
digests = new List<uint?>(numDigestsTotal);
for (int i = 0; i < numDigestsTotal; i++)
@@ -753,7 +755,7 @@ namespace SharpCompress.Common.SevenZip
unpackSizes = null;
digests = null;
for (; ; )
{
switch (ReadId())
{
@@ -789,22 +791,14 @@ namespace SharpCompress.Common.SevenZip
#endif
try
{
long dataStartPos;
List<long> packSizes;
List<uint?> packCrCs;
List<CFolder> folders;
List<int> numUnpackStreamsInFolders;
List<long> unpackSizes;
List<uint?> digests;
ReadStreamsInfo(null,
out dataStartPos,
out packSizes,
out packCrCs,
out folders,
out numUnpackStreamsInFolders,
out unpackSizes,
out digests);
out long dataStartPos,
out List<long> packSizes,
out List<uint?> packCrCs,
out List<CFolder> folders,
out List<int> numUnpackStreamsInFolders,
out List<long> unpackSizes,
out List<uint?> digests);
dataStartPos += baseOffset;
@@ -932,7 +926,7 @@ namespace SharpCompress.Common.SevenZip
BitVector antiFileVector = null;
int numEmptyStreams = 0;
for (; ; )
{
type = ReadId();
if (type == BlockType.End)
@@ -967,7 +961,7 @@ namespace SharpCompress.Common.SevenZip
#if DEBUG
Log.Write("WinAttributes:");
#endif
ReadAttributeVector(dataVector, numFiles, delegate (int i, uint? attr)
{
// Some third party implementations established an unofficial extension
// of the 7z archive format by placing posix file attributes in the high
@@ -1055,7 +1049,7 @@ namespace SharpCompress.Common.SevenZip
#if DEBUG
Log.Write("StartPos:");
#endif
ReadNumberVector(dataVector, numFiles, delegate (int i, long? startPos)
{
db._files[i].StartPos = startPos;
#if DEBUG
@@ -1070,7 +1064,7 @@ namespace SharpCompress.Common.SevenZip
#if DEBUG
Log.Write("CTime:");
#endif
ReadDateTimeVector(dataVector, numFiles, delegate (int i, DateTime? time)
{
db._files[i].CTime = time;
#if DEBUG
@@ -1085,7 +1079,7 @@ namespace SharpCompress.Common.SevenZip
#if DEBUG
Log.Write("ATime:");
#endif
ReadDateTimeVector(dataVector, numFiles, delegate (int i, DateTime? time)
{
db._files[i].ATime = time;
#if DEBUG
@@ -1100,7 +1094,7 @@ namespace SharpCompress.Common.SevenZip
#if DEBUG
Log.Write("MTime:");
#endif
ReadDateTimeVector(dataVector, numFiles, delegate (int i, DateTime? time)
{
db._files[i].MTime = time;
#if DEBUG
@@ -1254,7 +1248,7 @@ namespace SharpCompress.Common.SevenZip
if (nextHeaderOffset > _streamEnding - db._startPositionAfterHeader)
{
throw new IndexOutOfRangeException();
throw new InvalidOperationException("nextHeaderOffset is invalid");
}
_stream.Seek(nextHeaderOffset, SeekOrigin.Current);
@@ -1443,19 +1437,19 @@ namespace SharpCompress.Common.SevenZip
private Stream GetCachedDecoderStream(ArchiveDatabase db, int folderIndex)
{
Stream s;
if (!_cachedStreams.TryGetValue(folderIndex, out s))
if (!_cachedStreams.TryGetValue(folderIndex, out Stream s))
{
CFolder folderInfo = db._folders[folderIndex];
int packStreamIndex = db._folders[folderIndex]._firstPackStreamId;
long folderStartPackPos = db.GetFolderStreamPos(folderInfo, 0);
List<long> packSizes = new List<long>();
for (int j = 0; j < folderInfo._packStreams.Count; j++)
var count = folderInfo._packStreams.Count;
long[] packSizes = new long[count];
for (int j = 0; j < count; j++)
{
packSizes.Add(db._packSizes[packStreamIndex + j]);
packSizes[j] = db._packSizes[packStreamIndex + j];
}
s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes.ToArray(), folderInfo,
s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes, folderInfo,
db.PasswordProvider);
_cachedStreams.Add(folderIndex, s);
}
@@ -1486,16 +1480,11 @@ namespace SharpCompress.Common.SevenZip
public void Extract(ArchiveDatabase db, int[] indices)
{
int numItems;
bool allFilesMode = (indices == null);
if (allFilesMode)
{
numItems = db._files.Count;
}
else
{
numItems = indices.Length;
}
bool allFilesMode = (indices is null);
int numItems = allFilesMode
? db._files.Count
: indices.Length;
if (numItems == 0)
{
@@ -1528,6 +1517,7 @@ namespace SharpCompress.Common.SevenZip
}
}
byte[] buffer = null;
foreach (CExtractFolderInfo efi in extractFolderInfoVector)
{
int startIndex;
@@ -1553,18 +1543,19 @@ namespace SharpCompress.Common.SevenZip
int packStreamIndex = db._folders[folderIndex]._firstPackStreamId;
long folderStartPackPos = db.GetFolderStreamPos(folderInfo, 0);
List<long> packSizes = new List<long>();
for (int j = 0; j < folderInfo._packStreams.Count; j++)
var count = folderInfo._packStreams.Count;
long[] packSizes = new long[count];
for (int j = 0; j < count; j++)
{
packSizes.Add(db._packSizes[packStreamIndex + j]);
packSizes[j] = db._packSizes[packStreamIndex + j];
}
// TODO: If the decoding fails the last file may be extracted incompletely. Delete it?
Stream s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes.ToArray(),
Stream s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes,
folderInfo, db.PasswordProvider);
byte[] buffer = new byte[4 << 10];
for (;;)
buffer ??= new byte[4 << 10];
for (; ; )
{
int processed = s.Read(buffer, 0, buffer.Length);
if (processed == 0)

View File

@@ -1,4 +1,6 @@
#nullable disable
namespace SharpCompress.Common.SevenZip
{
internal class CCoderInfo
{

View File

@@ -1,4 +1,6 @@
#nullable disable
using System;
namespace SharpCompress.Common.SevenZip
{

View File

@@ -30,7 +30,7 @@ namespace SharpCompress.Common.SevenZip
}
}
throw new Exception();
throw new InvalidOperationException();
}
public int GetNumOutStreams()
@@ -185,4 +185,4 @@ namespace SharpCompress.Common.SevenZip
return true;
}
}
}

View File

@@ -1,6 +1,6 @@
namespace SharpCompress.Common.SevenZip
{
internal struct CMethodId
internal readonly struct CMethodId
{
public const ulong K_COPY_ID = 0;
public const ulong K_LZMA_ID = 0x030101;
@@ -24,9 +24,9 @@
return _id.GetHashCode();
}
public override bool Equals(object obj)
public override bool Equals(object? obj)
{
return obj is CMethodId && (CMethodId)obj == this;
return obj is CMethodId other && Equals(other);
}
public bool Equals(CMethodId other)
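Making CMethodId a readonly struct and rewriting Equals with a type pattern, as above, is the standard equality shape for a small immutable value type. A generic sketch of the pattern (MethodId is an illustrative name, not the library's type):

using System;

readonly struct MethodId : IEquatable<MethodId>
{
    private readonly ulong _id;

    public MethodId(ulong id) => _id = id;

    public bool Equals(MethodId other) => _id == other._id;

    public override bool Equals(object? obj) => obj is MethodId other && Equals(other);

    public override int GetHashCode() => _id.GetHashCode();
}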

View File

@@ -161,7 +161,7 @@ namespace SharpCompress.Common.SevenZip
{
int ending = Offset;
for (; ; )
{
if (ending + 2 > _ending)
{

View File

@@ -18,6 +18,8 @@ namespace SharpCompress.Common.SevenZip
public override string Key => FilePart.Header.Name;
public override string? LinkTarget => null;
public override long CompressedSize => 0;
public override long Size => FilePart.Header.Size;
@@ -30,13 +32,13 @@ namespace SharpCompress.Common.SevenZip
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted => false;
public override bool IsEncrypted => FilePart.IsEncrypted;
public override bool IsDirectory => FilePart.Header.IsDir;
public override bool IsSplitAfter => false;
public override int? Attrib => (int)FilePart.Header.Attrib;
public override int? Attrib => FilePart.Header.Attrib.HasValue ? (int?)FilePart.Header.Attrib.Value : null;
internal override IEnumerable<FilePart> Parts => FilePart.AsEnumerable<FilePart>();
}

View File

@@ -1,4 +1,4 @@
using System;
using System.IO;
using System.Linq;
using SharpCompress.IO;
@@ -25,12 +25,11 @@ namespace SharpCompress.Common.SevenZip
}
internal CFileItem Header { get; }
internal CFolder Folder { get; }
internal int Index { get; }
internal CFolder? Folder { get; }
internal override string FilePartName => Header.Name;
internal override Stream GetRawStream()
internal override Stream? GetRawStream()
{
return null;
}
@@ -39,11 +38,11 @@ namespace SharpCompress.Common.SevenZip
{
if (!Header.HasStream)
{
return null;
return null!;
}
var folderStream = _database.GetFolderStream(_stream, Folder, _database.PasswordProvider);
var folderStream = _database.GetFolderStream(_stream, Folder!, _database.PasswordProvider);
int firstFileIndex = _database._folderStartFileIndex[_database._folders.IndexOf(Folder)];
int firstFileIndex = _database._folderStartFileIndex[_database._folders.IndexOf(Folder!)];
int skipCount = Index - firstFileIndex;
long skipSize = 0;
for (int i = 0; i < skipCount; i++)
@@ -61,7 +60,7 @@ namespace SharpCompress.Common.SevenZip
{
get
{
if (_type == null)
if (_type is null)
{
_type = GetCompression();
}
@@ -82,25 +81,27 @@ namespace SharpCompress.Common.SevenZip
internal CompressionType GetCompression()
{
var coder = Folder._coders.First();
var coder = Folder!._coders.First();
switch (coder._methodId._id)
{
case K_LZMA:
case K_LZMA2:
{
return CompressionType.LZMA;
}
case K_PPMD:
{
return CompressionType.PPMd;
}
case K_B_ZIP2:
{
return CompressionType.BZip2;
}
default:
throw new NotImplementedException();
}
}
internal bool IsEncrypted => Folder!._coders.FindIndex(c => c._methodId._id == CMethodId.K_AES_ID) != -1;
}
}

View File

@@ -1,14 +1,13 @@
using System.IO;
using SharpCompress.Archives;
using SharpCompress.Readers;
namespace SharpCompress.Common.SevenZip
{
public class SevenZipVolume : Volume
{
public SevenZipVolume(Stream stream, ReaderOptions readerFactoryOptions)
: base(stream, readerFactoryOptions)
public SevenZipVolume(Stream stream, ReaderOptions readerFactoryOptions, int index = 0)
: base(stream, readerFactoryOptions, index)
{
}
}
}
}

View File

@@ -1,11 +1,13 @@
#nullable disable
using System;
using System.Buffers.Binary;
using System.IO;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Common.Tar.Headers
{
internal class TarHeader
internal sealed class TarHeader
{
internal static readonly DateTime EPOCH = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
@@ -15,12 +17,11 @@ namespace SharpCompress.Common.Tar.Headers
}
internal string Name { get; set; }
internal string LinkName { get; set; }
//internal int Mode { get; set; }
//internal int UserId { get; set; }
//internal string UserName { get; set; }
//internal int GroupId { get; set; }
//internal string GroupName { get; set; }
internal long Mode { get; set; }
internal long UserId { get; set; }
internal long GroupId { get; set; }
internal long Size { get; set; }
internal DateTime LastModifiedTime { get; set; }
internal EntryType EntryType { get; set; }
@@ -38,16 +39,17 @@ namespace SharpCompress.Common.Tar.Headers
WriteOctalBytes(0, buffer, 116, 8); // group ID
//ArchiveEncoding.UTF8.GetBytes("magic").CopyTo(buffer, 257);
if (Name.Length > 100)
var nameByteCount = ArchiveEncoding.GetEncoding().GetByteCount(Name);
if (nameByteCount > 100)
{
// Set mock filename and filetype to indicate the next block is the actual name of the file
WriteStringBytes("././@LongLink", buffer, 0, 100);
buffer[156] = (byte)EntryType.LongName;
WriteOctalBytes(Name.Length + 1, buffer, 124, 12);
WriteOctalBytes(nameByteCount + 1, buffer, 124, 12);
}
else
{
WriteStringBytes(Name, buffer, 0, 100);
WriteStringBytes(ArchiveEncoding.Encode(Name), buffer, 100);
WriteOctalBytes(Size, buffer, 124, 12);
var time = (long)(LastModifiedTime.ToUniversalTime() - EPOCH).TotalSeconds;
WriteOctalBytes(time, buffer, 136, 12);
@@ -55,11 +57,10 @@ namespace SharpCompress.Common.Tar.Headers
if (Size >= 0x1FFFFFFFF)
{
byte[] bytes = DataConverter.BigEndian.GetBytes(Size);
var bytes12 = new byte[12];
bytes.CopyTo(bytes12, 12 - bytes.Length);
Span<byte> bytes12 = stackalloc byte[12];
BinaryPrimitives.WriteInt64BigEndian(bytes12.Slice(4), Size);
bytes12[0] |= 0x80;
bytes12.CopyTo(buffer, 124);
bytes12.CopyTo(buffer.AsSpan(124));
}
}
@@ -68,10 +69,17 @@ namespace SharpCompress.Common.Tar.Headers
output.Write(buffer, 0, buffer.Length);
if (Name.Length > 100)
if (nameByteCount > 100)
{
WriteLongFilenameHeader(output);
Name = Name.Substring(0, 100);
// Truncate to a short name of fewer than 100 - [max byte count of one character] bytes.
// The extra subtraction is needed to prevent an infinite loop; for example:
//
// var bytes = Encoding.UTF8.GetBytes(new string((char)0x3042, 100));
// var truncated = Encoding.UTF8.GetBytes(Encoding.UTF8.GetString(bytes, 0, 100));
//
// here truncated.Length is 102, so WriteLongFilenameHeader would recurse forever.
Name = ArchiveEncoding.Decode(ArchiveEncoding.Encode(Name), 0, 100 - ArchiveEncoding.GetEncoding().GetMaxByteCount(1));
Write(output);
}
}
@@ -87,7 +95,7 @@ namespace SharpCompress.Common.Tar.Headers
{
numPaddingBytes = BLOCK_SIZE;
}
output.Write(new byte[numPaddingBytes], 0, numPaddingBytes);
output.Write(stackalloc byte[numPaddingBytes]);
}
internal bool Read(BinaryReader reader)
@@ -98,6 +106,12 @@ namespace SharpCompress.Common.Tar.Headers
return false;
}
// for symlinks, additionally read the linkname
if (ReadEntryType(buffer) == EntryType.SymLink)
{
LinkName = ArchiveEncoding.Decode(buffer, 157, 100).TrimNulls();
}
if (ReadEntryType(buffer) == EntryType.LongName)
{
Name = ReadLongName(reader, buffer);
@@ -111,9 +125,12 @@ namespace SharpCompress.Common.Tar.Headers
EntryType = ReadEntryType(buffer);
Size = ReadSize(buffer);
//Mode = ReadASCIIInt32Base8(buffer, 100, 7);
//UserId = ReadASCIIInt32Base8(buffer, 108, 7);
//GroupId = ReadASCIIInt32Base8(buffer, 116, 7);
Mode = ReadAsciiInt64Base8(buffer, 100, 7);
if(EntryType == EntryType.Directory)
Mode |= 0b1_000_000_000;
UserId = ReadAsciiInt64Base8(buffer, 108, 7);
GroupId = ReadAsciiInt64Base8(buffer, 116, 7);
long unixTimeStamp = ReadAsciiInt64Base8(buffer, 136, 11);
LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime();
@@ -161,8 +178,9 @@ namespace SharpCompress.Common.Tar.Headers
{
if ((buffer[124] & 0x80) == 0x80) // if size in binary
{
return DataConverter.BigEndian.GetInt64(buffer, 0x80);
return BinaryPrimitives.ReadInt64BigEndian(buffer.AsSpan(0x80));
}
return ReadAsciiInt64Base8(buffer, 124, 11);
}
@@ -177,11 +195,18 @@ namespace SharpCompress.Common.Tar.Headers
return buffer;
}
private static void WriteStringBytes(ReadOnlySpan<byte> name, Span<byte> buffer, int length)
{
name.CopyTo(buffer);
int i = Math.Min(length, name.Length);
buffer.Slice(i, length - i).Clear();
}
private static void WriteStringBytes(string name, byte[] buffer, int offset, int length)
{
int i;
for (i = 0; i < length - 1 && i < name.Length; ++i)
for (i = 0; i < length && i < name.Length; ++i)
{
buffer[offset + i] = (byte)name[i];
}
@@ -236,10 +261,16 @@ namespace SharpCompress.Common.Tar.Headers
return Convert.ToInt64(s);
}
private static readonly byte[] eightSpaces = {
(byte)' ', (byte)' ', (byte)' ', (byte)' ',
(byte)' ', (byte)' ', (byte)' ', (byte)' '
};
internal static int RecalculateChecksum(byte[] buf)
{
// Set default value for checksum. That is 8 spaces.
Encoding.UTF8.GetBytes(" ").CopyTo(buf, 148);
eightSpaces.CopyTo(buf, 148);
// Calculate checksum
int headerChecksum = 0;
@@ -252,7 +283,7 @@ namespace SharpCompress.Common.Tar.Headers
internal static int RecalculateAltChecksum(byte[] buf)
{
Encoding.UTF8.GetBytes(" ").CopyTo(buf, 148);
eightSpaces.CopyTo(buf, 148);
int headerChecksum = 0;
foreach (byte b in buf)
{
@@ -272,4 +303,4 @@ namespace SharpCompress.Common.Tar.Headers
public string Magic { get; set; }
}
}
}
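RecalculateChecksum above follows the tar format rule: the checksum is the unsigned sum of all 512 header bytes, computed with the 8-byte checksum field (offsets 148-155) treated as ASCII spaces. Standalone, for reference (TarChecksum is an illustrative name):

static int TarChecksum(byte[] header) // expects the 512-byte header block
{
    int sum = 0;
    for (int i = 0; i < header.Length; i++)
    {
        // bytes 148..155 are the checksum field, counted as spaces (0x20)
        sum += (i >= 148 && i < 156) ? (byte)' ' : header[i];
    }
    return sum;
}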

View File

@@ -1,9 +1,10 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Tar
{
@@ -23,6 +24,8 @@ namespace SharpCompress.Common.Tar
public override string Key => _filePart.Header.Name;
public override string LinkTarget => _filePart.Header.LinkName;
public override long CompressedSize => _filePart.Header.Size;
public override long Size => _filePart.Header.Size;
@@ -41,6 +44,12 @@ namespace SharpCompress.Common.Tar
public override bool IsSplitAfter => false;
public long Mode => _filePart.Header.Mode;
public long UserID => _filePart.Header.UserId;
public long GroupId => _filePart.Header.GroupId;
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
internal static IEnumerable<TarEntry> GetEntries(StreamingMode mode, Stream stream,

View File

@@ -4,7 +4,7 @@ using SharpCompress.IO;
namespace SharpCompress.Common.Tar
{
internal class TarFilePart : FilePart
internal sealed class TarFilePart : FilePart
{
private readonly Stream _seekableStream;
@@ -23,13 +23,13 @@ namespace SharpCompress.Common.Tar
{
if (_seekableStream != null)
{
_seekableStream.Position = Header.DataStartPosition.Value;
return new ReadOnlySubStream(_seekableStream, Header.Size);
_seekableStream.Position = Header.DataStartPosition!.Value;
return new TarReadOnlySubStream(_seekableStream, Header.Size);
}
return Header.PackedStream;
}
internal override Stream GetRawStream()
internal override Stream? GetRawStream()
{
return null;
}

View File

@@ -2,17 +2,16 @@
using System.IO;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Tar
{
internal static class TarHeaderFactory
{
internal static IEnumerable<TarHeader> ReadHeader(StreamingMode mode, Stream stream, ArchiveEncoding archiveEncoding)
internal static IEnumerable<TarHeader?> ReadHeader(StreamingMode mode, Stream stream, ArchiveEncoding archiveEncoding)
{
while (true)
{
TarHeader header = null;
TarHeader? header = null;
try
{
BinaryReader reader = new BinaryReader(stream);

View File

@@ -1,16 +1,16 @@
using SharpCompress.IO;
using System;
using System.IO;
namespace SharpCompress.Common.Tar
{
internal class TarReadOnlySubStream : Stream
internal class TarReadOnlySubStream : NonDisposingStream
{
private bool _isDisposed;
private long _amountRead;
public TarReadOnlySubStream(Stream stream, long bytesToRead)
public TarReadOnlySubStream(Stream stream, long bytesToRead) : base(stream, throwOnDispose: false)
{
Stream = stream;
BytesLeftToRead = bytesToRead;
}
@@ -20,28 +20,29 @@ namespace SharpCompress.Common.Tar
{
return;
}
_isDisposed = true;
if (disposing)
{
// Ensure we read all remaining blocks for this entry.
Stream.Skip(BytesLeftToRead);
_amountRead += BytesLeftToRead;
// If the last block wasn't a full 512 bytes, skip the remaining padding bytes.
var bytesInLastBlock = _amountRead % 512;
if (bytesInLastBlock != 0)
{
Stream.Skip(512 - bytesInLastBlock);
}
}
base.Dispose(disposing);
}
private long BytesLeftToRead { get; set; }
public Stream Stream { get; }
public override bool CanRead => true;
public override bool CanSeek => false;
@@ -72,6 +73,22 @@ namespace SharpCompress.Common.Tar
return read;
}
public override int ReadByte()
{
if (BytesLeftToRead <= 0)
{
return -1;
}
int value = Stream.ReadByte();
if (value != -1)
{
--BytesLeftToRead;
++_amountRead;
}
return value;
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException();

Some files were not shown because too many files have changed in this diff.