Compare commits

...

236 Commits

Author SHA1 Message Date
Adam Hathcock
3009e6dcfd Mark for 0.32.2 2022-07-29 10:45:56 +01:00
Adam Hathcock
70343b17bc add more tests for uncompressed streaming zips 2022-07-29 09:47:35 +01:00
Adam Hathcock
3f6027ec2c Merge pull request #686 from Erior/477
Mitigation of problems
2022-07-29 09:41:24 +01:00
Lars Vahlenberg
5706732c55 Naive implementation of searching of DataDescriptor, not compatible with big archives (>32bit), but handles test cases. 2022-07-28 23:03:06 +02:00
Lars Vahlenberg
ad633a9dd0 missing test file from error report 2022-07-28 21:20:42 +02:00
Lars Vahlenberg
7c56df1237 Mitigation of problems 2022-07-28 20:36:28 +02:00
Adam Hathcock
c1110f2897 Merge pull request #683 from OwnageIsMagic/patch-1
WriteAll: use delegate instead of Expression
2022-07-27 10:13:50 +01:00
Adam Hathcock
647642578b Merge branch 'master' into patch-1 2022-07-27 09:49:13 +01:00
OwnageIsMagic
5ca4efac31 WriteAll: revert 109a7c1 2022-07-26 21:36:00 +03:00
Adam Hathcock
deddf12b70 Merge pull request #684 from daverant/nuget-license
Include license in nuget package
2022-07-26 16:21:41 +01:00
OwnageIsMagic
109a7c12ea WriteAll: update delegate type 2022-07-19 04:03:26 +03:00
David Rant
f955031e27 Hide license in IDE 2022-07-18 17:16:22 +01:00
David Rant
6a69c6cd02 Reference bundled package license file 2022-07-18 17:11:06 +01:00
David Rant
c1d4ac45ab Include license when packing 2022-07-18 17:10:36 +01:00
OwnageIsMagic
2946a35b0e WriteAll: use delegate instead of Expression 2022-07-18 04:36:31 +03:00
Adam Hathcock
c73a8cb18f Merge pull request #682 from adamhathcock/RarFileVolIdx_RarArcVer_GzCrc 2022-07-16 11:29:48 +01:00
Nanook
574a093038 Minor tweak that got missed in the last tidy. 2022-07-15 21:25:39 +01:00
Nanook
4eb1fe0b80 RarArchive has Min/MaxVersion. RarEntry has Volume Indexes. GZ CRC fix. 2022-07-15 21:15:10 +01:00
Adam Hathcock
4c46cd725b Merge pull request #679 from louis-michelbergeron/master
Fix LZMADecoder Code function
2022-06-28 08:27:13 +01:00
Adam Hathcock
fdbd0e1fba Merge branch 'master' into master 2022-06-28 08:21:49 +01:00
louis-michel
5801168ce0 Merge branch 'master' of https://github.com/louis-michelbergeron/sharpcompress 2022-06-27 19:13:20 -04:00
louis-michel
d4c7551087 Fix LZMA Code function 2022-06-27 19:13:10 -04:00
Adam Hathcock
c9daf0c9f5 Merge pull request #675 from Erior/feature/#636
ReadOnlySubStream overrides and adds logic #636
2022-06-22 11:17:18 +01:00
Adam Hathcock
8cb566b031 Merge branch 'master' into feature/#636 2022-06-22 09:05:57 +01:00
Lars Vahlenberg
089b16326e ReadOnlySubStream overrides and adds logic to Read byte[], needs to have same logic for Span<byte> for consistency. 2022-06-21 19:30:07 +02:00
Adam Hathcock
c0e43cc0e5 Mark for 0.32.1 2022-06-20 10:32:47 +01:00
Adam Hathcock
514c3539e6 Merge pull request #672 from MartinDemberger/Task_477
Corrected skip-marker on skip of uncompressed ZIP file with missing size information.
2022-06-20 10:31:31 +01:00
Adam Hathcock
62c94a178c Merge branch 'master' into Task_477 2022-06-20 10:26:45 +01:00
Adam Hathcock
9fee38b18d Merge pull request #674 from MartinDemberger/DeduplicateNonDisposing
Suppress nested NonDisposingStream
2022-06-20 10:25:25 +01:00
Adam Hathcock
cd3114d39e Merge branch 'master' into DeduplicateNonDisposing 2022-06-20 10:20:02 +01:00
Adam Hathcock
12b4e15812 Merge pull request #673 from Erior/feature/Malformed-zip-file-generated
Feature/malformed zip file generated
2022-06-20 10:19:41 +01:00
Martin Demberger
35336a0827 Suppress nested NonDisposingStream 2022-06-19 22:05:52 +02:00
Martin Demberger
ece7cbfec3 Set skip-marker when stream is skipped 2022-06-18 14:35:14 +02:00
Lars Vahlenberg
a00075ee0d Wrong flags set, we do not expose this in the interface 2022-06-17 15:07:07 +02:00
Lars Vahlenberg
b6c4e28b4d Generated test case, however, don't see any problems 2022-06-16 23:32:46 +02:00
Martin Demberger
8b55cce39a Better handling of uncompressed zip files. 2022-06-15 16:28:14 +02:00
Adam Hathcock
6e99446ce5 Mark for 0.32 2022-06-13 15:28:54 +01:00
Adam Hathcock
20a09b4866 Drop net5 2022-06-13 15:24:53 +01:00
Adam Hathcock
7f7db5eabd Merge pull request #669 from louis-michelbergeron/master
XZ decoding BCJ filters support
2022-06-13 08:37:28 +01:00
louis-michelbergeron
0651d064fc Update README.md 2022-06-10 15:32:41 -04:00
louis-michelbergeron
73ca7759d3 Update README.md
Contribution line.
2022-06-10 15:32:08 -04:00
louis-michel
0f112d0685 BCJ executable filter (only for decoding), used by XZ. 2022-06-10 13:29:42 -04:00
Adam Hathcock
fa5c91ecf6 Merge pull request #663 from Nanook/master
Align behaviour of 7Zip exception with encrypted filenames arc with rar when no password provided
2022-05-04 08:21:39 +01:00
Nanook
3b2fd1b9fa Merge branch 'adamhathcock:master' into master 2022-05-04 01:36:58 +01:00
Craig
e424094fdf 7z encrypted filename exception with no password matches rar behaviour. 2022-05-04 01:35:58 +01:00
Adam Hathcock
bad9ab2c9d Merge pull request #662 from Nanook/master
Properly integrated zip multivolume and general split support.
2022-05-03 08:23:33 +01:00
Craig
61c01ce9b0 Properly integrated zip multivolume and split support. 2022-04-30 19:35:40 +01:00
Adam Hathcock
3de5df9f38 Merge pull request #661 from Nanook/master
Added multipart Zip support (z01...). Added IEntry.IsSolid
2022-04-29 13:43:37 +01:00
Craig
910aa1c22e Corrected the Crc exception as it was within a #DEBUG define 2022-04-27 14:12:00 +01:00
Craig
71c8f3129f RarStream Position fix, it was returning the file size. 7Zip CrcCheckStream always failed. Added a Solid Rar entry CRC test. 2022-04-27 13:16:05 +01:00
Craig
224614312f Added multipart Zip support (z01...). Added IEntry.IsSolid and implemented Rar and 7Zip support. 2022-04-25 01:16:53 +01:00
Adam Hathcock
f717133947 Merge pull request #660 from adamhathcock/dependabot/github_actions/actions/upload-artifact-3
Bump actions/upload-artifact from 2 to 3
2022-04-21 11:16:52 +01:00
dependabot[bot]
fcbfcfed03 Bump actions/upload-artifact from 2 to 3
Bumps [actions/upload-artifact](https://github.com/actions/upload-artifact) from 2 to 3.
- [Release notes](https://github.com/actions/upload-artifact/releases)
- [Commits](https://github.com/actions/upload-artifact/compare/v2...v3)

---
updated-dependencies:
- dependency-name: actions/upload-artifact
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-04-21 08:52:47 +00:00
Adam Hathcock
e6601c91ca Merge pull request #659 from adamhathcock/updates
Dependency updates and start of enforcing some C# standards
2022-04-21 09:52:26 +01:00
Adam Hathcock
0379903689 Fix tests 2022-04-21 08:59:35 +01:00
Adam Hathcock
6950eebf9f Dependency updates and start of enforcing some C# standards 2022-04-21 08:54:59 +01:00
Adam Hathcock
c15f1327c9 Merge pull request #658 from Nanook/master
Added Split archive support with unit tests. …
2022-04-21 08:26:06 +01:00
Craig
ec1999f73d Added Split archive support with unit tests. Added ArchiveFactory.IsArchive() and minor refactor. 2022-04-21 00:12:26 +01:00
Adam Hathcock
3d0a18b05d Merge pull request #655 from Ryhon0/master
Tar file mode, user and group
2022-04-11 16:43:40 +01:00
Ryhon
156a02c5a8 Tar file mode, user and group 2022-04-11 14:33:55 +02:00
Adam Hathcock
eba1a443e3 Merge pull request #652 from ds5678/net6_with_trimming
Add a net 6 target and make trimmable
2022-04-11 10:14:18 +01:00
ds5678
eb44cdc903 Update dotnetcore.yml 2022-04-08 02:21:07 -04:00
Jeremy Pritts
196df63de2 fix build project 2022-03-31 14:52:36 -04:00
Jeremy Pritts
ed3c11f44c update workflow and csproj files 2022-03-31 04:04:56 -04:00
Jeremy Pritts
7f6c877fdc add a net 6 target and make trimmable 2022-03-30 21:24:51 -04:00
Adam Hathcock
eee8309db8 Mark for 0.31 2022-03-30 12:03:03 +01:00
Adam Hathcock
155cfab792 Merge pull request #651 from loop-evgeny/evgeny-upgrade-adler32
Update Adler32 from ImageSharp v2.1.0
2022-03-30 12:01:32 +01:00
Evgeny Morozov
e1c36afdec Update Adler32 from ImageSharp v2.1.0
Adler32.cs is taken from 09b2cdb83a with minimal change to make it build as part of SharpCompress. Fixes https://github.com/adamhathcock/sharpcompress/issues/650 and https://github.com/adamhathcock/sharpcompress/issues/645.
2022-03-30 12:17:14 +02:00
Adam Hathcock
6b0d6a41ca Merge pull request #638 from Nanook/rar2MultiWithTest
Rar2 fix with new unit tests that fail on previous build.
2022-02-16 08:33:12 +00:00
Craig
dab157bb71 Rar2 fix with new unit tests that fail on previous build. 2022-02-15 16:24:22 +00:00
Adam Hathcock
8d17d09455 Merge pull request #624 from adamhathcock/issue-617
Add test and probable fix for Issue 617
2021-11-22 09:20:15 +00:00
Adam Hathcock
05208ccd9b Add test and probable fix for Issue 617 2021-11-22 08:40:40 +00:00
Adam Hathcock
a1e7c0068d Merge pull request #622 from adamhathcock/net461-tests
Net461 tests
2021-10-02 15:32:20 +01:00
Adam Hathcock
e6bec19946 Mark for 0.30 2021-10-02 15:29:22 +01:00
Adam Hathcock
ec2be2869f Fix whitespace from dotnet format 2021-10-02 15:29:03 +01:00
Adam Hathcock
ce5432ed73 Fix tests for multi-targetting 2021-10-02 15:25:43 +01:00
Adam Hathcock
b6e0ad89ce Remove duplicated artifact step 2021-10-02 15:21:05 +01:00
Adam Hathcock
2745bfa19b Minor SDK update 2021-10-02 15:19:51 +01:00
Adam Hathcock
3cdc4b38a6 Test 461 on github actions 2021-10-02 15:19:13 +01:00
Adam Hathcock
fc1ca808d7 Merge pull request #621 from inthemedium/master
Add net461 target to clean up issues with system.* nuget dependencies
2021-10-02 15:08:18 +01:00
Jeff Tyson
6983e66037 Fix preprocessor condition 2021-10-01 16:34:00 +00:00
Jeff Tyson
01f7336d09 Based on docs, the target should be net461 2021-09-29 22:04:47 +00:00
Jeff Tyson
1561bba538 Add net462 target to clean up issues with system.* nuget dependencies 2021-09-29 21:55:11 +00:00
Adam Hathcock
3ecf8a5e0c Merge pull request #616 from amosonn/patch-1
Fix for chunked read for ZLibBaseStream
2021-09-27 09:14:58 +01:00
Adam Hathcock
e2095fc416 Merge branch 'master' into patch-1 2021-09-27 09:08:58 +01:00
Amos Onn
8398d40106 Fix #615 2021-09-14 22:02:18 +02:00
Amos Onn
134fa8892f Test for bug #615 2021-09-14 21:55:05 +02:00
Adam Hathcock
ea5c8dc063 Merge pull request #614 from adamhathcock/ensure-dest-dir-exists
Ensure destination directory exists.
2021-09-12 08:56:42 +01:00
Adam Hathcock
0209d00164 Minor updates and prep for 0.29 2021-09-12 08:52:00 +01:00
Adam Hathcock
a8d065dc9e Ensure destination directory exists 2021-09-12 08:47:30 +01:00
Adam Hathcock
7bd9711ade Merge pull request #610 from cyr/master
Bugfix for TarWriter - too much padding in large files
2021-09-12 08:43:20 +01:00
cyr
61802eadb4 Merge branch 'adamhathcock:master' into master 2021-09-12 09:37:07 +02:00
Adam Hathcock
b425659058 Merge pull request #611 from Thunderstr1k3/fix-zipheader-seeking
Allowing to seek empty zip files
2021-09-12 08:28:05 +01:00
Christian
3e32e3d7b1 Allowing to seek empty zip files 2021-09-02 13:54:32 +02:00
cyr
1b661c9df1 Fixed bug where large (int32+ file size) adds an additional 512 bytes of padding in tar files. 2021-08-27 22:38:04 +02:00
Adam Hathcock
54fc26b93d Update build and mark for 0.28.3 2021-06-04 13:43:35 +01:00
Adam Hathcock
161f99bbad Merge pull request #601 from salvois/master
Write ZIP64 End of Central Directory only if needed.
2021-06-04 13:22:01 +01:00
Adam Hathcock
c012db0776 Merge branch 'master' into master 2021-06-04 13:17:13 +01:00
Adam Hathcock
8ee257d299 Merge pull request #592 from adamhathcock/memory-downgrade
Downgrade System.Memory to fix buffer version issue
2021-06-04 13:16:40 +01:00
Adam Hathcock
f9522107c3 Merge branch 'master' into memory-downgrade 2021-06-04 13:16:34 +01:00
Adam Hathcock
e07046a37a Merge pull request #596 from DannyBoyk/issue_595_conditionally_read_zip64_extra
Conditionally parse Zip64 extra field based on specification
2021-06-04 13:07:08 +01:00
Salvatore Isaja
ad6d0d9ae8 Write ZIP64 End of Central Directory only if needed. 2021-05-23 21:10:35 +02:00
Daniel Nash
fdc33e91bd Conditionally parse Zip64 extra field based on specification
The Zip64 extra field should look for values based on the corresponding
values in the local entry header.

Fixes adamhathcock/sharpcompress#595
2021-04-26 14:58:10 -04:00
Adam Hathcock
a34f5a855c Mark for 0.28.2 2021-04-25 09:29:56 +01:00
Adam Hathcock
6474741af1 Merge pull request #593 from adamhathcock/fix-pkware-encryption
ReadFully used by pkware encryption didn’t like spans
2021-04-25 09:29:02 +01:00
Adam Hathcock
c10bd840c5 ReadFully used by pkware encryption didn’t like spans 2021-04-25 09:25:51 +01:00
Adam Hathcock
e6dded826b Downgrade System.Memory to fix buffer version issue 2021-04-24 09:16:46 +01:00
Adam Hathcock
8a022c4b18 Update FORMATS.md
remove LZipArchive/Reader/Writer mention
2021-03-28 08:58:11 +01:00
Adam Hathcock
cfef228afc Merge pull request #579 from Looooong/fix/do-not-place-extention-classes-in-common-namespace
Do not place extension classes in common namespace
2021-03-18 13:52:40 +00:00
Nguyễn Đức Long
237ff9f055 Do not place extension classes in common namespace 2021-03-18 20:44:04 +07:00
Adam Hathcock
020f862814 Bug fix for recursive call introduced in 0.28 2021-02-18 08:31:50 +00:00
Adam Hathcock
fa6107200d Merge pull request #572 from Erior/feature/521
Not so elegant perhaps for checking 7z encryption
2021-02-16 08:05:08 +00:00
Adam Hathcock
eb81f972c4 Merge branch 'master' into feature/521 2021-02-16 08:01:32 +00:00
Lars Vahlenberg
93c1ff396e Not so elegant perhaps 2021-02-14 16:29:01 +01:00
Adam Hathcock
403baf05a6 Mark for 0.28 2021-02-14 13:07:35 +00:00
Adam Hathcock
a51b56339a Fix complete entry check for RAR files. 2021-02-14 13:00:43 +00:00
Adam Hathcock
f48a6d47dc Merge pull request #571 from Erior/feature/540
Proposal fixing Extra bytes written when setting zip64
2021-02-14 12:54:17 +00:00
Adam Hathcock
5b52463e4c Merge pull request #570 from Erior/feature/555
Proposal for handling Zip with long comment
2021-02-14 12:52:42 +00:00
Adam Hathcock
6f08bb72d8 Merge pull request #569 from BrendanGrant/improve_how_missing_parts_are_handled
Improve how missing parts are handled
2021-02-14 12:49:49 +00:00
Lars Vahlenberg
045093f453 Linux is case sensitive with files names 2021-02-14 10:26:26 +01:00
Lars Vahlenberg
566c49ce53 Proposal
Zip64 requires version 4.5
Number of disks is 4 bytes and not 8
2021-02-14 02:42:32 +01:00
Lars Vahlenberg
d1d2758ee0 Proposal for handling Zip with long comment 2021-02-13 23:57:03 +01:00
Brendan Grant
5b86c40d5b Properly detect if RAR is complete at the end or not 2021-02-13 13:34:57 -06:00
Brendan Grant
53393e744e Supporting reading contents of incomplete files 2021-02-13 13:33:43 -06:00
Adam Hathcock
2dd17e3882 Be explicit about zip64 extra field sizes. Formatting 2021-02-13 07:05:53 +00:00
Adam Hathcock
c4f7433584 Merge pull request #567 from Nanook/master
Zip64 Header and Size fix
2021-02-13 06:58:41 +00:00
Adam Hathcock
9405a7cf4b Merge pull request #568 from Bond-009/stackalloc
Use stackallocs where possible/sensible
2021-02-13 06:39:32 +00:00
Bond_009
cd677440ce Use stackallocs where possible/sensible 2021-02-12 20:20:15 +01:00
Craig Greenhill
c06f4bc5a8 Zip64 Header and Size fix 2021-02-11 09:37:59 +00:00
Adam Hathcock
4a7337b223 Merge pull request #563 from adamhathcock/add-reader-test-gzip
Fix Rewindable stream Length and add GZip Reader tests
2021-01-13 15:13:34 +00:00
Adam Hathcock
1d8afb817e Bump version 2021-01-13 14:41:25 +00:00
Adam Hathcock
0f06c3d934 Fix rewindable stream to expose length 2021-01-13 14:40:36 +00:00
Adam Hathcock
9d5cb8d119 Add GZip Reader tests 2021-01-13 10:42:59 +00:00
Adam Hathcock
a28d686eb9 Fix relevant package references 2021-01-11 12:01:17 +00:00
Adam Hathcock
ac525a8ec2 Merge branch 'master' of github.com:adamhathcock/sharpcompress 2021-01-11 10:01:49 +00:00
Adam Hathcock
52c44befa2 Merge pull request #560 from adamhathcock/gzip-fixes
Expose Last Modified time on GZipStream.  Add CRC and Size to GZipEntries on Archive
2021-01-11 08:57:19 +00:00
Adam Hathcock
c64251c341 Mark for 0.27 2021-01-09 14:04:44 +00:00
Adam Hathcock
bdc57d3c33 Merge pull request #559 from adamhathcock/net5
Use Net5, NetCoreApp3.1, NetStandard2.1, NetStandard2.0 only
2021-01-09 13:43:37 +00:00
Adam Hathcock
7edc437df2 formatting 2021-01-09 13:40:57 +00:00
Adam Hathcock
57e4395e7d Merge branch 'master' into net5
# Conflicts:
#	build/Program.cs
#	src/SharpCompress/Common/Zip/ZipFilePart.cs
2021-01-09 13:40:09 +00:00
Adam Hathcock
ee17dca9e5 Fix formatting 2021-01-09 13:36:30 +00:00
Adam Hathcock
e9f3add5b9 Merge branch 'master' into gzip-fixes 2021-01-09 13:35:52 +00:00
Adam Hathcock
faf1a9f7e4 Merge pull request #561 from adamhathcock/format
Use dotnet format to ensure some kind of code style
2021-01-09 13:35:26 +00:00
Adam Hathcock
5357bd07c7 Let dotnet format do it’s thing 2021-01-09 13:33:34 +00:00
Adam Hathcock
8c0e2cbd25 Use dotnet format 2021-01-09 13:32:14 +00:00
Adam Hathcock
674f3b4f28 Merge branch 'master' into gzip-fixes 2021-01-09 13:25:55 +00:00
Adam Hathcock
6e42e00974 Merge pull request #485 from adamhathcock/issue-256
Create and using PauseEntryRebuilding for adding large numbers of ent…
2021-01-09 13:23:52 +00:00
Adam Hathcock
8598885258 Read trailer for GZip for CRC and uncompressed size 2021-01-09 13:22:06 +00:00
Adam Hathcock
669e40d53c Merge branch 'master' into issue-256 2021-01-09 13:01:16 +00:00
Adam Hathcock
1adcce6c62 Expose Last Modified time on GZipStream 2021-01-09 12:53:13 +00:00
Adam Hathcock
147be6e6e1 Use Net5, NetCoreApp3.1, NetStandard2.1, NetStandard2.0 only 2021-01-09 10:34:49 +00:00
Adam Hathcock
5879999094 Merge pull request #551 from carbon/alder32
Use hardware accelerated Adler32 impl
2020-11-19 08:21:31 +00:00
Jason Nelson
477a30cf5b Use hardware accelerated Adler32 impl 2020-11-18 11:21:29 -08:00
Adam Hathcock
2fec03e1ac Merge pull request #550 from carbon/cq
Improve Code Quality 3
2020-11-18 18:32:53 +00:00
Jason Nelson
9a17449a02 Format NewSubHeaderType 2020-11-18 09:44:13 -08:00
Jason Nelson
087a6aad8c Cross target .NETCOREAPP3.1 and react to new nullablity annotations 2020-11-18 09:43:08 -08:00
Jason Nelson
e243a8e88f Format AbstractArchive 2020-11-18 09:31:39 -08:00
Jason Nelson
b57df8026a Use pattern matching 2020-11-18 09:29:38 -08:00
Jason Nelson
a1d45b44cd Format ArchiveFactory 2020-11-18 09:28:24 -08:00
Jason Nelson
e47e1d220a Format AesDecoderStream 2020-11-18 09:25:38 -08:00
Jason Nelson
0129a933df Remove NETSTANDARD1_3 symbol 2020-11-18 09:23:50 -08:00
Jason Nelson
fa241bb0d7 Inline variable declarations 2020-11-18 09:21:45 -08:00
Jason Nelson
d8804ae108 Improve conditional logic to prepare to add .NETCOREAPP target 2020-11-18 09:19:21 -08:00
Jason Nelson
8090d269e7 Add polyfills for string.EndsWith(char) && string.Contains(char) 2020-11-18 09:16:53 -08:00
Jason Nelson
b0101f20c5 Eliminate culture specific StartsWith comparisons 2020-11-18 09:12:01 -08:00
Jason Nelson
dd48e4299a Simplify .NET framework code exclusions, bump min .NET framework version to 4.6.1 2020-11-18 09:07:30 -08:00
Jason Nelson
c61ee0c24f Update deps 2020-11-18 09:02:11 -08:00
Jason Nelson
9576867c34 Enable C# 9 2020-11-18 09:01:35 -08:00
Adam Hathcock
4426a24298 Merge pull request #549 from adamhathcock/update-deps
Update dependencies
2020-11-03 08:47:58 +00:00
Adam Hathcock
3b43c1e413 Update dependencies 2020-11-03 08:45:10 +00:00
Adam Hathcock
aa6575c8f9 Merge pull request #541 from avao/master
UT and Fix for: Index out of range exception from gzip #532
2020-10-19 12:33:31 +01:00
avao
0268713960 UT and Fix for: Index out of range exception from gzip #532 2020-10-13 19:58:11 +01:00
Adam Hathcock
f36167d425 Merge pull request #531 from carbon/master
Improve CQ3
2020-08-01 06:25:09 +01:00
Jason Nelson
33ffcb9308 Use Array.Empty<byte> 2020-07-31 17:00:46 -07:00
Jason Nelson
a649c25a91 Eliminate two allocations in HuffmanTree 2020-07-31 16:58:21 -07:00
Jason Nelson
fa1e773960 Eliminate two allocations in Crc32 2020-07-31 16:55:07 -07:00
Jason Nelson
62f7238796 Make CMethodId readonly 2020-07-31 16:49:34 -07:00
Jason Nelson
d4ccf73340 Embed FAST_ENCODER_TREE_STRUCTURE_DATA 2020-07-31 16:47:05 -07:00
Jason Nelson
5ddb0f96bc Use switch expressions 2020-07-31 16:37:56 -07:00
Jason Nelson
75a6db8f4c Eliminate three allocations in HbMakeCodeLengths 2020-07-31 16:33:00 -07:00
Jason Nelson
ae5635319b Eliminate header bytes allocation 2020-07-31 16:30:26 -07:00
Jason Nelson
98ed3080d0 Eliminate three allocations 2020-07-31 16:30:09 -07:00
Jason Nelson
c618eacad4 Optimize RijndaelEngine 2020-07-31 16:22:44 -07:00
Jason Nelson
3b11e6ef97 Eliminate two allocations 2020-07-31 16:10:59 -07:00
Jason Nelson
40af9359db Polyfill and use Stream.Read(Span<byte> buffer) 2020-07-31 16:08:38 -07:00
Jason Nelson
d6bf9dae42 Eliminate allocation 2020-07-31 16:01:09 -07:00
Adam Hathcock
13917941ff Merge pull request #530 from carbon/master
Enable test coverage for net461 and fix regression
2020-07-31 18:39:40 +01:00
Jason Nelson
28f04329ae Merge branch 'master' of https://github.com/carbon/sharpcompress 2020-07-31 10:12:37 -07:00
Jason Nelson
404a6b231d Fix .NET 461 failures 2020-07-31 10:12:34 -07:00
Jason Nelson
184596da3c Merge branch 'master' into master 2020-07-31 11:37:45 -05:00
Jason Nelson
f00f393687 Disable failing net461 tests 2020-07-31 09:30:20 -07:00
Jason Nelson
cbbfb89619 Add failure notes 2020-07-31 09:29:06 -07:00
Jason Nelson
6a5cf11dd0 Fix net461 bug 2020-07-31 09:27:41 -07:00
Jason Nelson
fc1d0a0464 Run tests against net461 2020-07-31 09:27:32 -07:00
Adam Hathcock
74af1759eb Merge pull request #529 from carbon/master
Improve code quality v2
2020-07-31 06:55:35 +01:00
Jason Nelson
ee3162ad71 Fix return 2020-07-30 17:49:29 -07:00
Jason Nelson
4357165163 Add Read/Write overrides to NonDisposingStream 2020-07-30 17:36:03 -07:00
Jason Nelson
6973436b94 Add and use Stream.Write(ReadOnlySpan<byte> buffer) polyfill 2020-07-30 17:29:33 -07:00
Jason Nelson
7750ed7106 Finish spanification of RijndaelEngine 2020-07-30 17:01:13 -07:00
Jason Nelson
773158e9d8 Seal LZipStream 2020-07-30 16:57:30 -07:00
Jason Nelson
4db615597d Refactor ExtraData and enable nullable 2020-07-30 16:48:22 -07:00
Jason Nelson
6bdf2365fc Inline variable declarations 2020-07-30 16:45:38 -07:00
Adam Hathcock
a7944f28c5 Fix CI again 2020-07-26 14:45:32 +01:00
Adam Hathcock
426d459284 Fix CI build 2020-07-26 14:39:13 +01:00
Adam Hathcock
b00b461ada Update documented targets 2020-07-26 14:38:19 +01:00
Adam Hathcock
84834b6348 ignore snupkg 2020-07-26 14:36:42 +01:00
Adam Hathcock
f521fd35ff Fix tests, update to 0.26 2020-07-26 14:36:07 +01:00
Adam Hathcock
2979fceecf Merge pull request #522 from JTOne123/master
[PR] The proj files have been updated to enable SourceLink
2020-07-26 12:07:13 +01:00
Adam Hathcock
b12e8e793f Merge branch 'master' into master 2020-07-26 12:07:05 +01:00
Adam Hathcock
c77ec59a28 Merge pull request #527 from adamhathcock/default-encoding
Don’t use 437 Encoding by default anymore.
2020-07-26 12:06:30 +01:00
Adam Hathcock
42ba8cf828 Merge branch 'master' into default-encoding 2020-07-26 12:06:22 +01:00
Adam Hathcock
c7618fc895 Merge pull request #528 from DannyBoyk/issue_524_tararchive_fails_read_all_entries
Ensure TarArchive enumerates all entries
2020-07-26 12:05:58 +01:00
Daniel Nash
d055b34efe Ensure TarArchive enumerates all entries
While enumerating the entries of a tar file and writing their contents
to disk using TarArchive, it was discovered TarArchive was not properly
discarding padding bytes in the last block of each entry. TarArchive was
sometimes able to recover depending on the number of padding bytes due
to the logic it uses to find the next entry header, but not always.

TarArchive was changed to use TarReadOnlySubStream when opening entries
and TarReadOnlySubstream was changed to ensure all an entry's blocks are
read when it is being disposed.

Fixes adamhathcock/sharpcompress#524
2020-07-20 12:57:39 -04:00
Adam Hathcock
b7f635f540 Update readme 2020-07-16 15:35:27 +01:00
Adam Hathcock
5e95a54260 Merge branch 'master' into default-encoding 2020-07-16 14:32:51 +01:00
Adam Hathcock
4354e82bb5 Don’t use 437 Encoding by default anymore. 2020-07-16 14:28:37 +01:00
Adam Hathcock
ab7bdc24dc Merge pull request #523 from kdaadk/master
Decompress multipart solid RAR4.x archive
2020-07-01 12:56:58 +01:00
Dmitriy
81997fe1ba rename test 2020-07-01 15:10:21 +05:00
Dmitriy
de6759a83f - remove check of solid archive
- change tests
- add test on multi solid archive
2020-07-01 15:06:54 +05:00
Adam Hathcock
233dc33130 Fix running tests on build 2020-06-25 09:22:15 +01:00
Adam Hathcock
39b07f45f1 Update github action and minor SDK bump 2020-06-25 09:16:15 +01:00
Pavlo Datsiuk
802662a165 [COMMIT] The proj files have been updated to enable SourceLink [SharpCompress.csproj] 2020-06-25 10:58:21 +03:00
Adam Hathcock
2859848fc4 Give the artifacts names 2020-05-24 10:41:06 +01:00
Adam Hathcock
b734d00062 Remove README for appveyor and fix artifacts again 2020-05-24 10:35:39 +01:00
Adam Hathcock
02a17d22f6 Adjust artifacts and remove appveyor 2020-05-24 10:32:50 +01:00
Adam Hathcock
7bfff472c6 Fix yaml 2020-05-24 10:26:51 +01:00
Adam Hathcock
5aa146be17 Remove matrix var 2020-05-24 10:25:40 +01:00
Adam Hathcock
a0bfc22a29 Try upload artifact 2020-05-24 10:23:01 +01:00
Adam Hathcock
6ed46b5fcc Fix CI paths 2020-05-24 09:04:10 +01:00
Adam Hathcock
904e40ef57 Switch to bullseye for building 2020-05-24 09:00:27 +01:00
Adam Hathcock
00ff119ec4 Minor Rider issues resolved. Still two outstanding. 2020-05-24 08:42:36 +01:00
Adam Hathcock
60d2511e80 Remove .NET Standard 1.3 which is no longer in support 2020-05-24 08:42:06 +01:00
Adam Hathcock
ed56a4aa4a Merge pull request #515 from carbon/master
Enable nullable
2020-05-24 08:20:37 +01:00
Jason Nelson
5b6a1c97e3 Enable nullable 2020-05-23 16:27:55 -07:00
Adam Hathcock
3b2e273832 Merge branch 'master' into issue-256 2019-10-10 09:27:46 +01:00
Adam Hathcock
43c839eb89 Create and using PauseEntryRebuilding for adding large numbers of entries 2019-10-09 09:55:16 +01:00
330 changed files with 10685 additions and 7057 deletions

12
.config/dotnet-tools.json Normal file
View File

@@ -0,0 +1,12 @@
{
"version": 1,
"isRoot": true,
"tools": {
"dotnet-format": {
"version": "4.1.131201",
"commands": [
"dotnet-format"
]
}
}
}

543
.editorconfig Normal file
View File

@@ -0,0 +1,543 @@
# Version: 2.0.1 (Using https://semver.org/)
# Updated: 2020-12-11
# See https://github.com/RehanSaeed/EditorConfig/releases for release notes.
# See https://github.com/RehanSaeed/EditorConfig for updates to this file.
# See http://EditorConfig.org for more information about .editorconfig files.
##########################################
# Common Settings
##########################################
# This file is the top-most EditorConfig file
root = true
# All Files
[*]
charset = utf-8
indent_style = space
indent_size = 4
insert_final_newline = true
trim_trailing_whitespace = true
##########################################
# File Extension Settings
##########################################
# Visual Studio Solution Files
[*.sln]
indent_style = tab
# Visual Studio XML Project Files
[*.{csproj,vbproj,vcxproj.filters,proj,projitems,shproj}]
indent_size = 2
# XML Configuration Files
[*.{xml,config,props,targets,nuspec,resx,ruleset,vsixmanifest,vsct}]
indent_size = 2
# JSON Files
[*.{json,json5,webmanifest}]
indent_size = 2
# YAML Files
[*.{yml,yaml}]
indent_size = 2
# Markdown Files
[*.md]
trim_trailing_whitespace = false
# Web Files
[*.{htm,html,js,jsm,ts,tsx,css,sass,scss,less,svg,vue}]
indent_size = 2
# Batch Files
[*.{cmd,bat}]
end_of_line = crlf
# Bash Files
[*.sh]
end_of_line = lf
# Makefiles
[Makefile]
indent_style = tab
##########################################
# Default .NET Code Style Severities
# https://docs.microsoft.com/dotnet/fundamentals/code-analysis/configuration-options#scope
##########################################
[*.{cs,csx,cake,vb,vbx}]
# Default Severity for all .NET Code Style rules below
dotnet_analyzer_diagnostic.severity = warning
##########################################
# File Header (Uncomment to support file headers)
# https://docs.microsoft.com/visualstudio/ide/reference/add-file-header
##########################################
# [*.{cs,csx,cake,vb,vbx}]
# file_header_template = <copyright file="{fileName}" company="PROJECT-AUTHOR">\n© PROJECT-AUTHOR\n</copyright>
# SA1636: File header copyright text should match
# Justification: .editorconfig supports file headers. If this is changed to a value other than "none", a stylecop.json file will need to added to the project.
# dotnet_diagnostic.SA1636.severity = none
##########################################
# .NET Language Conventions
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions
##########################################
# .NET Code Style Settings
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#net-code-style-settings
[*.{cs,csx,cake,vb,vbx}]
# "this." and "Me." qualifiers
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#this-and-me
#dotnet_style_qualification_for_field = true:warning
#dotnet_style_qualification_for_property = true:warning
#dotnet_style_qualification_for_method = true:warning
#dotnet_style_qualification_for_event = true:warning
# Language keywords instead of framework type names for type references
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#language-keywords
dotnet_style_predefined_type_for_locals_parameters_members = true:warning
dotnet_style_predefined_type_for_member_access = true:warning
# Modifier preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#normalize-modifiers
dotnet_style_require_accessibility_modifiers = always:warning
csharp_preferred_modifier_order = public,private,protected,internal,static,extern,new,virtual,abstract,sealed,override,readonly,unsafe,volatile,async:warning
visual_basic_preferred_modifier_order = Partial,Default,Private,Protected,Public,Friend,NotOverridable,Overridable,MustOverride,Overloads,Overrides,MustInherit,NotInheritable,Static,Shared,Shadows,ReadOnly,WriteOnly,Dim,Const,WithEvents,Widening,Narrowing,Custom,Async:warning
dotnet_style_readonly_field = true:warning
# Parentheses preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#parentheses-preferences
dotnet_style_parentheses_in_arithmetic_binary_operators = always_for_clarity:warning
dotnet_style_parentheses_in_relational_binary_operators = always_for_clarity:warning
dotnet_style_parentheses_in_other_binary_operators = always_for_clarity:warning
dotnet_style_parentheses_in_other_operators = always_for_clarity:suggestion
# Expression-level preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#expression-level-preferences
dotnet_style_object_initializer = true:warning
dotnet_style_collection_initializer = true:warning
dotnet_style_explicit_tuple_names = true:warning
dotnet_style_prefer_inferred_tuple_names = true:warning
dotnet_style_prefer_inferred_anonymous_type_member_names = true:warning
dotnet_style_prefer_auto_properties = true:warning
dotnet_style_prefer_is_null_check_over_reference_equality_method = true:warning
dotnet_style_prefer_conditional_expression_over_assignment = false:suggestion
dotnet_diagnostic.IDE0045.severity = suggestion
dotnet_style_prefer_conditional_expression_over_return = false:suggestion
dotnet_diagnostic.IDE0046.severity = suggestion
dotnet_style_prefer_compound_assignment = true:warning
# Null-checking preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#null-checking-preferences
dotnet_style_coalesce_expression = true:warning
dotnet_style_null_propagation = true:warning
# Parameter preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#parameter-preferences
dotnet_code_quality_unused_parameters = all:warning
# More style options (Undocumented)
# https://github.com/MicrosoftDocs/visualstudio-docs/issues/3641
dotnet_style_operator_placement_when_wrapping = end_of_line
# https://github.com/dotnet/roslyn/pull/40070
dotnet_style_prefer_simplified_interpolation = true:warning
# C# Code Style Settings
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#c-code-style-settings
[*.{cs,csx,cake}]
# Implicit and explicit types
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#implicit-and-explicit-types
csharp_style_var_for_built_in_types = true:warning
csharp_style_var_when_type_is_apparent = true:warning
csharp_style_var_elsewhere = true:warning
# Expression-bodied members
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#expression-bodied-members
csharp_style_expression_bodied_methods = true:warning
csharp_style_expression_bodied_constructors = true:warning
csharp_style_expression_bodied_operators = true:warning
csharp_style_expression_bodied_properties = true:warning
csharp_style_expression_bodied_indexers = true:warning
csharp_style_expression_bodied_accessors = true:warning
csharp_style_expression_bodied_lambdas = true:warning
csharp_style_expression_bodied_local_functions = true:warning
# Pattern matching
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#pattern-matching
csharp_style_pattern_matching_over_is_with_cast_check = true:warning
csharp_style_pattern_matching_over_as_with_null_check = true:warning
# Inlined variable declarations
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#inlined-variable-declarations
csharp_style_inlined_variable_declaration = true:warning
# Expression-level preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#expression-level-preferences
csharp_prefer_simple_default_expression = true:warning
# "Null" checking preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#c-null-checking-preferences
csharp_style_throw_expression = true:warning
csharp_style_conditional_delegate_call = true:warning
# Code block preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#code-block-preferences
csharp_prefer_braces = true:warning
# Unused value preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#unused-value-preferences
csharp_style_unused_value_expression_statement_preference = discard_variable:suggestion
dotnet_diagnostic.IDE0058.severity = suggestion
csharp_style_unused_value_assignment_preference = discard_variable:suggestion
dotnet_diagnostic.IDE0059.severity = suggestion
# Index and range preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#index-and-range-preferences
csharp_style_prefer_index_operator = true:warning
csharp_style_prefer_range_operator = true:warning
# Miscellaneous preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#miscellaneous-preferences
csharp_style_deconstructed_variable_declaration = true:warning
csharp_style_pattern_local_over_anonymous_function = true:warning
csharp_using_directive_placement = outside_namespace:warning
csharp_prefer_static_local_function = true:warning
csharp_prefer_simple_using_statement = true:suggestion
dotnet_diagnostic.IDE0063.severity = suggestion
csharp_style_namespace_declarations = file_scoped
##########################################
# .NET Formatting Conventions
# https://docs.microsoft.com/visualstudio/ide/editorconfig-code-style-settings-reference#formatting-conventions
##########################################
# Organize usings
# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#organize-using-directives
dotnet_sort_system_directives_first = true
# Newline options
# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#new-line-options
csharp_new_line_before_open_brace = all
csharp_new_line_before_else = true
csharp_new_line_before_catch = true
csharp_new_line_before_finally = true
csharp_new_line_before_members_in_object_initializers = true
csharp_new_line_before_members_in_anonymous_types = true
csharp_new_line_between_query_expression_clauses = true
# Indentation options
# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#indentation-options
csharp_indent_case_contents = true
csharp_indent_switch_labels = true
csharp_indent_labels = no_change
csharp_indent_block_contents = true
csharp_indent_braces = false
csharp_indent_case_contents_when_block = false
# Spacing options
# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#spacing-options
csharp_space_after_cast = false
csharp_space_after_keywords_in_control_flow_statements = true
csharp_space_between_parentheses = false
csharp_space_before_colon_in_inheritance_clause = true
csharp_space_after_colon_in_inheritance_clause = true
csharp_space_around_binary_operators = before_and_after
csharp_space_between_method_declaration_parameter_list_parentheses = false
csharp_space_between_method_declaration_empty_parameter_list_parentheses = false
csharp_space_between_method_declaration_name_and_open_parenthesis = false
csharp_space_between_method_call_parameter_list_parentheses = false
csharp_space_between_method_call_empty_parameter_list_parentheses = false
csharp_space_between_method_call_name_and_opening_parenthesis = false
csharp_space_after_comma = true
csharp_space_before_comma = false
csharp_space_after_dot = false
csharp_space_before_dot = false
csharp_space_after_semicolon_in_for_statement = true
csharp_space_before_semicolon_in_for_statement = false
csharp_space_around_declaration_statements = false
csharp_space_before_open_square_brackets = false
csharp_space_between_empty_square_brackets = false
csharp_space_between_square_brackets = false
# Wrapping options
# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#wrap-options
csharp_preserve_single_line_statements = false
csharp_preserve_single_line_blocks = true
##########################################
# .NET Naming Conventions
# https://docs.microsoft.com/visualstudio/ide/editorconfig-naming-conventions
##########################################
[*.{cs,csx,cake,vb,vbx}]
dotnet_diagnostic.CA1000.severity = suggestion
dotnet_diagnostic.CA1001.severity = error
dotnet_diagnostic.CA1018.severity = error
dotnet_diagnostic.CA1051.severity = suggestion
dotnet_diagnostic.CA1068.severity = error
dotnet_diagnostic.CA1069.severity = error
dotnet_diagnostic.CA1304.severity = error
dotnet_diagnostic.CA1305.severity = suggestion
dotnet_diagnostic.CA1309.severity = suggestion
dotnet_diagnostic.CA1310.severity = error
dotnet_diagnostic.CA1707.severity = suggestion
dotnet_diagnostic.CA1708.severity = suggestion
dotnet_diagnostic.CA1711.severity = suggestion
dotnet_diagnostic.CA1716.severity = suggestion
dotnet_diagnostic.CA1720.severity = suggestion
dotnet_diagnostic.CA1725.severity = suggestion
dotnet_diagnostic.CA1805.severity = suggestion
dotnet_diagnostic.CA1816.severity = suggestion
dotnet_diagnostic.CA1822.severity = suggestion
dotnet_diagnostic.CA1825.severity = error
dotnet_diagnostic.CA1826.severity = silent
dotnet_diagnostic.CA1827.severity = error
dotnet_diagnostic.CA1829.severity = suggestion
dotnet_diagnostic.CA1834.severity = error
dotnet_diagnostic.CA1845.severity = suggestion
dotnet_diagnostic.CA1848.severity = suggestion
dotnet_diagnostic.CA2016.severity = suggestion
dotnet_diagnostic.CA2201.severity = error
dotnet_diagnostic.CA2206.severity = error
dotnet_diagnostic.CA2208.severity = error
dotnet_diagnostic.CA2211.severity = error
dotnet_diagnostic.CA2249.severity = error
dotnet_diagnostic.CA2251.severity = error
dotnet_diagnostic.CA2252.severity = none
dotnet_diagnostic.CA2254.severity = suggestion
dotnet_diagnostic.CS0169.severity = error
dotnet_diagnostic.CS0219.severity = error
dotnet_diagnostic.CS1998.severity = error
dotnet_diagnostic.CS8602.severity = error
dotnet_diagnostic.CS8604.severity = error
dotnet_diagnostic.CS8618.severity = error
dotnet_diagnostic.CS0618.severity = error
dotnet_diagnostic.CS1998.severity = error
dotnet_diagnostic.CS4014.severity = error
dotnet_diagnostic.CS8600.severity = error
dotnet_diagnostic.CS8603.severity = error
dotnet_diagnostic.CS8625.severity = error
dotnet_diagnostic.BL0005.severity = suggestion
dotnet_diagnostic.MVC1000.severity = suggestion
dotnet_diagnostic.IDE0055.severity = suggestion # Fix formatting
dotnet_diagnostic.IDE0023.severity = suggestion # use expression body for operators
dotnet_diagnostic.IDE0025.severity = suggestion # use expression body for properties
dotnet_diagnostic.IDE1006.severity = suggestion # Naming rule violation: These words cannot contain lower case characters
dotnet_diagnostic.IDE0072.severity = suggestion # Populate switch - forces population of all cases even when default specified
dotnet_diagnostic.IDE0027.severity = suggestion # Use expression body for accessors
dotnet_diagnostic.IDE0032.severity = suggestion # Use auto property
dotnet_diagnostic.IDE0007.severity = error # Use var
dotnet_diagnostic.IDE0160.severity = suggestion # Use block scoped
dotnet_diagnostic.IDE0011.severity = error # Use braces on if statements
dotnet_diagnostic.IDE0057.severity = suggestion # substring can be simplified
dotnet_diagnostic.IDE0004.severity = error # redundant cast
dotnet_diagnostic.IDE0010.severity = silent # populate switch
dotnet_diagnostic.IDE0021.severity = silent # expression body for constructors
dotnet_diagnostic.IDE0022.severity = silent # expression body for methods
dotnet_diagnostic.IDE0024.severity = silent # expression body for operators
dotnet_diagnostic.IDE0028.severity = silent
dotnet_diagnostic.IDE0033.severity = error # prefer tuple name
dotnet_diagnostic.IDE0040.severity = error # modifiers required
dotnet_diagnostic.IDE0041.severity = error # simplify null
dotnet_diagnostic.IDE0042.severity = error # deconstruct variable
dotnet_diagnostic.IDE0044.severity = error # make field only when possible
dotnet_diagnostic.IDE0047.severity = suggestion # parameter name
dotnet_diagnostic.IDE0051.severity = error # unused field
dotnet_diagnostic.IDE0052.severity = error # unused member
dotnet_diagnostic.IDE0060.severity = suggestion # unused parameters
dotnet_diagnostic.IDE0066.severity = suggestion # switch expression
dotnet_diagnostic.IDE0078.severity = suggestion # use pattern matching
dotnet_diagnostic.IDE0090.severity = suggestion # new can be simplified
dotnet_diagnostic.IDE0130.severity = suggestion # namespace folder structure
dotnet_diagnostic.IDE0160.severity = silent # Use block namespaces ARE NOT required
dotnet_diagnostic.IDE0161.severity = suggestion # Please use file namespaces
##########################################
# Styles
##########################################
# camel_case_style - Define the camelCase style
dotnet_naming_style.camel_case_style.capitalization = camel_case
# pascal_case_style - Define the PascalCase style
dotnet_naming_style.pascal_case_style.capitalization = pascal_case
# constant_case - Define the CONSTANT_CASE style
dotnet_naming_style.constant_case.capitalization = all_upper
dotnet_naming_style.constant_case.word_separator = _
# first_upper_style - The first character must start with an upper-case character
dotnet_naming_style.first_upper_style.capitalization = first_word_upper
# prefix_interface_with_i_style - Interfaces must be PascalCase and the first character of an interface must be an 'I'
dotnet_naming_style.prefix_interface_with_i_style.capitalization = pascal_case
dotnet_naming_style.prefix_interface_with_i_style.required_prefix = I
# prefix_type_parameters_with_t_style - Generic Type Parameters must be PascalCase and the first character must be a 'T'
dotnet_naming_style.prefix_type_parameters_with_t_style.capitalization = pascal_case
dotnet_naming_style.prefix_type_parameters_with_t_style.required_prefix = T
# disallowed_style - Anything that has this style applied is marked as disallowed
dotnet_naming_style.disallowed_style.capitalization = pascal_case
dotnet_naming_style.disallowed_style.required_prefix = ____RULE_VIOLATION____
dotnet_naming_style.disallowed_style.required_suffix = ____RULE_VIOLATION____
# internal_error_style - This style should never occur... if it does, it indicates a bug in file or in the parser using the file
dotnet_naming_style.internal_error_style.capitalization = pascal_case
dotnet_naming_style.internal_error_style.required_prefix = ____INTERNAL_ERROR____
dotnet_naming_style.internal_error_style.required_suffix = ____INTERNAL_ERROR____
# underscore_camel_case_style - Private fields must be camelCase and prefixed with an underscore
dotnet_naming_style.underscore_camel_case_style.capitalization = camel_case
dotnet_naming_style.underscore_camel_case_style.required_prefix = _
##########################################
# .NET Design Guideline Field Naming Rules
# Naming rules for fields follow the .NET Framework design guidelines
# https://docs.microsoft.com/dotnet/standard/design-guidelines/index
##########################################
# All public/protected/protected_internal constant fields must be constant_case
# https://docs.microsoft.com/dotnet/standard/design-guidelines/field
dotnet_naming_symbols.public_protected_constant_fields_group.applicable_accessibilities = public, protected, protected_internal
dotnet_naming_symbols.public_protected_constant_fields_group.required_modifiers = const
dotnet_naming_symbols.public_protected_constant_fields_group.applicable_kinds = field
dotnet_naming_rule.public_protected_constant_fields_must_be_pascal_case_rule.symbols = public_protected_constant_fields_group
dotnet_naming_rule.public_protected_constant_fields_must_be_pascal_case_rule.style = constant_case
dotnet_naming_rule.public_protected_constant_fields_must_be_pascal_case_rule.severity = warning
# All public/protected/protected_internal static readonly fields must be constant_case
# https://docs.microsoft.com/dotnet/standard/design-guidelines/field
dotnet_naming_symbols.public_protected_static_readonly_fields_group.applicable_accessibilities = public, protected, protected_internal
dotnet_naming_symbols.public_protected_static_readonly_fields_group.required_modifiers = static, readonly
dotnet_naming_symbols.public_protected_static_readonly_fields_group.applicable_kinds = field
dotnet_naming_rule.public_protected_static_readonly_fields_must_be_pascal_case_rule.symbols = public_protected_static_readonly_fields_group
dotnet_naming_rule.public_protected_static_readonly_fields_must_be_pascal_case_rule.style = constant_case
dotnet_naming_rule.public_protected_static_readonly_fields_must_be_pascal_case_rule.severity = warning
# No other public/protected/protected_internal fields are allowed
# https://docs.microsoft.com/dotnet/standard/design-guidelines/field
dotnet_naming_symbols.other_public_protected_fields_group.applicable_accessibilities = public, protected, protected_internal
dotnet_naming_symbols.other_public_protected_fields_group.applicable_kinds = field
dotnet_naming_rule.other_public_protected_fields_disallowed_rule.symbols = other_public_protected_fields_group
dotnet_naming_rule.other_public_protected_fields_disallowed_rule.style = disallowed_style
dotnet_naming_rule.other_public_protected_fields_disallowed_rule.severity = error
##########################################
# StyleCop Field Naming Rules
# Naming rules for fields follow the StyleCop analyzers
# This does not override any rules using disallowed_style above
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers
##########################################
# All constant fields must be constant_case
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1303.md
dotnet_naming_symbols.stylecop_constant_fields_group.applicable_accessibilities = public, internal, protected_internal, protected, private_protected, private
dotnet_naming_symbols.stylecop_constant_fields_group.required_modifiers = const
dotnet_naming_symbols.stylecop_constant_fields_group.applicable_kinds = field
dotnet_naming_rule.stylecop_constant_fields_must_be_pascal_case_rule.symbols = stylecop_constant_fields_group
dotnet_naming_rule.stylecop_constant_fields_must_be_pascal_case_rule.style = constant_case
dotnet_naming_rule.stylecop_constant_fields_must_be_pascal_case_rule.severity = warning
# All static readonly fields must be constant_case
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1311.md
dotnet_naming_symbols.stylecop_static_readonly_fields_group.applicable_accessibilities = public, internal, protected_internal, protected, private_protected, private
dotnet_naming_symbols.stylecop_static_readonly_fields_group.required_modifiers = static, readonly
dotnet_naming_symbols.stylecop_static_readonly_fields_group.applicable_kinds = field
dotnet_naming_rule.stylecop_static_readonly_fields_must_be_pascal_case_rule.symbols = stylecop_static_readonly_fields_group
dotnet_naming_rule.stylecop_static_readonly_fields_must_be_pascal_case_rule.style = constant_case
dotnet_naming_rule.stylecop_static_readonly_fields_must_be_pascal_case_rule.severity = warning
# No non-private instance fields are allowed
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1401.md
dotnet_naming_symbols.stylecop_fields_must_be_private_group.applicable_accessibilities = public, internal, protected_internal, protected, private_protected
dotnet_naming_symbols.stylecop_fields_must_be_private_group.applicable_kinds = field
dotnet_naming_rule.stylecop_instance_fields_must_be_private_rule.symbols = stylecop_fields_must_be_private_group
dotnet_naming_rule.stylecop_instance_fields_must_be_private_rule.style = disallowed_style
dotnet_naming_rule.stylecop_instance_fields_must_be_private_rule.severity = error
# Private fields must be camelCase
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1306.md
dotnet_naming_symbols.stylecop_private_fields_group.applicable_accessibilities = private
dotnet_naming_symbols.stylecop_private_fields_group.applicable_kinds = field
dotnet_naming_rule.stylecop_private_fields_must_be_camel_case_rule.symbols = stylecop_private_fields_group
dotnet_naming_rule.stylecop_private_fields_must_be_camel_case_rule.style = underscore_camel_case_style
dotnet_naming_rule.stylecop_private_fields_must_be_camel_case_rule.severity = warning
# Local variables must be camelCase
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1312.md
dotnet_naming_symbols.stylecop_local_fields_group.applicable_accessibilities = local
dotnet_naming_symbols.stylecop_local_fields_group.applicable_kinds = local
dotnet_naming_rule.stylecop_local_fields_must_be_camel_case_rule.symbols = stylecop_local_fields_group
dotnet_naming_rule.stylecop_local_fields_must_be_camel_case_rule.style = camel_case_style
dotnet_naming_rule.stylecop_local_fields_must_be_camel_case_rule.severity = warning
# This rule should never fire. However, it's included for at least two purposes:
# First, it helps to understand, reason about, and root-cause certain types of issues, such as bugs in .editorconfig parsers or a bug in this file itself.
# Second, it helps to raise immediate awareness if a new field type is added (as occurred recently in C#).
dotnet_naming_symbols.sanity_check_uncovered_field_case_group.applicable_accessibilities = *
dotnet_naming_symbols.sanity_check_uncovered_field_case_group.applicable_kinds = field
dotnet_naming_rule.sanity_check_uncovered_field_case_rule.symbols = sanity_check_uncovered_field_case_group
dotnet_naming_rule.sanity_check_uncovered_field_case_rule.style = internal_error_style
dotnet_naming_rule.sanity_check_uncovered_field_case_rule.severity = error
##########################################
# Other Naming Rules
##########################################
# All of the following must be PascalCase:
# - Namespaces
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-namespaces
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1300.md
# - Classes and Enumerations
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-classes-structs-and-interfaces
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1300.md
# - Delegates
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-classes-structs-and-interfaces#names-of-common-types
# - Constructors, Properties, Events, Methods
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-type-members
dotnet_naming_symbols.element_group.applicable_kinds = namespace, class, enum, struct, delegate, event, method, property
dotnet_naming_rule.element_rule.symbols = element_group
dotnet_naming_rule.element_rule.style = pascal_case_style
dotnet_naming_rule.element_rule.severity = warning
# Interfaces use PascalCase and are prefixed with uppercase 'I'
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-classes-structs-and-interfaces
dotnet_naming_symbols.interface_group.applicable_kinds = interface
dotnet_naming_rule.interface_rule.symbols = interface_group
dotnet_naming_rule.interface_rule.style = prefix_interface_with_i_style
dotnet_naming_rule.interface_rule.severity = warning
# Generics Type Parameters use PascalCase and are prefixed with uppercase 'T'
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-classes-structs-and-interfaces
dotnet_naming_symbols.type_parameter_group.applicable_kinds = type_parameter
dotnet_naming_rule.type_parameter_rule.symbols = type_parameter_group
dotnet_naming_rule.type_parameter_rule.style = prefix_type_parameters_with_t_style
dotnet_naming_rule.type_parameter_rule.severity = warning
# Function parameters use camelCase
# https://docs.microsoft.com/dotnet/standard/design-guidelines/naming-parameters
dotnet_naming_symbols.parameters_group.applicable_kinds = parameter
dotnet_naming_rule.parameters_rule.symbols = parameters_group
dotnet_naming_rule.parameters_rule.style = camel_case_style
dotnet_naming_rule.parameters_rule.severity = warning
##########################################
# License
##########################################
# The following applies as to the .editorconfig file ONLY, and is
# included below for reference, per the requirements of the license
# corresponding to this .editorconfig file.
# See: https://github.com/RehanSaeed/EditorConfig
#
# MIT License
#
# Copyright (c) 2017-2019 Muhammad Rehan Saeed
# Copyright (c) 2019 Henry Gabryjelski
#
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the
# Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject
# to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
##########################################

6
.github/dependabot.yml vendored Normal file
View File

@@ -0,0 +1,6 @@
version: 2
updates:
- package-ecosystem: "github-actions" # search for actions - there are other options available
directory: "/" # search in .github/workflows under root `/`
schedule:
interval: "weekly" # check for action update every week

View File

@@ -1,5 +1,5 @@
name: SharpCompress
on: [push]
on: [push, pull_request]
jobs:
build:
@@ -9,9 +9,12 @@ jobs:
os: [windows-latest, ubuntu-latest]
steps:
- uses: actions/checkout@v1
- uses: actions/setup-dotnet@v1
- uses: actions/checkout@v3
- uses: actions/setup-dotnet@v2
with:
dotnet-version: 3.1.202
- name: Run the Cake script
uses: ecampidoglio/cake-action@master
dotnet-version: 6.0.x
- run: dotnet run -p build/build.csproj
- uses: actions/upload-artifact@v3
with:
name: ${{ matrix.os }}-sharpcompress.nupkg
path: artifacts/*

2
.gitignore vendored
View File

@@ -17,3 +17,5 @@ tools
.idea/
.DS_Store
*.snupkg
/tests/TestArchives/6d23a38c-f064-4ef1-ad89-b942396f53b9/Scratch

View File

@@ -19,7 +19,6 @@
| Tar.XZ | LZMA2 | Decompress | TarArchive | TarReader | TarWriter (3) |
| GZip (single file) | DEFLATE | Both | GZipArchive | GZipReader | GZipWriter |
| 7Zip (4) | LZMA, LZMA2, BZip2, PPMd, BCJ, BCJ2, Deflate | Decompress | SevenZipArchive | N/A | N/A |
| LZip (single file) (5) | LZip (LZMA) | Both | LZipArchive | LZipReader | LZipWriter |
1. SOLID Rars are only supported in the RarReader API.
2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading/writing is supported but only with seekable streams as the Zip spec doesn't support Zip64 data in post data descriptors. Deflate64 is only supported for reading.

View File

@@ -1,12 +1,9 @@
# SharpCompress
SharpCompress is a compression library in pure C# for .NET Standard 1.3 and 2.0 that can unrar, un7zip, unzip, untar unbzip2 and ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip are implemented.
SharpCompress is a compression library in pure C# for .NET Standard 2.0, 2.1, .NET Core 3.1 and .NET 5.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
AppVeyor Build -
[![Build status](https://ci.appveyor.com/api/projects/status/voxg971oemmvxh1e/branch/master?svg=true)](https://ci.appveyor.com/project/adamhathcock/sharpcompress/branch/master)
GitHub Actions Build -
[![GitHubActions](https://github.com/adamhathcock/sharpcompress/workflows/SharpCompress/badge.svg)](https://circleci.com/gh/adamhathcock/sharpcompress)
@@ -185,6 +182,8 @@ I'm always looking for help or ideas. Please submit code or email with ideas. Un
XZ implementation based on: https://github.com/sambott/XZ.NET by @sambott
XZ BCJ filters support contributed by Louis-Michel Bergeron, on behalf of aDolus Technology Inc. - 2022
7Zip implementation based on: https://code.google.com/p/managed-lzma/
LICENSE

View File

@@ -13,6 +13,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SharpCompress", "src\SharpC
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SharpCompress.Test", "tests\SharpCompress.Test\SharpCompress.Test.csproj", "{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "build", "build\build.csproj", "{D4D613CB-5E94-47FB-85BE-B8423D20C545}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -27,6 +29,10 @@ Global
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Debug|Any CPU.Build.0 = Debug|Any CPU
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Release|Any CPU.ActiveCfg = Release|Any CPU
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Release|Any CPU.Build.0 = Release|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Debug|Any CPU.Build.0 = Debug|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Release|Any CPU.ActiveCfg = Release|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE

View File

@@ -1,20 +0,0 @@
version: '{build}'
image: Visual Studio 2019
pull_requests:
do_not_increment_build_number: true
branches:
only:
- master
nuget:
disable_publish_on_pr: true
build_script:
- ps: .\build.ps1
test: off
artifacts:
- path: src\SharpCompress\bin\Release\*.nupkg

View File

@@ -1,89 +0,0 @@
var target = Argument("target", "Default");
var tag = Argument("tag", "cake");
Task("Restore")
.Does(() =>
{
DotNetCoreRestore(".");
});
Task("Build")
.IsDependentOn("Restore")
.Does(() =>
{
if (IsRunningOnWindows())
{
MSBuild("./sharpcompress.sln", c =>
{
c.SetConfiguration("Release")
.SetVerbosity(Verbosity.Minimal)
.UseToolVersion(MSBuildToolVersion.VS2019);
});
}
else
{
var settings = new DotNetCoreBuildSettings
{
Framework = "netstandard1.3",
Configuration = "Release",
NoRestore = true
};
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
settings.Framework = "netstandard2.0";
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
settings.Framework = "netstandard2.1";
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
}
});
Task("Test")
.IsDependentOn("Build")
.Does(() =>
{
var files = GetFiles("tests/**/*.csproj");
foreach(var file in files)
{
var settings = new DotNetCoreTestSettings
{
Configuration = "Release",
Framework = "netcoreapp3.1"
};
DotNetCoreTest(file.ToString(), settings);
}
});
Task("Pack")
.IsDependentOn("Build")
.Does(() =>
{
if (IsRunningOnWindows())
{
MSBuild("src/SharpCompress/SharpCompress.csproj", c => c
.SetConfiguration("Release")
.SetVerbosity(Verbosity.Minimal)
.UseToolVersion(MSBuildToolVersion.VS2019)
.WithProperty("NoBuild", "true")
.WithTarget("Pack"));
}
else
{
Information("Skipping Pack as this is not Windows");
}
});
Task("Default")
.IsDependentOn("Restore")
.IsDependentOn("Build")
.IsDependentOn("Test")
.IsDependentOn("Pack");
Task("RunTests")
.IsDependentOn("Restore")
.IsDependentOn("Build")
.IsDependentOn("Test");
RunTarget(target);

228
build.ps1
View File

@@ -1,228 +0,0 @@
##########################################################################
# This is the Cake bootstrapper script for PowerShell.
# This file was downloaded from https://github.com/cake-build/resources
# Feel free to change this file to fit your needs.
##########################################################################
<#
.SYNOPSIS
This is a Powershell script to bootstrap a Cake build.
.DESCRIPTION
This Powershell script will download NuGet if missing, restore NuGet tools (including Cake)
and execute your Cake build script with the parameters you provide.
.PARAMETER Script
The build script to execute.
.PARAMETER Target
The build script target to run.
.PARAMETER Configuration
The build configuration to use.
.PARAMETER Verbosity
Specifies the amount of information to be displayed.
.PARAMETER Experimental
Tells Cake to use the latest Roslyn release.
.PARAMETER WhatIf
Performs a dry run of the build script.
No tasks will be executed.
.PARAMETER Mono
Tells Cake to use the Mono scripting engine.
.PARAMETER SkipToolPackageRestore
Skips restoring of packages.
.PARAMETER ScriptArgs
Remaining arguments are added here.
.LINK
http://cakebuild.net
#>
[CmdletBinding()]
Param(
[string]$Script = "build.cake",
[string]$Target = "Default",
[ValidateSet("Release", "Debug")]
[string]$Configuration = "Release",
[ValidateSet("Quiet", "Minimal", "Normal", "Verbose", "Diagnostic")]
[string]$Verbosity = "Verbose",
[switch]$Experimental,
[Alias("DryRun","Noop")]
[switch]$WhatIf,
[switch]$Mono,
[switch]$SkipToolPackageRestore,
[Parameter(Position=0,Mandatory=$false,ValueFromRemainingArguments=$true)]
[string[]]$ScriptArgs
)
[Reflection.Assembly]::LoadWithPartialName("System.Security") | Out-Null
function MD5HashFile([string] $filePath)
{
if ([string]::IsNullOrEmpty($filePath) -or !(Test-Path $filePath -PathType Leaf))
{
return $null
}
[System.IO.Stream] $file = $null;
[System.Security.Cryptography.MD5] $md5 = $null;
try
{
$md5 = [System.Security.Cryptography.MD5]::Create()
$file = [System.IO.File]::OpenRead($filePath)
return [System.BitConverter]::ToString($md5.ComputeHash($file))
}
finally
{
if ($file -ne $null)
{
$file.Dispose()
}
}
}
Write-Host "Preparing to run build script..."
if(!$PSScriptRoot){
$PSScriptRoot = Split-Path $MyInvocation.MyCommand.Path -Parent
}
$TOOLS_DIR = Join-Path $PSScriptRoot "tools"
$ADDINS_DIR = Join-Path $TOOLS_DIR "addins"
$MODULES_DIR = Join-Path $TOOLS_DIR "modules"
$NUGET_EXE = Join-Path $TOOLS_DIR "nuget.exe"
$CAKE_EXE = Join-Path $TOOLS_DIR "Cake/Cake.exe"
$NUGET_URL = "https://dist.nuget.org/win-x86-commandline/latest/nuget.exe"
$PACKAGES_CONFIG = Join-Path $TOOLS_DIR "packages.config"
$PACKAGES_CONFIG_MD5 = Join-Path $TOOLS_DIR "packages.config.md5sum"
$ADDINS_PACKAGES_CONFIG = Join-Path $ADDINS_DIR "packages.config"
$MODULES_PACKAGES_CONFIG = Join-Path $MODULES_DIR "packages.config"
# Should we use mono?
$UseMono = "";
if($Mono.IsPresent) {
Write-Verbose -Message "Using the Mono based scripting engine."
$UseMono = "-mono"
}
# Should we use the new Roslyn?
$UseExperimental = "";
if($Experimental.IsPresent -and !($Mono.IsPresent)) {
Write-Verbose -Message "Using experimental version of Roslyn."
$UseExperimental = "-experimental"
}
# Is this a dry run?
$UseDryRun = "";
if($WhatIf.IsPresent) {
$UseDryRun = "-dryrun"
}
# Make sure tools folder exists
if ((Test-Path $PSScriptRoot) -and !(Test-Path $TOOLS_DIR)) {
Write-Verbose -Message "Creating tools directory..."
New-Item -Path $TOOLS_DIR -Type directory | out-null
}
# Make sure that packages.config exist.
if (!(Test-Path $PACKAGES_CONFIG)) {
Write-Verbose -Message "Downloading packages.config..."
try { (New-Object System.Net.WebClient).DownloadFile("http://cakebuild.net/download/bootstrapper/packages", $PACKAGES_CONFIG) } catch {
Throw "Could not download packages.config."
}
}
# Try find NuGet.exe in path if not exists
if (!(Test-Path $NUGET_EXE)) {
Write-Verbose -Message "Trying to find nuget.exe in PATH..."
$existingPaths = $Env:Path -Split ';' | Where-Object { (![string]::IsNullOrEmpty($_)) -and (Test-Path $_ -PathType Container) }
$NUGET_EXE_IN_PATH = Get-ChildItem -Path $existingPaths -Filter "nuget.exe" | Select -First 1
if ($NUGET_EXE_IN_PATH -ne $null -and (Test-Path $NUGET_EXE_IN_PATH.FullName)) {
Write-Verbose -Message "Found in PATH at $($NUGET_EXE_IN_PATH.FullName)."
$NUGET_EXE = $NUGET_EXE_IN_PATH.FullName
}
}
# Try download NuGet.exe if not exists
if (!(Test-Path $NUGET_EXE)) {
Write-Verbose -Message "Downloading NuGet.exe..."
try {
(New-Object System.Net.WebClient).DownloadFile($NUGET_URL, $NUGET_EXE)
} catch {
Throw "Could not download NuGet.exe."
}
}
# Save nuget.exe path to environment to be available to child processed
$ENV:NUGET_EXE = $NUGET_EXE
# Restore tools from NuGet?
if(-Not $SkipToolPackageRestore.IsPresent) {
Push-Location
Set-Location $TOOLS_DIR
# Check for changes in packages.config and remove installed tools if true.
[string] $md5Hash = MD5HashFile($PACKAGES_CONFIG)
if((!(Test-Path $PACKAGES_CONFIG_MD5)) -Or
($md5Hash -ne (Get-Content $PACKAGES_CONFIG_MD5 ))) {
Write-Verbose -Message "Missing or changed package.config hash..."
Remove-Item * -Recurse -Exclude packages.config,nuget.exe
}
Write-Verbose -Message "Restoring tools from NuGet..."
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$TOOLS_DIR`""
if ($LASTEXITCODE -ne 0) {
Throw "An error occured while restoring NuGet tools."
}
else
{
$md5Hash | Out-File $PACKAGES_CONFIG_MD5 -Encoding "ASCII"
}
Write-Verbose -Message ($NuGetOutput | out-string)
Pop-Location
}
# Restore addins from NuGet
if (Test-Path $ADDINS_PACKAGES_CONFIG) {
Push-Location
Set-Location $ADDINS_DIR
Write-Verbose -Message "Restoring addins from NuGet..."
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$ADDINS_DIR`""
if ($LASTEXITCODE -ne 0) {
Throw "An error occured while restoring NuGet addins."
}
Write-Verbose -Message ($NuGetOutput | out-string)
Pop-Location
}
# Restore modules from NuGet
if (Test-Path $MODULES_PACKAGES_CONFIG) {
Push-Location
Set-Location $MODULES_DIR
Write-Verbose -Message "Restoring modules from NuGet..."
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$MODULES_DIR`""
if ($LASTEXITCODE -ne 0) {
Throw "An error occured while restoring NuGet modules."
}
Write-Verbose -Message ($NuGetOutput | out-string)
Pop-Location
}
# Make sure that Cake has been installed.
if (!(Test-Path $CAKE_EXE)) {
Throw "Could not find Cake.exe at $CAKE_EXE"
}
# Start Cake
Write-Host "Running build script..."
Invoke-Expression "& `"$CAKE_EXE`" `"$Script`" -target=`"$Target`" -configuration=`"$Configuration`" -verbosity=`"$Verbosity`" $UseMono $UseDryRun $UseExperimental $ScriptArgs"
exit $LASTEXITCODE

View File

@@ -1,42 +0,0 @@
#!/usr/bin/env bash
##########################################################################
# This is the Cake bootstrapper script for Linux and OS X.
# This file was downloaded from https://github.com/cake-build/resources
# Feel free to change this file to fit your needs.
##########################################################################
# Define directories.
SCRIPT_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
TOOLS_DIR=$SCRIPT_DIR/tools
CAKE_VERSION=0.27.1
CAKE_DLL=$TOOLS_DIR/Cake.CoreCLR.$CAKE_VERSION/Cake.dll
# Make sure the tools folder exist.
if [ ! -d "$TOOLS_DIR" ]; then
mkdir "$TOOLS_DIR"
fi
###########################################################################
# INSTALL CAKE
###########################################################################
if [ ! -f "$CAKE_DLL" ]; then
curl -Lsfo Cake.CoreCLR.zip "https://www.nuget.org/api/v2/package/Cake.CoreCLR/$CAKE_VERSION" && unzip -q Cake.CoreCLR.zip -d "$TOOLS_DIR/Cake.CoreCLR.$CAKE_VERSION" && rm -f Cake.CoreCLR.zip
if [ $? -ne 0 ]; then
echo "An error occured while installing Cake."
exit 1
fi
fi
# Make sure that Cake has been installed.
if [ ! -f "$CAKE_DLL" ]; then
echo "Could not find Cake.exe at '$CAKE_DLL'."
exit 1
fi
###########################################################################
# RUN BUILD SCRIPT
###########################################################################
# Start Cake
exec dotnet "$CAKE_DLL" "$@"

83
build/Program.cs Normal file
View File

@@ -0,0 +1,83 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.InteropServices;
using GlobExpressions;
using static Bullseye.Targets;
using static SimpleExec.Command;

// Bullseye-based build script: wires up the clean/format/build/test/publish
// target graph and runs whichever target(s) are named on the command line.
const string Clean = "clean";
const string Format = "format";
const string Build = "build";
const string Test = "test";
const string Publish = "publish";

// Delete every bin/ and obj/ directory found under the current directory.
Target(Clean,
    ForEach("**/bin", "**/obj"),
    dir =>
    {
        // Expand a glob pattern into matching directories, relative to ".".
        IEnumerable<string> GetDirectories(string d)
        {
            return Glob.Directories(".", d);
        }
        // Remove a directory tree if it exists, echoing its path first.
        void RemoveDirectory(string d)
        {
            if (Directory.Exists(d))
            {
                Console.WriteLine(d);
                Directory.Delete(d, true);
            }
        }
        foreach (var d in GetDirectories(dir))
        {
            RemoveDirectory(d);
        }
    });

// Restore local dotnet tools, then verify formatting (fails on drift).
Target(Format,
    () =>
    {
        Run("dotnet", "tool restore");
        Run("dotnet", "format --check");
    });

// Release build of the main library project.
// NOTE(review): the `framework` lambda parameter is unused and this target has
// no ForEach input — presumably leftover from a per-framework loop; confirm.
Target(Build,
    DependsOn(Format),
    framework =>
    {
        Run("dotnet", "build src/SharpCompress/SharpCompress.csproj -c Release");
    });

// Run every *.Test.csproj once per target framework; net461 is skipped on
// non-Windows hosts since .NET Framework tests cannot run there.
Target(Test,
    DependsOn(Build),
    ForEach("net6.0", "net461"),
    framework =>
    {
        // Expand a glob pattern into matching files, relative to ".".
        IEnumerable<string> GetFiles(string d)
        {
            return Glob.Files(".", d);
        }
        if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows) && framework == "net461")
        {
            return;
        }
        foreach (var file in GetFiles("**/*.Test.csproj"))
        {
            Run("dotnet", $"test {file} -c Release -f {framework}");
        }
    });

// Pack the NuGet package into artifacts/ after tests pass.
Target(Publish,
    DependsOn(Test),
    () =>
    {
        Run("dotnet", "pack src/SharpCompress/SharpCompress.csproj -c Release -o artifacts/");
    });

// Running with no arguments executes the full pipeline via "default".
Target("default", DependsOn(Publish), () => Console.WriteLine("Done!"));

await RunTargetsAndExitAsync(args);

14
build/build.csproj Normal file
View File

@@ -0,0 +1,14 @@
<!-- Build-tool project: console app hosting the Bullseye build script (Program.cs). -->
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net6.0</TargetFramework>
  </PropertyGroup>
  <ItemGroup>
    <!-- Bullseye: target/dependency runner; Glob: file globbing; SimpleExec: process execution. -->
    <PackageReference Include="Bullseye" Version="4.0.0" />
    <PackageReference Include="Glob" Version="1.1.9" />
    <PackageReference Include="SimpleExec" Version="10.0.0" />
  </ItemGroup>
</Project>

6
global.json Normal file
View File

@@ -0,0 +1,6 @@
{
"sdk": {
"version": "6.0.200",
"rollForward": "latestFeature"
}
}

View File

@@ -0,0 +1,420 @@
// Copyright (c) Six Labors.
// Licensed under the Apache License, Version 2.0.

// Hardware intrinsics (Vector128/Vector256, SSE/AVX2) are only available on
// modern target frameworks, so the SIMD paths are compiled out elsewhere.
#if !NETSTANDARD2_0 && !NETSTANDARD2_1 && !NETFRAMEWORK
#define SUPPORTS_RUNTIME_INTRINSICS
#define SUPPORTS_HOTPATH
#endif

using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

#if SUPPORTS_RUNTIME_INTRINSICS
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
#endif

#pragma warning disable IDE0007 // Use implicit type

namespace SharpCompress.Algorithms
{
    /// <summary>
    /// Calculates the 32 bit Adler checksum of a given buffer according to
    /// RFC 1950 (ZLIB Compressed Data Format Specification version 3.3).
    /// </summary>
    internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/blob/main/src/ImageSharp/Compression/Zlib/Adler32.cs
    {
        /// <summary>
        /// Global inlining options. Helps temporarily disable inlining for better profiler output.
        /// </summary>
        private static class InliningOptions // From https://github.com/SixLabors/ImageSharp/blob/main/src/ImageSharp/Common/Helpers/InliningOptions.cs
        {
            /// <summary>
            /// <see cref="MethodImplOptions.AggressiveInlining"/> regardless of the build conditions.
            /// </summary>
            public const MethodImplOptions AlwaysInline = MethodImplOptions.AggressiveInlining;
#if PROFILING
            public const MethodImplOptions HotPath = MethodImplOptions.NoInlining;
            public const MethodImplOptions ShortMethod = MethodImplOptions.NoInlining;
#else
#if SUPPORTS_HOTPATH
            public const MethodImplOptions HotPath = MethodImplOptions.AggressiveOptimization;
#else
            public const MethodImplOptions HotPath = MethodImplOptions.AggressiveInlining;
#endif
            public const MethodImplOptions ShortMethod = MethodImplOptions.AggressiveInlining;
#endif
            public const MethodImplOptions ColdPath = MethodImplOptions.NoInlining;
        }

#if SUPPORTS_RUNTIME_INTRINSICS
        /// <summary>
        /// Provides optimized static methods for trigonometric, logarithmic,
        /// and other common mathematical functions.
        /// </summary>
        private static class Numerics // From https://github.com/SixLabors/ImageSharp/blob/main/src/ImageSharp/Common/Helpers/Numerics.cs
        {
            /// <summary>
            /// Reduces elements of the vector into one sum.
            /// </summary>
            /// <param name="accumulator">The accumulator to reduce.</param>
            /// <returns>The sum of all elements.</returns>
            [MethodImpl(MethodImplOptions.AggressiveInlining)]
            public static int ReduceSum(Vector256<int> accumulator)
            {
                // Add upper lane to lower lane.
                Vector128<int> vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper());

                // Add odd to even.
                vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_11_01_01));

                // Add high to low.
                vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_10_11_10));

                return Sse2.ConvertToInt32(vsum);
            }

            /// <summary>
            /// Reduces even elements of the vector into one sum.
            /// </summary>
            /// <param name="accumulator">The accumulator to reduce.</param>
            /// <returns>The sum of even elements.</returns>
            [MethodImpl(MethodImplOptions.AggressiveInlining)]
            public static int EvenReduceSum(Vector256<int> accumulator)
            {
                Vector128<int> vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper()); // add upper lane to lower lane
                vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_10_11_10)); // add high to low

                // Vector128<int>.ToScalar() isn't optimized pre-net5.0 https://github.com/dotnet/runtime/pull/37882
                return Sse2.ConvertToInt32(vsum);
            }
        }
#endif

        /// <summary>
        /// The default initial seed value of a Adler32 checksum calculation.
        /// </summary>
        public const uint SeedValue = 1U;

        // Largest prime smaller than 65536
        private const uint BASE = 65521;

        // NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1
        private const uint NMAX = 5552;

#if SUPPORTS_RUNTIME_INTRINSICS
        // Below this length the SIMD setup cost outweighs the scalar loop.
        private const int MinBufferSize = 64;

        // Bytes consumed per SIMD iteration (32).
        private const int BlockSize = 1 << 5;

        // The C# compiler emits this as a compile-time constant embedded in the PE file.
        // Multiply-add taps [32..17] and [16..1] used by the SSE path to weight
        // each byte's contribution to s2.
        private static ReadOnlySpan<byte> Tap1Tap2 => new byte[]
        {
            32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, // tap1
            16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1 // tap2
        };
#endif

        /// <summary>
        /// Calculates the Adler32 checksum with the bytes taken from the span.
        /// </summary>
        /// <param name="buffer">The readonly span of bytes.</param>
        /// <returns>The <see cref="uint"/>.</returns>
        [MethodImpl(InliningOptions.ShortMethod)]
        public static uint Calculate(ReadOnlySpan<byte> buffer)
            => Calculate(SeedValue, buffer);

        /// <summary>
        /// Calculates the Adler32 checksum with the bytes taken from the span and seed.
        /// </summary>
        /// <param name="adler">The input Adler32 value.</param>
        /// <param name="buffer">The readonly span of bytes.</param>
        /// <returns>The <see cref="uint"/>.</returns>
        [MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
        public static uint Calculate(uint adler, ReadOnlySpan<byte> buffer)
        {
            // Empty input leaves the running checksum unchanged.
            if (buffer.IsEmpty)
            {
                return adler;
            }

#if SUPPORTS_RUNTIME_INTRINSICS
            // Prefer AVX2, then SSSE3, falling back to scalar for short buffers
            // or hardware without the required instruction sets.
            if (Avx2.IsSupported && buffer.Length >= MinBufferSize)
            {
                return CalculateAvx2(adler, buffer);
            }

            if (Ssse3.IsSupported && buffer.Length >= MinBufferSize)
            {
                return CalculateSse(adler, buffer);
            }

            return CalculateScalar(adler, buffer);
#else
            return CalculateScalar(adler, buffer);
#endif
        }

        // Based on https://github.com/chromium/chromium/blob/master/third_party/zlib/adler32_simd.c
#if SUPPORTS_RUNTIME_INTRINSICS
        [MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
        private static unsafe uint CalculateSse(uint adler, ReadOnlySpan<byte> buffer)
        {
            // Split the running checksum into its low (s1) and high (s2) halves.
            uint s1 = adler & 0xFFFF;
            uint s2 = (adler >> 16) & 0xFFFF;

            // Process the data in blocks.
            uint length = (uint)buffer.Length;
            uint blocks = length / BlockSize;
            length -= blocks * BlockSize;

            fixed (byte* bufferPtr = &MemoryMarshal.GetReference(buffer))
            {
                fixed (byte* tapPtr = &MemoryMarshal.GetReference(Tap1Tap2))
                {
                    byte* localBufferPtr = bufferPtr;

                    // _mm_setr_epi8 on x86
                    Vector128<sbyte> tap1 = Sse2.LoadVector128((sbyte*)tapPtr);
                    Vector128<sbyte> tap2 = Sse2.LoadVector128((sbyte*)(tapPtr + 0x10));
                    Vector128<byte> zero = Vector128<byte>.Zero;
                    var ones = Vector128.Create((short)1);

                    while (blocks > 0)
                    {
                        uint n = NMAX / BlockSize; /* The NMAX constraint. */
                        if (n > blocks)
                        {
                            n = blocks;
                        }

                        blocks -= n;

                        // Process n blocks of data. At most NMAX data bytes can be
                        // processed before s2 must be reduced modulo BASE.
                        Vector128<uint> v_ps = Vector128.CreateScalar(s1 * n);
                        Vector128<uint> v_s2 = Vector128.CreateScalar(s2);
                        Vector128<uint> v_s1 = Vector128<uint>.Zero;

                        do
                        {
                            // Load 32 input bytes.
                            Vector128<byte> bytes1 = Sse3.LoadDquVector128(localBufferPtr);
                            Vector128<byte> bytes2 = Sse3.LoadDquVector128(localBufferPtr + 0x10);

                            // Add previous block byte sum to v_ps.
                            v_ps = Sse2.Add(v_ps, v_s1);

                            // Horizontally add the bytes for s1, multiply-adds the
                            // bytes by [ 32, 31, 30, ... ] for s2.
                            v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes1, zero).AsUInt32());
                            Vector128<short> mad1 = Ssse3.MultiplyAddAdjacent(bytes1, tap1);
                            v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad1, ones).AsUInt32());

                            v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes2, zero).AsUInt32());
                            Vector128<short> mad2 = Ssse3.MultiplyAddAdjacent(bytes2, tap2);
                            v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad2, ones).AsUInt32());

                            localBufferPtr += BlockSize;
                        }
                        while (--n > 0);

                        // Fold in the deferred s1 * 32 contribution (shift by 5 == * BlockSize).
                        v_s2 = Sse2.Add(v_s2, Sse2.ShiftLeftLogical(v_ps, 5));

                        // Sum epi32 ints v_s1(s2) and accumulate in s1(s2).
                        const byte S2301 = 0b1011_0001; // A B C D -> B A D C
                        const byte S1032 = 0b0100_1110; // A B C D -> C D A B

                        v_s1 = Sse2.Add(v_s1, Sse2.Shuffle(v_s1, S1032));

                        s1 += v_s1.ToScalar();

                        v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S2301));
                        v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S1032));

                        s2 = v_s2.ToScalar();

                        // Reduce.
                        s1 %= BASE;
                        s2 %= BASE;
                    }

                    // Finish any tail shorter than one 32-byte block.
                    if (length > 0)
                    {
                        HandleLeftOver(localBufferPtr, length, ref s1, ref s2);
                    }

                    return s1 | (s2 << 16);
                }
            }
        }

        // Based on: https://github.com/zlib-ng/zlib-ng/blob/develop/arch/x86/adler32_avx2.c
        [MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
        public static unsafe uint CalculateAvx2(uint adler, ReadOnlySpan<byte> buffer)
        {
            // Split the running checksum into its low (s1) and high (s2) halves.
            uint s1 = adler & 0xFFFF;
            uint s2 = (adler >> 16) & 0xFFFF;
            uint length = (uint)buffer.Length;

            fixed (byte* bufferPtr = &MemoryMarshal.GetReference(buffer))
            {
                byte* localBufferPtr = bufferPtr;

                Vector256<byte> zero = Vector256<byte>.Zero;
                var dot3v = Vector256.Create((short)1);
                // Per-byte weights [32..1] used to accumulate s2 contributions.
                var dot2v = Vector256.Create(32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1);

                // Process n blocks of data. At most NMAX data bytes can be
                // processed before s2 must be reduced modulo BASE.
                var vs1 = Vector256.CreateScalar(s1);
                var vs2 = Vector256.CreateScalar(s2);

                while (length >= 32)
                {
                    // Clamp each pass to NMAX bytes (rounded down to a multiple of 32)
                    // so the 32-bit lanes cannot overflow before reduction.
                    int k = length < NMAX ? (int)length : (int)NMAX;
                    k -= k % 32;
                    length -= (uint)k;

                    Vector256<uint> vs10 = vs1;
                    Vector256<uint> vs3 = Vector256<uint>.Zero;

                    while (k >= 32)
                    {
                        // Load 32 input bytes.
                        Vector256<byte> block = Avx.LoadVector256(localBufferPtr);

                        // Sum of abs diff, resulting in 2 x int32's
                        Vector256<ushort> vs1sad = Avx2.SumAbsoluteDifferences(block, zero);

                        vs1 = Avx2.Add(vs1, vs1sad.AsUInt32());
                        vs3 = Avx2.Add(vs3, vs10);

                        // sum 32 uint8s to 16 shorts.
                        Vector256<short> vshortsum2 = Avx2.MultiplyAddAdjacent(block, dot2v);

                        // sum 16 shorts to 8 uint32s.
                        Vector256<int> vsum2 = Avx2.MultiplyAddAdjacent(vshortsum2, dot3v);

                        vs2 = Avx2.Add(vsum2.AsUInt32(), vs2);
                        vs10 = vs1;

                        localBufferPtr += BlockSize;
                        k -= 32;
                    }

                    // Defer the multiplication with 32 to outside of the loop.
                    vs3 = Avx2.ShiftLeftLogical(vs3, 5);
                    vs2 = Avx2.Add(vs2, vs3);

                    // Collapse the vector lanes back to scalars and reduce mod BASE.
                    s1 = (uint)Numerics.EvenReduceSum(vs1.AsInt32());
                    s2 = (uint)Numerics.ReduceSum(vs2.AsInt32());

                    s1 %= BASE;
                    s2 %= BASE;

                    vs1 = Vector256.CreateScalar(s1);
                    vs2 = Vector256.CreateScalar(s2);
                }

                // Finish any tail shorter than one 32-byte block.
                if (length > 0)
                {
                    HandleLeftOver(localBufferPtr, length, ref s1, ref s2);
                }

                return s1 | (s2 << 16);
            }
        }

        // Scalar tail handler shared by the SIMD paths: consumes the remaining
        // (< NMAX) bytes and performs the final modular reduction of s1 and s2.
        private static unsafe void HandleLeftOver(byte* localBufferPtr, uint length, ref uint s1, ref uint s2)
        {
            if (length >= 16)
            {
                // Unrolled 16-byte step; s2 += s1 += b folds each byte into both sums.
                s2 += s1 += localBufferPtr[0];
                s2 += s1 += localBufferPtr[1];
                s2 += s1 += localBufferPtr[2];
                s2 += s1 += localBufferPtr[3];
                s2 += s1 += localBufferPtr[4];
                s2 += s1 += localBufferPtr[5];
                s2 += s1 += localBufferPtr[6];
                s2 += s1 += localBufferPtr[7];
                s2 += s1 += localBufferPtr[8];
                s2 += s1 += localBufferPtr[9];
                s2 += s1 += localBufferPtr[10];
                s2 += s1 += localBufferPtr[11];
                s2 += s1 += localBufferPtr[12];
                s2 += s1 += localBufferPtr[13];
                s2 += s1 += localBufferPtr[14];
                s2 += s1 += localBufferPtr[15];

                localBufferPtr += 16;
                length -= 16;
            }

            while (length-- > 0)
            {
                s2 += s1 += *localBufferPtr++;
            }

            // s1 can exceed BASE by at most one multiple here, so a single
            // conditional subtraction suffices; s2 needs a full modulo.
            if (s1 >= BASE)
            {
                s1 -= BASE;
            }

            s2 %= BASE;
        }
#endif

        // Portable fallback: plain zlib-style unrolled loop, reducing every NMAX bytes.
        [MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
        private static unsafe uint CalculateScalar(uint adler, ReadOnlySpan<byte> buffer)
        {
            // Split the running checksum into its low (s1) and high (s2) halves.
            uint s1 = adler & 0xFFFF;
            uint s2 = (adler >> 16) & 0xFFFF;
            uint k;

            fixed (byte* bufferPtr = buffer)
            {
                var localBufferPtr = bufferPtr;
                uint length = (uint)buffer.Length;

                while (length > 0)
                {
                    // Consume at most NMAX bytes before reducing modulo BASE.
                    k = length < NMAX ? length : NMAX;
                    length -= k;

                    while (k >= 16)
                    {
                        s2 += s1 += localBufferPtr[0];
                        s2 += s1 += localBufferPtr[1];
                        s2 += s1 += localBufferPtr[2];
                        s2 += s1 += localBufferPtr[3];
                        s2 += s1 += localBufferPtr[4];
                        s2 += s1 += localBufferPtr[5];
                        s2 += s1 += localBufferPtr[6];
                        s2 += s1 += localBufferPtr[7];
                        s2 += s1 += localBufferPtr[8];
                        s2 += s1 += localBufferPtr[9];
                        s2 += s1 += localBufferPtr[10];
                        s2 += s1 += localBufferPtr[11];
                        s2 += s1 += localBufferPtr[12];
                        s2 += s1 += localBufferPtr[13];
                        s2 += s1 += localBufferPtr[14];
                        s2 += s1 += localBufferPtr[15];

                        localBufferPtr += 16;
                        k -= 16;
                    }

                    while (k-- > 0)
                    {
                        s2 += s1 += *localBufferPtr++;
                    }

                    s1 %= BASE;
                    s2 %= BASE;
                }

                return (s2 << 16) | s1;
            }
        }
    }
}

View File

@@ -1,8 +1,9 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives
@@ -14,46 +15,34 @@ namespace SharpCompress.Archives
private readonly LazyReadOnlyCollection<TVolume> lazyVolumes;
private readonly LazyReadOnlyCollection<TEntry> lazyEntries;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionBegin;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionEnd;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionBegin;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionEnd;
public event EventHandler<CompressedBytesReadEventArgs> CompressedBytesRead;
public event EventHandler<FilePartExtractionBeginEventArgs> FilePartExtractionBegin;
public event EventHandler<CompressedBytesReadEventArgs>? CompressedBytesRead;
public event EventHandler<FilePartExtractionBeginEventArgs>? FilePartExtractionBegin;
protected ReaderOptions ReaderOptions { get; }
private bool disposed;
protected SourceStream SrcStream;
internal AbstractArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerOptions)
internal AbstractArchive(ArchiveType type, SourceStream srcStream)
{
Type = type;
if (!fileInfo.Exists)
{
throw new ArgumentException("File does not exist: " + fileInfo.FullName);
}
ReaderOptions = readerOptions;
readerOptions.LeaveStreamOpen = false;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(fileInfo));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
}
protected abstract IEnumerable<TVolume> LoadVolumes(FileInfo file);
internal AbstractArchive(ArchiveType type, IEnumerable<Stream> streams, ReaderOptions readerOptions)
{
Type = type;
ReaderOptions = readerOptions;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(streams.Select(CheckStreams)));
ReaderOptions = srcStream.ReaderOptions;
SrcStream = srcStream;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(SrcStream));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
}
#nullable disable
internal AbstractArchive(ArchiveType type)
{
Type = type;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
}
#nullable enable
public ArchiveType Type { get; }
@@ -79,29 +68,29 @@ namespace SharpCompress.Archives
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
/// </summary>
public virtual ICollection<TEntry> Entries { get { return lazyEntries; } }
public virtual ICollection<TEntry> Entries => lazyEntries;
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveVolumes across the one or many parts of the RarArchive.
/// </summary>
public ICollection<TVolume> Volumes { get { return lazyVolumes; } }
public ICollection<TVolume> Volumes => lazyVolumes;
/// <summary>
/// The total size of the files compressed in the archive.
/// </summary>
public virtual long TotalSize { get { return Entries.Aggregate(0L, (total, cf) => total + cf.CompressedSize); } }
public virtual long TotalSize => Entries.Aggregate(0L, (total, cf) => total + cf.CompressedSize);
/// <summary>
/// The total size of the files as uncompressed in the archive.
/// </summary>
public virtual long TotalUncompressSize { get { return Entries.Aggregate(0L, (total, cf) => total + cf.Size); } }
public virtual long TotalUncompressSize => Entries.Aggregate(0L, (total, cf) => total + cf.Size);
protected abstract IEnumerable<TVolume> LoadVolumes(IEnumerable<Stream> streams);
protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream srcStream);
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);
IEnumerable<IArchiveEntry> IArchive.Entries { get { return Entries.Cast<IArchiveEntry>(); } }
IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();
IEnumerable<IVolume> IArchive.Volumes { get { return lazyVolumes.Cast<IVolume>(); } }
IEnumerable<IVolume> IArchive.Volumes => lazyVolumes.Cast<IVolume>();
public virtual void Dispose()
{
@@ -109,6 +98,8 @@ namespace SharpCompress.Archives
{
lazyVolumes.ForEach(v => v.Dispose());
lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
if (SrcStream != null)
SrcStream.Dispose();
disposed = true;
}
}
@@ -121,21 +112,19 @@ namespace SharpCompress.Archives
void IExtractionListener.FireCompressedBytesRead(long currentPartCompressedBytes, long compressedReadBytes)
{
CompressedBytesRead?.Invoke(this, new CompressedBytesReadEventArgs
{
CurrentFilePartCompressedBytesRead = currentPartCompressedBytes,
CompressedBytesRead = compressedReadBytes
});
CompressedBytesRead?.Invoke(this, new CompressedBytesReadEventArgs(
currentFilePartCompressedBytesRead: currentPartCompressedBytes,
compressedBytesRead: compressedReadBytes
));
}
void IExtractionListener.FireFilePartExtractionBegin(string name, long size, long compressedSize)
{
FilePartExtractionBegin?.Invoke(this, new FilePartExtractionBeginEventArgs
{
CompressedSize = compressedSize,
Size = size,
Name = name
});
FilePartExtractionBegin?.Invoke(this, new FilePartExtractionBeginEventArgs(
compressedSize: compressedSize,
size: size,
name: name
));
}
/// <summary>
@@ -160,7 +149,7 @@ namespace SharpCompress.Archives
/// <summary>
/// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
/// </summary>
public virtual bool IsSolid { get { return false; } }
public virtual bool IsSolid => false;
/// <summary>
/// The archive can find all the parts of the archive needed to fully extract the archive. This forces the parsing of the entire archive.

View File

@@ -1,8 +1,9 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Writers;
@@ -12,24 +13,36 @@ namespace SharpCompress.Archives
where TEntry : IArchiveEntry
where TVolume : IVolume
{
private class RebuildPauseDisposable : IDisposable
{
private readonly AbstractWritableArchive<TEntry, TVolume> archive;
public RebuildPauseDisposable(AbstractWritableArchive<TEntry, TVolume> archive)
{
this.archive = archive;
archive.pauseRebuilding = true;
}
public void Dispose()
{
archive.pauseRebuilding = false;
archive.RebuildModifiedCollection();
}
}
private readonly List<TEntry> newEntries = new List<TEntry>();
private readonly List<TEntry> removedEntries = new List<TEntry>();
private readonly List<TEntry> modifiedEntries = new List<TEntry>();
private bool hasModifications;
private bool pauseRebuilding;
internal AbstractWritableArchive(ArchiveType type)
: base(type)
{
}
internal AbstractWritableArchive(ArchiveType type, Stream stream, ReaderOptions readerFactoryOptions)
: base(type, stream.AsEnumerable(), readerFactoryOptions)
{
}
internal AbstractWritableArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerFactoryOptions)
: base(type, fileInfo, readerFactoryOptions)
internal AbstractWritableArchive(ArchiveType type, SourceStream srcStream)
: base(type, srcStream)
{
}
@@ -45,8 +58,17 @@ namespace SharpCompress.Archives
}
}
public IDisposable PauseEntryRebuilding()
{
return new RebuildPauseDisposable(this);
}
private void RebuildModifiedCollection()
{
if (pauseRebuilding)
{
return;
}
hasModifications = true;
newEntries.RemoveAll(v => removedEntries.Contains(v));
modifiedEntries.Clear();
@@ -83,8 +105,7 @@ namespace SharpCompress.Archives
public TEntry AddEntry(string key, Stream source, bool closeStream,
long size = 0, DateTime? modified = null)
{
if (key.StartsWith("/")
|| key.StartsWith("\\"))
if (key.Length > 0 && key[0] is '/' or '\\')
{
key = key.Substring(1);
}
@@ -103,7 +124,7 @@ namespace SharpCompress.Archives
foreach (var path in Entries.Select(x => x.Key))
{
var p = path.Replace('/', '\\');
if (p.StartsWith("\\"))
if (p.Length > 0 && p[0] == '\\')
{
p = p.Substring(1);
}

View File

@@ -1,16 +1,19 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Rar;
using SharpCompress.Archives.SevenZip;
using SharpCompress.Archives.Tar;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives
{
public class ArchiveFactory
public static class ArchiveFactory
{
/// <summary>
/// Opens an Archive for random access
@@ -18,67 +21,46 @@ namespace SharpCompress.Archives
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <returns></returns>
public static IArchive Open(Stream stream, ReaderOptions readerOptions = null)
public static IArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException("Stream should be readable and seekable");
}
readerOptions = readerOptions ?? new ReaderOptions();
if (ZipArchive.IsZipFile(stream, null))
readerOptions ??= new ReaderOptions();
ArchiveType? type;
IsArchive(stream, out type); //test and reset stream position
if (type != null)
{
stream.Seek(0, SeekOrigin.Begin);
return ZipArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (SevenZipArchive.IsSevenZipFile(stream))
{
stream.Seek(0, SeekOrigin.Begin);
return SevenZipArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (GZipArchive.IsGZipFile(stream))
{
stream.Seek(0, SeekOrigin.Begin);
return GZipArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (RarArchive.IsRarFile(stream, readerOptions))
{
stream.Seek(0, SeekOrigin.Begin);
return RarArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (TarArchive.IsTarFile(stream))
{
stream.Seek(0, SeekOrigin.Begin);
return TarArchive.Open(stream, readerOptions);
switch (type.Value)
{
case ArchiveType.Zip:
return ZipArchive.Open(stream, readerOptions);
case ArchiveType.SevenZip:
return SevenZipArchive.Open(stream, readerOptions);
case ArchiveType.GZip:
return GZipArchive.Open(stream, readerOptions);
case ArchiveType.Rar:
return RarArchive.Open(stream, readerOptions);
case ArchiveType.Tar:
return TarArchive.Open(stream, readerOptions);
}
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip, LZip");
}
public static IWritableArchive Create(ArchiveType type)
{
switch (type)
return type switch
{
case ArchiveType.Zip:
{
return ZipArchive.Create();
}
case ArchiveType.Tar:
{
return TarArchive.Create();
}
case ArchiveType.GZip:
{
return GZipArchive.Create();
}
default:
{
throw new NotSupportedException("Cannot create Archives of type: " + type);
}
}
ArchiveType.Zip => ZipArchive.Create(),
ArchiveType.Tar => TarArchive.Create(),
ArchiveType.GZip => GZipArchive.Create(),
_ => throw new NotSupportedException("Cannot create Archives of type: " + type)
};
}
/// <summary>
@@ -86,7 +68,7 @@ namespace SharpCompress.Archives
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
public static IArchive Open(string filePath, ReaderOptions options = null)
public static IArchive Open(string filePath, ReaderOptions? options = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), options);
@@ -97,52 +79,217 @@ namespace SharpCompress.Archives
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
public static IArchive Open(FileInfo fileInfo, ReaderOptions options = null)
public static IArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
options = options ?? new ReaderOptions { LeaveStreamOpen = false };
using (var stream = fileInfo.OpenRead())
options ??= new ReaderOptions { LeaveStreamOpen = false };
ArchiveType? type;
using (Stream stream = fileInfo.OpenRead())
{
if (ZipArchive.IsZipFile(stream, null))
IsArchive(stream, out type); //test and reset stream position
if (type != null)
{
return ZipArchive.Open(fileInfo, options);
switch (type.Value)
{
case ArchiveType.Zip:
return ZipArchive.Open(fileInfo, options);
case ArchiveType.SevenZip:
return SevenZipArchive.Open(fileInfo, options);
case ArchiveType.GZip:
return GZipArchive.Open(fileInfo, options);
case ArchiveType.Rar:
return RarArchive.Open(fileInfo, options);
case ArchiveType.Tar:
return TarArchive.Open(fileInfo, options);
}
}
stream.Seek(0, SeekOrigin.Begin);
if (SevenZipArchive.IsSevenZipFile(stream))
{
return SevenZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (GZipArchive.IsGZipFile(stream))
{
return GZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (RarArchive.IsRarFile(stream, options))
{
return RarArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (TarArchive.IsTarFile(stream))
{
return TarArchive.Open(fileInfo, options);
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="options"></param>
public static IArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? options = null)
{
    fileInfos.CheckNotNull(nameof(fileInfos));
    FileInfo[] files = fileInfos.ToArray();
    if (files.Length == 0)
    {
        throw new InvalidOperationException("No files to open");
    }

    FileInfo fileInfo = files[0];
    if (files.Length == 1)
    {
        // Single part: defer to the plain FileInfo overload.
        return Open(fileInfo, options);
    }

    fileInfo.CheckNotNull(nameof(fileInfo));
    options ??= new ReaderOptions { LeaveStreamOpen = false };

    // Sniff the first part only; IsArchive rewinds the probe stream itself.
    ArchiveType? type;
    using (Stream stream = fileInfo.OpenRead())
    {
        IsArchive(stream, out type); //test and reset stream position
    }

    return type switch
    {
        ArchiveType.Zip => ZipArchive.Open(files, options),
        ArchiveType.SevenZip => SevenZipArchive.Open(files, options),
        ArchiveType.GZip => GZipArchive.Open(files, options),
        ArchiveType.Rar => RarArchive.Open(files, options),
        ArchiveType.Tar => TarArchive.Open(files, options),
        _ => throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip")
    };
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
/// <summary>
/// Opens a multi-part archive from an ordered sequence of seekable streams.
/// </summary>
/// <param name="streams">Parts of the archive, first part first.</param>
/// <param name="options">Reader options; defaults are used when null.</param>
/// <returns>The opened <see cref="IArchive"/>.</returns>
/// <exception cref="InvalidOperationException">No streams were supplied or the format is unrecognized.</exception>
public static IArchive Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
{
    streams.CheckNotNull(nameof(streams));
    // Materialize exactly once: the previous Count()/Count()/First() pattern
    // enumerated the caller's sequence several times.
    Stream[] parts = streams.ToArray();
    if (parts.Length == 0)
    {
        throw new InvalidOperationException("No streams");
    }
    if (parts.Length == 1)
    {
        return Open(parts[0], options);
    }
    options ??= new ReaderOptions();
    // Probe the first part only. IsArchive rewinds the stream when done.
    // Do NOT dispose it here (the old `using` did): the Open() call below
    // must still read from this very stream.
    IsArchive(parts[0], out ArchiveType? type);
    switch (type)
    {
        case ArchiveType.Zip:
            return ZipArchive.Open(parts, options);
        case ArchiveType.SevenZip:
            return SevenZipArchive.Open(parts, options);
        case ArchiveType.GZip:
            return GZipArchive.Open(parts, options);
        case ArchiveType.Rar:
            return RarArchive.Open(parts, options);
        case ArchiveType.Tar:
            return TarArchive.Open(parts, options);
    }
    throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(string sourceArchive, string destinationDirectory,
ExtractionOptions options = null)
ExtractionOptions? options = null)
{
using (IArchive archive = Open(sourceArchive))
using IArchive archive = Open(sourceArchive);
foreach (IArchiveEntry entry in archive.Entries)
{
foreach (IArchiveEntry entry in archive.Entries)
{
entry.WriteToDirectory(destinationDirectory, options);
}
entry.WriteToDirectory(destinationDirectory, options);
}
}
/// <summary>
/// Detects whether the file at <paramref name="filePath"/> is a supported archive.
/// </summary>
public static bool IsArchive(string filePath, out ArchiveType? type)
{
    filePath.CheckNotNullOrEmpty(nameof(filePath));
    // using-declaration: the stream is closed when this method returns.
    using Stream fileStream = File.OpenRead(filePath);
    return IsArchive(fileStream, out type);
}
/// <summary>
/// Sniffs the stream for a supported archive format. The stream is always
/// left positioned at the beginning when this method returns.
/// </summary>
private static bool IsArchive(Stream stream, out ArchiveType? type)
{
    type = null;
    stream.CheckNotNull(nameof(stream));
    if (!stream.CanRead || !stream.CanSeek)
    {
        throw new ArgumentException("Stream should be readable and seekable");
    }

    // Runs one format sniffer and rewinds the stream afterwards so the next
    // probe (and ultimately the caller) starts from position zero.
    ArchiveType? detected = null;
    bool Probe(Func<Stream, bool> sniff, ArchiveType candidate)
    {
        bool hit = sniff(stream);
        stream.Seek(0, SeekOrigin.Begin);
        if (hit)
        {
            detected = candidate;
        }
        return hit;
    }

    // Same probe order as before; short-circuits on the first match.
    _ = Probe(s => ZipArchive.IsZipFile(s, null), ArchiveType.Zip)
        || Probe(s => SevenZipArchive.IsSevenZipFile(s), ArchiveType.SevenZip)
        || Probe(s => GZipArchive.IsGZipFile(s), ArchiveType.GZip)
        || Probe(s => RarArchive.IsRarFile(s), ArchiveType.Rar)
        || Probe(s => TarArchive.IsTarFile(s), ArchiveType.Tar)
        // Last resort: the final file of a multi-part zip may only be
        // recognizable via the multi-part test.
        || Probe(s => ZipArchive.IsZipMulti(s), ArchiveType.Zip);

    type = detected;
    return type != null;
}
/// <summary>
/// From a passed in archive (zip, rar, 7z, 001), return all parts.
/// </summary>
/// <param name="part1"></param>
/// <returns></returns>
/// <summary>
/// From a path to the first part of an archive set, returns the full paths of all parts.
/// </summary>
public static IEnumerable<string> GetFileParts(string part1)
{
    part1.CheckNotNullOrEmpty(nameof(part1));
    // Validation is eager; enumeration of the parts themselves stays lazy.
    IEnumerable<FileInfo> parts = GetFileParts(new FileInfo(part1));
    return parts.Select(fileInfo => fileInfo.FullName);
}
/// <summary>
/// From a passed in archive (zip, rar, 7z, 001), return all parts.
/// </summary>
/// <param name="part1"></param>
/// <returns></returns>
/// <summary>
/// From the first part of an archive set, yields every part on disk:
/// the first part itself, then rar-style siblings (.partN.rar / .rNN) if any,
/// otherwise zip/split-style siblings (.zNN / .NNN).
/// </summary>
public static IEnumerable<FileInfo> GetFileParts(FileInfo part1)
{
    part1.CheckNotNull(nameof(part1));
    yield return part1;

    int index = 1;
    FileInfo? next = RarArchiveVolumeFactory.GetFilePart(index++, part1);
    if (next is null)
    {
        // No rar-style continuation found; try zip naming (tests split too).
        index = 1;
        while ((next = ZipArchiveVolumeFactory.GetFilePart(index++, part1)) != null)
        {
            yield return next;
        }
    }
    else
    {
        do
        {
            yield return next;
        }
        while ((next = RarArchiveVolumeFactory.GetFilePart(index++, part1)) != null); //tests split too
    }
}
}

View File

@@ -0,0 +1,29 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Readers;
using System.Linq;
using System.Text;
using SharpCompress.Common.Rar.Headers;
using System.Text.RegularExpressions;
namespace SharpCompress.Archives
{
internal abstract class ArchiveVolumeFactory
{
    /// <summary>
    /// Locates a later part of a numerically split set ("name.001", "name.002", ...)
    /// by deriving its name from the first part.
    /// </summary>
    /// <param name="index">Part index; the candidate file carries numeric suffix
    /// index + 1, zero-padded to the width used by <paramref name="part1"/>.</param>
    /// <param name="part1">First part of the set; its name supplies the pattern.</param>
    /// <returns>The matching <see cref="FileInfo"/> if it exists on disk; otherwise null.</returns>
    internal static FileInfo? GetFilePart(int index, FileInfo part1) //base the name on the first part
    {
        // Only the "prefix.NNN" split naming scheme is handled here.
        Match match = Regex.Match(part1.Name, @"^(.*\.)([0-9]+)$", RegexOptions.IgnoreCase);
        if (!match.Success)
        {
            return null;
        }

        string suffix = (index + 1).ToString().PadLeft(match.Groups[2].Value.Length, '0');
        var candidate = new FileInfo(Path.Combine(part1.DirectoryName!, match.Groups[1].Value + suffix));
        return candidate.Exists ? candidate : null;
    }
}
}

View File

@@ -1,9 +1,10 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
using SharpCompress.Writers;
@@ -18,7 +19,7 @@ namespace SharpCompress.Archives.GZip
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(string filePath, ReaderOptions readerOptions = null)
public static GZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
    filePath.CheckNotNullOrEmpty(nameof(filePath));
    // Delegate to the FileInfo overload; default options are supplied here so
    // the callee always receives a non-null ReaderOptions.
    return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new GZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
return new GZipArchive(new SourceStream(fileInfo, i => ArchiveVolumeFactory.GetFilePart(i, fileInfo), readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <summary>
/// Constructor with all file parts passed in.
/// </summary>
/// <param name="fileInfos">All parts of the archive, first part first.</param>
/// <param name="readerOptions">Reader options; defaults are used when null.</param>
/// <exception cref="InvalidOperationException">The sequence is empty.</exception>
public static GZipArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? readerOptions = null)
{
    fileInfos.CheckNotNull(nameof(fileInfos));
    FileInfo[] files = fileInfos.ToArray();
    if (files.Length == 0)
    {
        // Fail descriptively instead of letting files[0] throw IndexOutOfRangeException.
        throw new InvalidOperationException("No files to open");
    }
    // The part-resolver returns null past the last part, ending enumeration.
    return new GZipArchive(new SourceStream(files[0], i => i < files.Length ? files[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <summary>
/// Constructor with all stream parts passed in.
/// </summary>
/// <param name="streams">All parts of the archive, first part first.</param>
/// <param name="readerOptions">Reader options; defaults are used when null.</param>
/// <exception cref="InvalidOperationException">The sequence is empty.</exception>
public static GZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
    streams.CheckNotNull(nameof(streams));
    Stream[] strms = streams.ToArray();
    if (strms.Length == 0)
    {
        // Fail descriptively instead of letting strms[0] throw IndexOutOfRangeException.
        throw new InvalidOperationException("No streams");
    }
    // The part-resolver returns null past the last part, ending enumeration.
    return new GZipArchive(new SourceStream(strms[0], i => i < strms.Length ? strms[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
@@ -40,10 +65,10 @@ namespace SharpCompress.Archives.GZip
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
public static GZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
return new GZipArchive(stream, readerOptions ?? new ReaderOptions());
return new GZipArchive(new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions()));
}
public static GZipArchive Create()
@@ -52,20 +77,21 @@ namespace SharpCompress.Archives.GZip
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal GZipArchive(FileInfo fileInfo, ReaderOptions options)
: base(ArchiveType.GZip, fileInfo, options)
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="srcStream">Source abstraction over the archive's part(s).</param>
internal GZipArchive(SourceStream srcStream)
    : base(ArchiveType.GZip, srcStream) // was ArchiveType.Tar: copy-paste bug that made Archive.Type report Tar
{
}
protected override IEnumerable<GZipVolume> LoadVolumes(FileInfo file)
protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream srcStream)
{
return new GZipVolume(file, ReaderOptions).AsEnumerable();
srcStream.LoadAllParts();
int idx = 0;
return srcStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, idx++));
}
public static bool IsGZipFile(string filePath)
{
return IsGZipFile(new FileInfo(filePath));
@@ -77,10 +103,9 @@ namespace SharpCompress.Archives.GZip
{
return false;
}
using (Stream stream = fileInfo.OpenRead())
{
return IsGZipFile(stream);
}
using Stream stream = fileInfo.OpenRead();
return IsGZipFile(stream);
}
public void SaveTo(string filePath)
@@ -99,7 +124,7 @@ namespace SharpCompress.Archives.GZip
public static bool IsGZipFile(Stream stream)
{
// read the header on the first read
byte[] header = new byte[10];
Span<byte> header = stackalloc byte[10];
// workitem 8501: handle edge case (decompress empty stream)
if (!stream.ReadFully(header))
@@ -115,16 +140,6 @@ namespace SharpCompress.Archives.GZip
return true;
}
/// <summary>
/// Takes multiple seekable Streams for a multi-part archive
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
internal GZipArchive(Stream stream, ReaderOptions options)
: base(ArchiveType.GZip, stream, options)
{
}
internal GZipArchive()
: base(ArchiveType.GZip)
{
@@ -161,11 +176,6 @@ namespace SharpCompress.Archives.GZip
}
}
protected override IEnumerable<GZipVolume> LoadVolumes(IEnumerable<Stream> streams)
{
return new GZipVolume(streams.First(), ReaderOptions).AsEnumerable();
}
protected override IEnumerable<GZipArchiveEntry> LoadEntries(IEnumerable<GZipVolume> volumes)
{
Stream stream = volumes.Single().Stream;

View File

@@ -15,7 +15,7 @@ namespace SharpCompress.Archives.GZip
public virtual Stream OpenEntryStream()
{
//this is to reset the stream to be read multiple times
var part = Parts.Single() as GZipFilePart;
var part = (GZipFilePart)Parts.Single();
if (part.GetRawStream().Position != part.EntryStartPosition)
{
part.GetRawStream().Position = part.EntryStartPosition;

View File

@@ -1,4 +1,6 @@
using System;
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
@@ -6,7 +8,7 @@ using SharpCompress.IO;
namespace SharpCompress.Archives.GZip
{
internal class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArchiveEntry
internal sealed class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArchiveEntry
{
private readonly bool closeStream;
private readonly Stream stream;
@@ -52,7 +54,7 @@ namespace SharpCompress.Archives.GZip
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return new NonDisposingStream(stream);
return NonDisposingStream.Create(stream);
}
internal override void Close()
@@ -63,4 +65,4 @@ namespace SharpCompress.Archives.GZip
}
}
}
}
}

View File

@@ -8,22 +8,17 @@ namespace SharpCompress.Archives
{
public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWriteTo)
{
if (archiveEntry.Archive.Type == ArchiveType.Rar && archiveEntry.Archive.IsSolid)
{
throw new InvalidFormatException("Cannot use Archive random access on SOLID Rar files.");
}
if (archiveEntry.IsDirectory)
{
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
}
var streamListener = archiveEntry.Archive as IArchiveExtractionListener;
var streamListener = (IArchiveExtractionListener)archiveEntry.Archive;
streamListener.EnsureEntriesLoaded();
streamListener.FireEntryExtractionBegin(archiveEntry);
streamListener.FireFilePartExtractionBegin(archiveEntry.Key, archiveEntry.Size, archiveEntry.CompressedSize);
var entryStream = archiveEntry.OpenEntryStream();
if (entryStream == null)
if (entryStream is null)
{
return;
}
@@ -36,12 +31,12 @@ namespace SharpCompress.Archives
}
streamListener.FireEntryExtractionEnd(archiveEntry);
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(this IArchiveEntry entry, string destinationDirectory,
ExtractionOptions options = null)
ExtractionOptions? options = null)
{
ExtractionMethods.WriteEntryToDirectory(entry, destinationDirectory, options,
entry.WriteToFile);
@@ -50,10 +45,11 @@ namespace SharpCompress.Archives
/// <summary>
/// Extract to specific file
/// </summary>
public static void WriteToFile(this IArchiveEntry entry, string destinationFileName,
ExtractionOptions options = null)
public static void WriteToFile(this IArchiveEntry entry,
string destinationFileName,
ExtractionOptions? options = null)
{
ExtractionMethods.WriteEntryToFile(entry, destinationFileName, options,
(x, fm) =>
{

View File

@@ -9,7 +9,7 @@ namespace SharpCompress.Archives
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(this IArchive archive, string destinationDirectory,
ExtractionOptions options = null)
ExtractionOptions? options = null)
{
foreach (IArchiveEntry entry in archive.Entries.Where(x => !x.IsDirectory))
{

View File

@@ -11,5 +11,11 @@ namespace SharpCompress.Archives
IArchiveEntry AddEntry(string key, Stream source, bool closeStream, long size = 0, DateTime? modified = null);
void SaveTo(Stream stream, WriterOptions options);
/// <summary>
/// Use this to pause entry rebuilding when adding large collections of entries. Dispose when complete. A using statement is recommended.
/// </summary>
/// <returns>IDisposeable to resume entry rebuilding</returns>
IDisposable PauseEntryRebuilding();
}
}

View File

@@ -35,11 +35,14 @@ namespace SharpCompress.Archives
this IWritableArchive writableArchive,
string filePath, string searchPattern = "*.*", SearchOption searchOption = SearchOption.AllDirectories)
{
foreach (var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption))
using (writableArchive.PauseEntryRebuilding())
{
var fileInfo = new FileInfo(path);
writableArchive.AddEntry(path.Substring(filePath.Length), fileInfo.OpenRead(), true, fileInfo.Length,
fileInfo.LastWriteTime);
foreach (var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption))
{
var fileInfo = new FileInfo(path);
writableArchive.AddEntry(path.Substring(filePath.Length), fileInfo.OpenRead(), true, fileInfo.Length,
fileInfo.LastWriteTime);
}
}
}
public static IArchiveEntry AddEntry(this IWritableArchive writableArchive, string key, FileInfo fileInfo)

View File

@@ -1,4 +1,4 @@
using System.Collections.Generic;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common.Rar;
@@ -13,8 +13,8 @@ namespace SharpCompress.Archives.Rar
/// </summary>
internal class FileInfoRarArchiveVolume : RarVolume
{
internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options)
: base(StreamingMode.Seekable, fileInfo.OpenRead(), FixOptions(options))
internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options, int index = 0)
: base(StreamingMode.Seekable, fileInfo.OpenRead(), FixOptions(options), index)
{
FileInfo = fileInfo;
FileParts = GetVolumeFileParts().ToArray().ToReadOnly();

View File

@@ -1,12 +1,12 @@
using System.IO;
using System.IO;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Archives.Rar
{
internal class FileInfoRarFilePart : SeekableFilePart
internal sealed class FileInfoRarFilePart : SeekableFilePart
{
internal FileInfoRarFilePart(FileInfoRarArchiveVolume volume, string password, MarkHeader mh, FileHeader fh, FileInfo fi)
: base(mh, fh, volume.Stream, password)
// Captures the originating FileInfo alongside the seekable part data; the
// volume supplies the backing stream and this part's index within the set.
internal FileInfoRarFilePart(FileInfoRarArchiveVolume volume, string? password, MarkHeader mh, FileHeader fh, FileInfo fi)
: base(mh, fh, volume.Index, volume.Stream, password)
{
FileInfo = fi;
}

View File

@@ -5,49 +5,49 @@ using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressors.Rar;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Rar;
namespace SharpCompress.Archives.Rar
{
public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
public class
RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
{
internal Lazy<IRarUnpack> UnpackV2017 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV2017.Unpack());
internal Lazy<IRarUnpack> UnpackV1 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV1.Unpack());
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
internal RarArchive(FileInfo fileInfo, ReaderOptions options)
: base(ArchiveType.Rar, fileInfo, options)
{
}
protected override IEnumerable<RarVolume> LoadVolumes(FileInfo file)
{
return RarArchiveVolumeFactory.GetParts(file, ReaderOptions);
}
/// <summary>
/// Takes multiple seekable Streams for a multi-part archive
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="streams"></param>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal RarArchive(IEnumerable<Stream> streams, ReaderOptions options)
: base(ArchiveType.Rar, streams, options)
internal RarArchive(SourceStream srcStream)
: base(ArchiveType.Rar, srcStream)
{
}
protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes)
{
return RarArchiveEntryFactory.GetEntries(this, volumes);
return RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions);
}
protected override IEnumerable<RarVolume> LoadVolumes(IEnumerable<Stream> streams)
protected override IEnumerable<RarVolume> LoadVolumes(SourceStream srcStream)
{
return RarArchiveVolumeFactory.GetParts(streams, ReaderOptions);
base.SrcStream.LoadAllParts(); //request all streams
Stream[] streams = base.SrcStream.Streams.ToArray();
int idx = 0;
if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions)) //test part 2 - true = multipart not split
{
base.SrcStream.IsVolumes = true;
streams[1].Position = 0;
base.SrcStream.Position = 0;
return srcStream.Streams.Select(a => new StreamRarArchiveVolume(a, ReaderOptions, idx++));
}
else //split mode or single file
return new StreamRarArchiveVolume(base.SrcStream, ReaderOptions, idx++).AsEnumerable();
}
protected override IReader CreateReaderForSolidExtraction()
@@ -59,16 +59,20 @@ namespace SharpCompress.Archives.Rar
public override bool IsSolid => Volumes.First().IsSolidArchive;
public virtual int MinVersion => Volumes.First().MinVersion;
public virtual int MaxVersion => Volumes.First().MaxVersion;
#region Creation
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
public static RarArchive Open(string filePath, ReaderOptions options = null)
public static RarArchive Open(string filePath, ReaderOptions? options = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
return new RarArchive(new FileInfo(filePath), options ?? new ReaderOptions());
FileInfo fileInfo = new FileInfo(filePath);
return new RarArchive(new SourceStream(fileInfo, i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo), options ?? new ReaderOptions()));
}
/// <summary>
@@ -76,10 +80,10 @@ namespace SharpCompress.Archives.Rar
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
public static RarArchive Open(FileInfo fileInfo, ReaderOptions options = null)
public static RarArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new RarArchive(fileInfo, options ?? new ReaderOptions());
return new RarArchive(new SourceStream(fileInfo, i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo), options ?? new ReaderOptions()));
}
/// <summary>
@@ -87,23 +91,37 @@ namespace SharpCompress.Archives.Rar
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
public static RarArchive Open(Stream stream, ReaderOptions options = null)
public static RarArchive Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
return Open(stream.AsEnumerable(), options ?? new ReaderOptions());
return new RarArchive(new SourceStream(stream, i => null, options ?? new ReaderOptions()));
}
/// <summary>
/// Takes multiple seekable Streams for a multi-part archive
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <summary>
/// Constructor with all file parts passed in.
/// </summary>
/// <param name="fileInfos">All parts of the archive, first part first.</param>
/// <param name="readerOptions">Reader options; defaults are used when null.</param>
/// <exception cref="InvalidOperationException">The sequence is empty.</exception>
public static RarArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? readerOptions = null)
{
    fileInfos.CheckNotNull(nameof(fileInfos));
    FileInfo[] files = fileInfos.ToArray();
    if (files.Length == 0)
    {
        // Fail descriptively instead of letting files[0] throw IndexOutOfRangeException.
        throw new InvalidOperationException("No files to open");
    }
    // The part-resolver returns null past the last part, ending enumeration.
    return new RarArchive(new SourceStream(files[0], i => i < files.Length ? files[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions options = null)
/// <param name="readerOptions"></param>
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.CheckNotNull(nameof(streams));
return new RarArchive(streams, options ?? new ReaderOptions());
Stream[] strms = streams.ToArray();
return new RarArchive(new SourceStream(strms[0], i => i < strms.Length ? strms[i] : null, readerOptions ?? new ReaderOptions()));
}
public static bool IsRarFile(string filePath)
{
return IsRarFile(new FileInfo(filePath));
@@ -120,8 +138,8 @@ namespace SharpCompress.Archives.Rar
return IsRarFile(stream);
}
}
public static bool IsRarFile(Stream stream, ReaderOptions options = null)
public static bool IsRarFile(Stream stream, ReaderOptions? options = null)
{
try
{

View File

@@ -6,6 +6,7 @@ using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressors.Rar;
using SharpCompress.Readers;
namespace SharpCompress.Archives.Rar
{
@@ -13,11 +14,14 @@ namespace SharpCompress.Archives.Rar
{
private readonly ICollection<RarFilePart> parts;
private readonly RarArchive archive;
private readonly ReaderOptions readerOptions;
internal RarArchiveEntry(RarArchive archive, IEnumerable<RarFilePart> parts)
// Builds one logical entry from the file parts that make it up (one part per
// volume the entry spans).
internal RarArchiveEntry(RarArchive archive, IEnumerable<RarFilePart> parts, ReaderOptions readerOptions)
{
// Materialize once; the parts are consulted repeatedly for header queries.
this.parts = parts.ToList();
this.archive = archive;
// Retained so incomplete-entry checks can honor DisableCheckIncomplete.
this.readerOptions = readerOptions;
// Solid flag is taken from the entry's file header.
this.IsSolid = this.FileHeader.IsSolid;
}
public override CompressionType CompressionType => CompressionType.Rar;
@@ -57,33 +61,29 @@ namespace SharpCompress.Archives.Rar
/// <summary>
/// Opens a decompression stream for this entry. Not available on solid
/// archives, where entries depend on the unpack state of earlier entries.
/// </summary>
public Stream OpenEntryStream()
{
if (archive.IsSolid)
{
throw new InvalidOperationException("Use ExtractAllEntries to extract SOLID archives.");
}
// Pick the unpack engine by archive version; either way the entry's bytes
// are read across volumes via MultiVolumeReadOnlyStream.
if (IsRarV3)
{
return new RarStream(archive.UnpackV1.Value, FileHeader, new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive));
}
return new RarStream(archive.UnpackV2017.Value, FileHeader, new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive));
}
public bool IsComplete
{
get
{
return parts.Select(fp => fp.FileHeader).Any(fh => !fh.IsSplitAfter);
}
public bool IsComplete
{
get
{
var headers = parts.Select(x => x.FileHeader);
return !headers.First().IsSplitBefore && !headers.Last().IsSplitAfter;
}
}
private void CheckIncomplete()
{
if (!IsComplete)
if (!readerOptions.DisableCheckIncomplete && !IsComplete)
{
throw new IncompleteArchiveException("ArchiveEntry is incomplete and cannot perform this operation.");
}
}
}
}
}

View File

@@ -1,5 +1,6 @@
using System.Collections.Generic;
using SharpCompress.Common.Rar;
using SharpCompress.Readers;
namespace SharpCompress.Archives.Rar
{
@@ -36,11 +37,12 @@ namespace SharpCompress.Archives.Rar
}
internal static IEnumerable<RarArchiveEntry> GetEntries(RarArchive archive,
IEnumerable<RarVolume> rarParts)
IEnumerable<RarVolume> rarParts,
ReaderOptions readerOptions)
{
foreach (var groupedParts in GetMatchedFileParts(rarParts))
{
yield return new RarArchiveEntry(archive, groupedParts);
yield return new RarArchiveEntry(archive, groupedParts, readerOptions);
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Rar;
@@ -6,137 +6,35 @@ using SharpCompress.Readers;
using System.Linq;
using System.Text;
using SharpCompress.Common.Rar.Headers;
using System.Text.RegularExpressions;
namespace SharpCompress.Archives.Rar
{
internal static class RarArchiveVolumeFactory
{
internal static IEnumerable<RarVolume> GetParts(IEnumerable<Stream> streams, ReaderOptions options)
internal static FileInfo? GetFilePart(int index, FileInfo part1) //base the name on the first part
{
foreach (Stream s in streams)
{
if (!s.CanRead || !s.CanSeek)
{
throw new ArgumentException("Stream is not readable and seekable");
}
StreamRarArchiveVolume part = new StreamRarArchiveVolume(s, options);
yield return part;
}
}
internal static IEnumerable<RarVolume> GetParts(FileInfo fileInfo, ReaderOptions options)
{
FileInfoRarArchiveVolume part = new FileInfoRarArchiveVolume(fileInfo, options);
yield return part;
FileInfo? item = null;
ArchiveHeader ah = part.ArchiveHeader;
if (!ah.IsVolume)
{
yield break; //if file isn't volume then there is no reason to look
}
fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart);
//we use fileinfo because rar is dumb and looks at file names rather than archive info for another volume
while (fileInfo != null && fileInfo.Exists)
{
part = new FileInfoRarArchiveVolume(fileInfo, options);
fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart);
yield return part;
}
}
private static FileInfo GetNextFileInfo(ArchiveHeader ah, FileInfoRarFilePart currentFilePart)
{
if (currentFilePart == null)
{
return null;
}
bool oldNumbering = ah.OldNumberingFormat
|| currentFilePart.MarkHeader.OldNumberingFormat;
if (oldNumbering)
{
return FindNextFileWithOldNumbering(currentFilePart.FileInfo);
}
//new style rar - ..part1 | /part01 | part001 ....
Match m = Regex.Match(part1.Name, @"^(.*\.part)([0-9]+)(\.rar)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(Path.Combine(part1.DirectoryName!, String.Concat(m.Groups[1].Value, (index + 1).ToString().PadLeft(m.Groups[2].Value.Length, '0'), m.Groups[3].Value)));
else
{
return FindNextFileWithNewNumbering(currentFilePart.FileInfo);
//old style - ...rar, .r00, .r01 ...
m = Regex.Match(part1.Name, @"^(.*\.r)(ar|[0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(Path.Combine(part1.DirectoryName!, String.Concat(m.Groups[1].Value, index == 0 ? "ar" : (index - 1).ToString().PadLeft(m.Groups[2].Value.Length, '0'))));
else //split .001, .002 ....
return ArchiveVolumeFactory.GetFilePart(index, part1);
}
if (item != null && item.Exists)
return item;
return null; //no more items
}
private static FileInfo FindNextFileWithOldNumbering(FileInfo currentFileInfo)
{
// .rar, .r00, .r01, ...
string extension = currentFileInfo.Extension;
StringBuilder buffer = new StringBuilder(currentFileInfo.FullName.Length);
buffer.Append(currentFileInfo.FullName.Substring(0,
currentFileInfo.FullName.Length - extension.Length));
if (string.Compare(extension, ".rar", StringComparison.OrdinalIgnoreCase) == 0)
{
buffer.Append(".r00");
}
else
{
int num = 0;
if (int.TryParse(extension.Substring(2, 2), out num))
{
num++;
buffer.Append(".r");
if (num < 10)
{
buffer.Append('0');
}
buffer.Append(num);
}
else
{
ThrowInvalidFileName(currentFileInfo);
}
}
return new FileInfo(buffer.ToString());
}
private static FileInfo FindNextFileWithNewNumbering(FileInfo currentFileInfo)
{
// part1.rar, part2.rar, ...
string extension = currentFileInfo.Extension;
if (string.Compare(extension, ".rar", StringComparison.OrdinalIgnoreCase) != 0)
{
throw new ArgumentException("Invalid extension, expected 'rar': " + currentFileInfo.FullName);
}
int startIndex = currentFileInfo.FullName.LastIndexOf(".part");
if (startIndex < 0)
{
ThrowInvalidFileName(currentFileInfo);
}
StringBuilder buffer = new StringBuilder(currentFileInfo.FullName.Length);
buffer.Append(currentFileInfo.FullName, 0, startIndex);
int num = 0;
string numString = currentFileInfo.FullName.Substring(startIndex + 5,
currentFileInfo.FullName.IndexOf('.', startIndex + 5) -
startIndex - 5);
buffer.Append(".part");
if (int.TryParse(numString, out num))
{
num++;
for (int i = 0; i < numString.Length - num.ToString().Length; i++)
{
buffer.Append('0');
}
buffer.Append(num);
}
else
{
ThrowInvalidFileName(currentFileInfo);
}
buffer.Append(".rar");
return new FileInfo(buffer.ToString());
}
private static void ThrowInvalidFileName(FileInfo fileInfo)
{
throw new ArgumentException("Filename invalid or next archive could not be found:"
+ fileInfo.FullName);
}
}
}
}

View File

@@ -1,4 +1,4 @@
using System.IO;
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
@@ -7,10 +7,10 @@ namespace SharpCompress.Archives.Rar
internal class SeekableFilePart : RarFilePart
{
private readonly Stream stream;
private readonly string password;
private readonly string? password;
internal SeekableFilePart(MarkHeader mh, FileHeader fh, Stream stream, string password)
: base(mh, fh)
internal SeekableFilePart(MarkHeader mh, FileHeader fh, int index, Stream stream, string? password)
: base(mh, fh, index)
{
this.stream = stream;
this.password = password;
@@ -21,11 +21,11 @@ namespace SharpCompress.Archives.Rar
stream.Position = FileHeader.DataStartPosition;
if (FileHeader.R4Salt != null)
{
return new RarCryptoWrapper(stream, password, FileHeader.R4Salt);
return new RarCryptoWrapper(stream, password!, FileHeader.R4Salt);
}
return stream;
}
internal override string FilePartName => "Unknown Stream - File Entry: " + FileHeader.FileName;
}
}
}

View File

@@ -1,4 +1,4 @@
using System.Collections.Generic;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
@@ -9,8 +9,8 @@ namespace SharpCompress.Archives.Rar
{
internal class StreamRarArchiveVolume : RarVolume
{
internal StreamRarArchiveVolume(Stream stream, ReaderOptions options)
: base(StreamingMode.Seekable, stream, options)
internal StreamRarArchiveVolume(Stream stream, ReaderOptions options, int index = 0)
: base(StreamingMode.Seekable, stream, options, index)
{
}
@@ -21,7 +21,7 @@ namespace SharpCompress.Archives.Rar
internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader)
{
return new SeekableFilePart(markHeader, fileHeader, Stream, ReaderOptions.Password);
return new SeekableFilePart(markHeader, fileHeader, this.Index, Stream, ReaderOptions.Password);
}
}
}
}

View File

@@ -1,4 +1,6 @@
using System;
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -32,8 +34,33 @@ namespace SharpCompress.Archives.SevenZip
public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
{
fileInfo.CheckNotNull("fileInfo");
return new SevenZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
return new SevenZipArchive(new SourceStream(fileInfo, i => ArchiveVolumeFactory.GetFilePart(i, fileInfo), readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions readerOptions = null)
{
fileInfos.CheckNotNull(nameof(fileInfos));
FileInfo[] files = fileInfos.ToArray();
return new SevenZipArchive(new SourceStream(files[0], i => i < files.Length ? files[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(IEnumerable<Stream> streams, ReaderOptions readerOptions = null)
{
streams.CheckNotNull(nameof(streams));
Stream[] strms = streams.ToArray();
return new SevenZipArchive(new SourceStream(strms[0], i => i < strms.Length ? strms[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
@@ -42,17 +69,24 @@ namespace SharpCompress.Archives.SevenZip
public static SevenZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
{
stream.CheckNotNull("stream");
return new SevenZipArchive(stream, readerOptions ?? new ReaderOptions());
return new SevenZipArchive(new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions()));
}
internal SevenZipArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.SevenZip, fileInfo, readerOptions)
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal SevenZipArchive(SourceStream srcStream)
: base(ArchiveType.SevenZip, srcStream)
{
}
protected override IEnumerable<SevenZipVolume> LoadVolumes(FileInfo file)
protected override IEnumerable<SevenZipVolume> LoadVolumes(SourceStream srcStream)
{
return new SevenZipVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
base.SrcStream.LoadAllParts(); //request all streams
int idx = 0;
return new SevenZipVolume(srcStream, ReaderOptions, idx++).AsEnumerable(); //simple single volume or split, multivolume not supported
}
public static bool IsSevenZipFile(string filePath)
@@ -72,43 +106,37 @@ namespace SharpCompress.Archives.SevenZip
}
}
internal SevenZipArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.SevenZip, stream.AsEnumerable(), readerOptions)
{
}
internal SevenZipArchive()
: base(ArchiveType.SevenZip)
{
}
protected override IEnumerable<SevenZipVolume> LoadVolumes(IEnumerable<Stream> streams)
{
foreach (Stream s in streams)
{
if (!s.CanRead || !s.CanSeek)
{
throw new ArgumentException("Stream is not readable and seekable");
}
SevenZipVolume volume = new SevenZipVolume(s, ReaderOptions);
yield return volume;
}
}
protected override IEnumerable<SevenZipArchiveEntry> LoadEntries(IEnumerable<SevenZipVolume> volumes)
{
var stream = volumes.Single().Stream;
LoadFactory(stream);
var entries = new SevenZipArchiveEntry[database._files.Count];
for (int i = 0; i < database._files.Count; i++)
{
var file = database._files[i];
yield return new SevenZipArchiveEntry(this, new SevenZipFilePart(stream, database, i, file, ReaderOptions.ArchiveEncoding));
entries[i] = new SevenZipArchiveEntry(this, new SevenZipFilePart(stream, database, i, file, ReaderOptions.ArchiveEncoding));
}
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
{
var isSolid = false;
foreach (var entry in group)
{
entry.IsSolid = isSolid;
isSolid = true; //mark others in this group as solid - same as rar behaviour.
}
}
return entries;
}
private void LoadFactory(Stream stream)
{
if (database == null)
if (database is null)
{
stream.Position = 0;
var reader = new ArchiveReader();
@@ -129,7 +157,7 @@ namespace SharpCompress.Archives.SevenZip
}
}
private static ReadOnlySpan<byte> SIGNATURE => new byte[] {(byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C};
private static ReadOnlySpan<byte> SIGNATURE => new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };
private static bool SignatureMatch(Stream stream)
{
@@ -154,7 +182,7 @@ namespace SharpCompress.Archives.SevenZip
}
}
private class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
private sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
{
private readonly SevenZipArchive archive;
private CFolder currentFolder;
@@ -180,7 +208,7 @@ namespace SharpCompress.Archives.SevenZip
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
{
currentFolder = group.Key;
if (group.Key == null)
if (group.Key is null)
{
currentStream = Stream.Null;
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -20,7 +20,7 @@ namespace SharpCompress.Archives.Tar
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(string filePath, ReaderOptions readerOptions = null)
public static TarArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
@@ -31,10 +31,34 @@ namespace SharpCompress.Archives.Tar
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
public static TarArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new TarArchive(fileInfo, readerOptions ?? new ReaderOptions());
return new TarArchive(new SourceStream(fileInfo, i => ArchiveVolumeFactory.GetFilePart(i, fileInfo), readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? readerOptions = null)
{
fileInfos.CheckNotNull(nameof(fileInfos));
FileInfo[] files = fileInfos.ToArray();
return new TarArchive(new SourceStream(files[0], i => i < files.Length ? files[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.CheckNotNull(nameof(streams));
Stream[] strms = streams.ToArray();
return new TarArchive(new SourceStream(strms[0], i => i < strms.Length ? strms[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
@@ -42,10 +66,10 @@ namespace SharpCompress.Archives.Tar
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(Stream stream, ReaderOptions readerOptions = null)
public static TarArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
return new TarArchive(stream, readerOptions ?? new ReaderOptions());
return new TarArchive(new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions()));
}
public static bool IsTarFile(string filePath)
@@ -79,29 +103,21 @@ namespace SharpCompress.Archives.Tar
}
return false;
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
internal TarArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.Tar, fileInfo, readerOptions)
{
}
protected override IEnumerable<TarVolume> LoadVolumes(FileInfo file)
protected override IEnumerable<TarVolume> LoadVolumes(SourceStream srcStream)
{
return new TarVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
base.SrcStream.LoadAllParts(); //request all streams
int idx = 0;
return new TarVolume(srcStream, ReaderOptions, idx++).AsEnumerable(); //simple single volume or split, multivolume not supported
}
/// <summary>
/// Takes multiple seekable Streams for a multi-part archive
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
internal TarArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.Tar, stream, readerOptions)
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal TarArchive(SourceStream srcStream)
: base(ArchiveType.Tar, srcStream)
{
}
@@ -110,16 +126,11 @@ namespace SharpCompress.Archives.Tar
{
}
protected override IEnumerable<TarVolume> LoadVolumes(IEnumerable<Stream> streams)
{
return new TarVolume(streams.First(), ReaderOptions).AsEnumerable();
}
protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
{
Stream stream = volumes.Single().Stream;
TarHeader previousHeader = null;
foreach (TarHeader header in TarHeaderFactory.ReadHeader(StreamingMode.Seekable, stream, ReaderOptions.ArchiveEncoding))
TarHeader? previousHeader = null;
foreach (TarHeader? header in TarHeaderFactory.ReadHeader(StreamingMode.Seekable, stream, ReaderOptions.ArchiveEncoding))
{
if (header != null)
{

View File

@@ -1,4 +1,6 @@
using System;
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
@@ -6,7 +8,7 @@ using SharpCompress.IO;
namespace SharpCompress.Archives.Tar
{
internal class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiveEntry
internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiveEntry
{
private readonly bool closeStream;
private readonly Stream stream;
@@ -51,7 +53,7 @@ namespace SharpCompress.Archives.Tar
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return new NonDisposingStream(stream);
return NonDisposingStream.Create(stream);
}
internal override void Close()
@@ -62,4 +64,4 @@ namespace SharpCompress.Archives.Tar
}
}
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -6,6 +6,7 @@ using SharpCompress.Common;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Deflate;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Zip;
using SharpCompress.Writers;
@@ -15,20 +16,33 @@ namespace SharpCompress.Archives.Zip
{
public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
{
#nullable disable
private readonly SeekableZipHeaderFactory headerFactory;
#nullable enable
/// <summary>
/// Gets or sets the compression level applied to files added to the archive,
/// if the compression method is set to deflate
/// </summary>
public CompressionLevel DeflateCompressionLevel { get; set; }
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal ZipArchive(SourceStream srcStream)
: base(ArchiveType.Zip, srcStream)
{
headerFactory = new SeekableZipHeaderFactory(srcStream.ReaderOptions.Password, srcStream.ReaderOptions.ArchiveEncoding);
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(string filePath, ReaderOptions readerOptions = null)
public static ZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
@@ -39,10 +53,34 @@ namespace SharpCompress.Archives.Zip
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new ZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
return new ZipArchive(new SourceStream(fileInfo, i => ZipArchiveVolumeFactory.GetFilePart(i, fileInfo), readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? readerOptions = null)
{
fileInfos.CheckNotNull(nameof(fileInfos));
FileInfo[] files = fileInfos.ToArray();
return new ZipArchive(new SourceStream(files[0], i => i < files.Length ? files[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.CheckNotNull(nameof(streams));
Stream[] strms = streams.ToArray();
return new ZipArchive(new SourceStream(strms[0], i => i < strms.Length ? strms[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
@@ -50,18 +88,18 @@ namespace SharpCompress.Archives.Zip
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
public static ZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
return new ZipArchive(stream, readerOptions ?? new ReaderOptions());
return new ZipArchive(new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions()));
}
public static bool IsZipFile(string filePath, string password = null)
public static bool IsZipFile(string filePath, string? password = null)
{
return IsZipFile(new FileInfo(filePath), password);
}
public static bool IsZipFile(FileInfo fileInfo, string password = null)
public static bool IsZipFile(FileInfo fileInfo, string? password = null)
{
if (!fileInfo.Exists)
{
@@ -73,14 +111,13 @@ namespace SharpCompress.Archives.Zip
}
}
public static bool IsZipFile(Stream stream, string password = null)
public static bool IsZipFile(Stream stream, string? password = null)
{
StreamingZipHeaderFactory headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding());
try
{
ZipHeader header =
headerFactory.ReadStreamHeader(stream).FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header == null)
ZipHeader? header = headerFactory.ReadStreamHeader(stream).FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
return false;
}
@@ -95,21 +132,63 @@ namespace SharpCompress.Archives.Zip
return false;
}
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
internal ZipArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.Zip, fileInfo, readerOptions)
public static bool IsZipMulti(Stream stream, string? password = null)
{
headerFactory = new SeekableZipHeaderFactory(readerOptions.Password, readerOptions.ArchiveEncoding);
StreamingZipHeaderFactory headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding());
try
{
ZipHeader? header = headerFactory.ReadStreamHeader(stream).FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
if (stream.CanSeek) //could be multipart. Test for central directory - might not be z64 safe
{
SeekableZipHeaderFactory z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
var x = z.ReadSeekableHeader(stream).FirstOrDefault();
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
}
else
return false;
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
protected override IEnumerable<ZipVolume> LoadVolumes(FileInfo file)
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream srcStream)
{
return new ZipVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
base.SrcStream.LoadAllParts(); //request all streams
base.SrcStream.Position = 0;
List<Stream> streams = base.SrcStream.Streams.ToList();
int idx = 0;
if (streams.Count > 1) //test part 2 - true = multipart not split
{
streams[1].Position += 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
bool isZip = IsZipFile(streams[1], ReaderOptions.Password);
streams[1].Position -= 4;
if (isZip)
{
base.SrcStream.IsVolumes = true;
var tmp = streams[0]; //arcs as zip, z01 ... swap the zip the end
streams.RemoveAt(0);
streams.Add(tmp);
//streams[0].Position = 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
return streams.Select(a => new ZipVolume(a, ReaderOptions, idx++));
}
}
//split mode or single file
return new ZipVolume(base.SrcStream, ReaderOptions, idx++).AsEnumerable();
}
internal ZipArchive()
@@ -117,46 +196,35 @@ namespace SharpCompress.Archives.Zip
{
}
/// <summary>
/// Takes multiple seekable Streams for a multi-part archive
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
internal ZipArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.Zip, stream, readerOptions)
{
headerFactory = new SeekableZipHeaderFactory(readerOptions.Password, readerOptions.ArchiveEncoding);
}
protected override IEnumerable<ZipVolume> LoadVolumes(IEnumerable<Stream> streams)
{
return new ZipVolume(streams.First(), ReaderOptions).AsEnumerable();
}
protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
{
var volume = volumes.Single();
Stream stream = volume.Stream;
foreach (ZipHeader h in headerFactory.ReadSeekableHeader(stream))
var vols = volumes.ToArray();
foreach (ZipHeader h in headerFactory.ReadSeekableHeader(vols.Last().Stream))
{
if (h != null)
{
switch (h.ZipHeaderType)
{
case ZipHeaderType.DirectoryEntry:
{
DirectoryEntryHeader deh = (DirectoryEntryHeader)h;
Stream s;
if (deh.RelativeOffsetOfEntryHeader + deh.CompressedSize > vols[deh.DiskNumberStart].Stream.Length)
{
yield return new ZipArchiveEntry(this,
new SeekableZipFilePart(headerFactory,
h as DirectoryEntryHeader,
stream));
var v = vols.Skip(deh.DiskNumberStart).ToArray();
s = new SourceStream(v[0].Stream, i => i < v.Length ? v[i].Stream : null, new ReaderOptions() { LeaveStreamOpen = true });
}
break;
else
s = vols[deh.DiskNumberStart].Stream;
yield return new ZipArchiveEntry(this, new SeekableZipFilePart(headerFactory, deh, s));
}
break;
case ZipHeaderType.DirectoryEnd:
{
byte[] bytes = (h as DirectoryEndHeader).Comment;
volume.Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
yield break;
}
{
byte[] bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
volumes.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
yield break;
}
}
}
}

View File

@@ -6,7 +6,7 @@ namespace SharpCompress.Archives.Zip
{
public class ZipArchiveEntry : ZipEntry, IArchiveEntry
{
internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart part)
internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart? part)
: base(part)
{
Archive = archive;
@@ -25,6 +25,6 @@ namespace SharpCompress.Archives.Zip
#endregion
public string Comment => (Parts.Single() as SeekableZipFilePart).Comment;
public string? Comment => ((SeekableZipFilePart)Parts.Single()).Comment;
}
}

View File

@@ -0,0 +1,34 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Readers;
using System.Linq;
using System.Text;
using SharpCompress.Common.Rar.Headers;
using System.Text.RegularExpressions;
namespace SharpCompress.Archives.Zip
{
internal static class ZipArchiveVolumeFactory
{
internal static FileInfo? GetFilePart(int index, FileInfo part1) //base the name on the first part
{
FileInfo? item = null;
//load files with zip/zipx first. Swapped to end once loaded in ZipArchive
//new style .zip, z01.. | .zipx, zx01 - if the numbers go beyond 99 then they use 100 ...1000 etc
Match m = Regex.Match(part1.Name, @"^(.*\.)(zipx?|zx?[0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(Path.Combine(part1.DirectoryName!, String.Concat(m.Groups[1].Value, Regex.Replace(m.Groups[2].Value, @"[^xz]", ""), index.ToString().PadLeft(2, '0'))));
else //split - 001, 002 ...
return ArchiveVolumeFactory.GetFilePart(index, part1);
if (item != null && item.Exists)
return item;
return null; //no more items
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
@@ -53,7 +53,7 @@ namespace SharpCompress.Archives.Zip
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return new NonDisposingStream(stream);
return NonDisposingStream.Create(stream);
}
internal override void Close()
@@ -65,4 +65,4 @@ namespace SharpCompress.Archives.Zip
}
}
}
}
}

View File

@@ -18,21 +18,25 @@ namespace SharpCompress.Common
/// <summary>
/// Set this encoding when you want to force it for all encoding operations.
/// </summary>
public Encoding Forced { get; set; }
public Encoding? Forced { get; set; }
/// <summary>
/// Set this when you want to use a custom method for all decoding operations.
/// </summary>
/// <returns>string Func(bytes, index, length)</returns>
public Func<byte[], int, int, string> CustomDecoder { get; set; }
public Func<byte[], int, int, string>? CustomDecoder { get; set; }
public ArchiveEncoding()
: this(Encoding.Default, Encoding.Default)
{
Default = Encoding.GetEncoding(437);
Password = Encoding.GetEncoding(437);
}
public ArchiveEncoding(Encoding def, Encoding password)
{
Default = def;
Password = password;
}
#if NETSTANDARD1_3 || NETSTANDARD2_0 || NETSTANDARD2_1
#if !NETFRAMEWORK
static ArchiveEncoding()
{
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);

View File

@@ -2,16 +2,22 @@
namespace SharpCompress.Common
{
public class CompressedBytesReadEventArgs : EventArgs
public sealed class CompressedBytesReadEventArgs : EventArgs
{
public CompressedBytesReadEventArgs(long compressedBytesRead, long currentFilePartCompressedBytesRead)
{
CompressedBytesRead = compressedBytesRead;
CurrentFilePartCompressedBytesRead = currentFilePartCompressedBytesRead;
}
/// <summary>
/// Compressed bytes read for the current entry
/// </summary>
public long CompressedBytesRead { get; internal set; }
public long CompressedBytesRead { get; }
/// <summary>
/// Current file part read for Multipart files (e.g. Rar)
/// </summary>
public long CurrentFilePartCompressedBytesRead { get; internal set; }
public long CurrentFilePartCompressedBytesRead { get; }
}
}

View File

@@ -1,5 +1,6 @@
using System;
using System;
using System.Collections.Generic;
using System.Linq;
namespace SharpCompress.Common
{
@@ -18,7 +19,7 @@ namespace SharpCompress.Common
/// <summary>
/// The target of a symlink entry internal to the Archive. Will be null if not a symlink.
/// </summary>
public abstract string LinkTarget { get; }
public abstract string? LinkTarget { get; }
/// <summary>
/// The compressed file size
@@ -70,14 +71,14 @@ namespace SharpCompress.Common
/// </summary>
public abstract bool IsSplitAfter { get; }
public int VolumeIndexFirst => this.Parts?.FirstOrDefault()?.Index ?? 0;
public int VolumeIndexLast => this.Parts?.LastOrDefault()?.Index ?? 0;
/// <inheritdoc/>
public override string ToString()
{
return Key;
}
public override string ToString() => Key;
internal abstract IEnumerable<FilePart> Parts { get; }
internal bool IsSolid { get; set; }
public bool IsSolid { get; set; }
internal virtual void Close()
{

View File

@@ -47,7 +47,8 @@ namespace SharpCompress.Common
public override bool CanWrite => false;
public override void Flush() {
public override void Flush()
{
}
public override long Length => _stream.Length;

View File

@@ -8,28 +8,39 @@ namespace SharpCompress.Common
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteEntryToDirectory(IEntry entry, string destinationDirectory,
ExtractionOptions options, Action<string, ExtractionOptions> write)
public static void WriteEntryToDirectory(IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Action<string, ExtractionOptions?> write)
{
string destinationFileName;
string file = Path.GetFileName(entry.Key);
string fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
options = options ?? new ExtractionOptions()
{
Overwrite = true
};
//check for trailing slash.
if (fullDestinationDirectoryPath[fullDestinationDirectoryPath.Length - 1] != Path.DirectorySeparatorChar)
{
fullDestinationDirectoryPath += Path.DirectorySeparatorChar;
}
if (!Directory.Exists(fullDestinationDirectoryPath))
{
throw new ExtractionException($"Directory does not exist to extract to: {fullDestinationDirectoryPath}");
}
options ??= new ExtractionOptions()
{
Overwrite = true
};
string file = Path.GetFileName(entry.Key);
if (options.ExtractFullPath)
{
string folder = Path.GetDirectoryName(entry.Key);
string destdir = Path.GetFullPath(
Path.Combine(fullDestinationDirectoryPath, folder)
);
string folder = Path.GetDirectoryName(entry.Key)!;
string destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));
if (!Directory.Exists(destdir))
{
if (!destdir.StartsWith(fullDestinationDirectoryPath))
if (!destdir.StartsWith(fullDestinationDirectoryPath, StringComparison.Ordinal))
{
throw new ExtractionException("Entry is trying to create a directory outside of the destination directory.");
}
@@ -39,7 +50,7 @@ namespace SharpCompress.Common
destinationFileName = Path.Combine(destdir, file);
}
else
{
{
destinationFileName = Path.Combine(fullDestinationDirectoryPath, file);
}
@@ -48,7 +59,7 @@ namespace SharpCompress.Common
{
destinationFileName = Path.GetFullPath(destinationFileName);
if (!destinationFileName.StartsWith(fullDestinationDirectoryPath))
if (!destinationFileName.StartsWith(fullDestinationDirectoryPath, StringComparison.Ordinal))
{
throw new ExtractionException("Entry is trying to write a file outside of the destination directory.");
}
@@ -59,14 +70,14 @@ namespace SharpCompress.Common
Directory.CreateDirectory(destinationFileName);
}
}
public static void WriteEntryToFile(IEntry entry, string destinationFileName,
ExtractionOptions options,
ExtractionOptions? options,
Action<string, FileMode> openAndWrite)
{
if (entry.LinkTarget != null)
{
if (null == options.WriteSymbolicLink)
if (options?.WriteSymbolicLink is null)
{
throw new ExtractionException("Entry is a symbolic link but ExtractionOptions.WriteSymbolicLink delegate is null");
}
@@ -75,10 +86,10 @@ namespace SharpCompress.Common
else
{
FileMode fm = FileMode.Create;
options = options ?? new ExtractionOptions()
{
Overwrite = true
};
options ??= new ExtractionOptions()
{
Overwrite = true
};
if (!options.Overwrite)
{

View File

@@ -7,7 +7,7 @@ namespace SharpCompress.Common
/// <summary>
/// overwrite target if it exists
/// </summary>
public bool Overwrite {get; set; }
public bool Overwrite { get; set; }
/// <summary>
/// extract with internal directory structure

View File

@@ -1,4 +1,4 @@
using System.IO;
using System.IO;
namespace SharpCompress.Common
{
@@ -12,9 +12,10 @@ namespace SharpCompress.Common
internal ArchiveEncoding ArchiveEncoding { get; }
internal abstract string FilePartName { get; }
public int Index { get; set; }
internal abstract Stream GetCompressedStream();
internal abstract Stream GetRawStream();
internal abstract Stream? GetRawStream();
internal bool Skipped { get; set; }
}
}

View File

@@ -2,21 +2,28 @@
namespace SharpCompress.Common
{
public class FilePartExtractionBeginEventArgs : EventArgs
public sealed class FilePartExtractionBeginEventArgs : EventArgs
{
public FilePartExtractionBeginEventArgs(string name, long size, long compressedSize)
{
Name = name;
Size = size;
CompressedSize = compressedSize;
}
/// <summary>
/// File name for the part for the current entry
/// </summary>
public string Name { get; internal set; }
public string Name { get; }
/// <summary>
/// Uncompressed size of the current entry in the part
/// </summary>
public long Size { get; internal set; }
public long Size { get; }
/// <summary>
/// Compressed size of the current entry in the part
/// </summary>
public long CompressedSize { get; internal set; }
public long CompressedSize { get; }
}
}

View File

@@ -15,15 +15,15 @@ namespace SharpCompress.Common.GZip
public override CompressionType CompressionType => CompressionType.GZip;
public override long Crc => 0;
public override long Crc => _filePart.Crc ?? 0;
public override string Key => _filePart.FilePartName;
public override string LinkTarget => null;
public override string? LinkTarget => null;
public override long CompressedSize => 0;
public override long Size => 0;
public override long Size => _filePart.UncompressedSize ?? 0;
public override DateTime? LastModifiedTime => _filePart.DateModified;

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
@@ -8,24 +8,33 @@ using SharpCompress.Compressors.Deflate;
namespace SharpCompress.Common.GZip
{
internal class GZipFilePart : FilePart
internal sealed class GZipFilePart : FilePart
{
private string _name;
private string? _name;
private readonly Stream _stream;
internal GZipFilePart(Stream stream, ArchiveEncoding archiveEncoding)
: base(archiveEncoding)
: base(archiveEncoding)
{
ReadAndValidateGzipHeader(stream);
EntryStartPosition = stream.Position;
_stream = stream;
ReadAndValidateGzipHeader();
if (stream.CanSeek)
{
long position = stream.Position;
stream.Position = stream.Length - 8;
ReadTrailer();
stream.Position = position;
}
EntryStartPosition = stream.Position;
}
internal long EntryStartPosition { get; }
internal DateTime? DateModified { get; private set; }
internal uint? Crc { get; private set; }
internal uint? UncompressedSize { get; private set; }
internal override string FilePartName => _name;
internal override string FilePartName => _name!;
internal override Stream GetCompressedStream()
{
@@ -37,11 +46,21 @@ namespace SharpCompress.Common.GZip
return _stream;
}
private void ReadAndValidateGzipHeader(Stream stream)
private void ReadTrailer()
{
// Read and potentially verify the GZIP trailer: CRC32 and size mod 2^32
Span<byte> trailer = stackalloc byte[8];
int n = _stream.Read(trailer);
Crc = BinaryPrimitives.ReadUInt32LittleEndian(trailer);
UncompressedSize = BinaryPrimitives.ReadUInt32LittleEndian(trailer.Slice(4));
}
private void ReadAndValidateGzipHeader()
{
// read the header on the first read
byte[] header = new byte[10];
int n = stream.Read(header, 0, header.Length);
Span<byte> header = stackalloc byte[10];
int n = _stream.Read(header);
// workitem 8501: handle edge case (decompress empty stream)
if (n == 0)
@@ -59,17 +78,17 @@ namespace SharpCompress.Common.GZip
throw new ZlibException("Bad GZIP header.");
}
int timet = BinaryPrimitives.ReadInt32LittleEndian(header.AsSpan(4));
int timet = BinaryPrimitives.ReadInt32LittleEndian(header.Slice(4));
DateModified = TarHeader.EPOCH.AddSeconds(timet);
if ((header[3] & 0x04) == 0x04)
{
// read and discard extra field
n = stream.Read(header, 0, 2); // 2-byte length field
n = _stream.Read(header.Slice(0, 2)); // 2-byte length field
Int16 extraLength = (Int16)(header[0] + header[1] * 256);
short extraLength = (short)(header[0] + header[1] * 256);
byte[] extra = new byte[extraLength];
if (!stream.ReadFully(extra))
if (!_stream.ReadFully(extra))
{
throw new ZlibException("Unexpected end-of-file reading GZIP header.");
}
@@ -77,27 +96,27 @@ namespace SharpCompress.Common.GZip
}
if ((header[3] & 0x08) == 0x08)
{
_name = ReadZeroTerminatedString(stream);
_name = ReadZeroTerminatedString(_stream);
}
if ((header[3] & 0x10) == 0x010)
{
ReadZeroTerminatedString(stream);
ReadZeroTerminatedString(_stream);
}
if ((header[3] & 0x02) == 0x02)
{
stream.ReadByte(); // CRC16, ignore
_stream.ReadByte(); // CRC16, ignore
}
}
private string ReadZeroTerminatedString(Stream stream)
{
byte[] buf1 = new byte[1];
Span<byte> buf1 = stackalloc byte[1];
var list = new List<byte>();
bool done = false;
do
{
// workitem 7740
int n = stream.Read(buf1, 0, 1);
int n = stream.Read(buf1);
if (n != 1)
{
throw new ZlibException("Unexpected EOF reading GZIP header.");

View File

@@ -1,12 +1,12 @@
using System.IO;
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common.GZip
{
public class GZipVolume : Volume
{
public GZipVolume(Stream stream, ReaderOptions options)
: base(stream, options)
public GZipVolume(Stream stream, ReaderOptions options, int index = 0)
: base(stream, options, index)
{
}
@@ -20,4 +20,4 @@ namespace SharpCompress.Common.GZip
public override bool IsMultiVolume => true;
}
}
}

View File

@@ -1,4 +1,5 @@
using System;
using System;
using System.Collections.Generic;
namespace SharpCompress.Common
{
@@ -10,13 +11,16 @@ namespace SharpCompress.Common
long Crc { get; }
DateTime? CreatedTime { get; }
string Key { get; }
string LinkTarget { get; }
string? LinkTarget { get; }
bool IsDirectory { get; }
bool IsEncrypted { get; }
bool IsSplitAfter { get; }
bool IsSolid { get; }
int VolumeIndexFirst { get; }
int VolumeIndexLast { get; }
DateTime? LastAccessedTime { get; }
DateTime? LastModifiedTime { get; }
long Size { get; }
int? Attrib { get; }
}
}
}

View File

@@ -1,8 +1,11 @@
using System;
using System;
namespace SharpCompress.Common
{
public interface IVolume : IDisposable
{
int Index { get; }
string FileName { get; }
}
}
}

View File

@@ -4,8 +4,8 @@ namespace SharpCompress.Common.Rar.Headers
{
internal class AvHeader : RarHeader
{
public AvHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Av)
public AvHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Av)
{
if (IsRar5)
{

View File

@@ -1,10 +1,12 @@
using SharpCompress.IO;
#nullable disable
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
internal class ArchiveCryptHeader : RarHeader
{
private const int CRYPT_VERSION = 0; // Supported encryption version.
private const int SIZE_SALT50 = 16;
private const int SIZE_SALT30 = 8;
@@ -13,14 +15,14 @@ namespace SharpCompress.Common.Rar.Headers
private const int SIZE_PSWCHECK_CSUM = 4;
private const int CRYPT5_KDF_LG2_COUNT = 15; // LOG2 of PDKDF2 iteration count.
private const int CRYPT5_KDF_LG2_COUNT_MAX = 24; // LOG2 of maximum accepted iteration count.
private bool _usePswCheck;
private uint _lg2Count; // Log2 of PBKDF2 repetition count.
private byte[] _salt;
private byte[] _pswCheck;
private byte[] _pswCheckCsm;
public ArchiveCryptHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Crypt)
{
@@ -33,12 +35,12 @@ namespace SharpCompress.Common.Rar.Headers
{
//error?
return;
}
}
var encryptionFlags = reader.ReadRarVIntUInt32();
_usePswCheck = FlagUtility.HasFlag(encryptionFlags, EncryptionFlagsV5.CHFL_CRYPT_PSWCHECK);
_lg2Count = reader.ReadRarVIntByte(1);
//UsePswCheck = HasHeaderFlag(EncryptionFlagsV5.CHFL_CRYPT_PSWCHECK);
if (_lg2Count > CRYPT5_KDF_LG2_COUNT_MAX)
{

View File

@@ -2,16 +2,16 @@ using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
internal class ArchiveHeader : RarHeader
internal sealed class ArchiveHeader : RarHeader
{
public ArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Archive)
public ArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Archive)
{
}
protected override void ReadFinish(MarkingBinaryReader reader)
{
if (IsRar5)
if (IsRar5)
{
Flags = reader.ReadRarVIntUInt16();
if (HasFlag(ArchiveFlagsV5.HAS_VOLUME_NUMBER))
@@ -22,8 +22,8 @@ namespace SharpCompress.Common.Rar.Headers
//if (ExtraSize != 0) {
// ReadLocator(reader);
//}
}
else
}
else
{
Flags = HeaderFlags;
HighPosAv = reader.ReadInt16();
@@ -35,7 +35,8 @@ namespace SharpCompress.Common.Rar.Headers
}
}
private void ReadLocator(MarkingBinaryReader reader) {
private void ReadLocator(MarkingBinaryReader reader)
{
var size = reader.ReadRarVIntUInt16();
var type = reader.ReadRarVIntUInt16();
if (type != 1)
@@ -47,18 +48,20 @@ namespace SharpCompress.Common.Rar.Headers
const ushort hasQuickOpenOffset = 0x01;
const ushort hasRecoveryOffset = 0x02;
ulong quickOpenOffset = 0;
if ((flags & hasQuickOpenOffset) == hasQuickOpenOffset) {
if ((flags & hasQuickOpenOffset) == hasQuickOpenOffset)
{
quickOpenOffset = reader.ReadRarVInt();
}
ulong recoveryOffset = 0;
if ((flags & hasRecoveryOffset) == hasRecoveryOffset) {
if ((flags & hasRecoveryOffset) == hasRecoveryOffset)
{
recoveryOffset = reader.ReadRarVInt();
}
}
private ushort Flags { get; set; }
private ushort Flags { get; set; }
private bool HasFlag(ushort flag)
private bool HasFlag(ushort flag)
{
return (Flags & flag) == flag;
}
@@ -78,7 +81,7 @@ namespace SharpCompress.Common.Rar.Headers
public bool IsVolume => HasFlag(IsRar5 ? ArchiveFlagsV5.VOLUME : ArchiveFlagsV4.VOLUME);
// RAR5: Volume number field is present. True for all volumes except first.
public bool IsFirstVolume => IsRar5 ? VolumeNumber == null : HasFlag(ArchiveFlagsV4.FIRST_VOLUME);
public bool IsFirstVolume => IsRar5 ? VolumeNumber is null : HasFlag(ArchiveFlagsV4.FIRST_VOLUME);
public bool IsSolid => HasFlag(IsRar5 ? ArchiveFlagsV5.SOLID : ArchiveFlagsV4.SOLID);
}

View File

@@ -5,8 +5,8 @@ namespace SharpCompress.Common.Rar.Headers
internal class CommentHeader : RarHeader
{
protected CommentHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Comment)
{
: base(header, reader, HeaderType.Comment)
{
if (IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");

View File

@@ -4,14 +4,14 @@ namespace SharpCompress.Common.Rar.Headers
{
internal class EndArchiveHeader : RarHeader
{
public EndArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.EndArchive)
public EndArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.EndArchive)
{
}
protected override void ReadFinish(MarkingBinaryReader reader)
{
if (IsRar5)
if (IsRar5)
{
Flags = reader.ReadRarVIntUInt16();
}
@@ -31,7 +31,7 @@ namespace SharpCompress.Common.Rar.Headers
private ushort Flags { get; set; }
private bool HasFlag(ushort flag)
private bool HasFlag(ushort flag)
{
return (Flags & flag) == flag;
}

View File

@@ -1,3 +1,5 @@
#nullable disable
#if !Rar2017_64bit
using nint = System.Int32;
using nuint = System.UInt32;
@@ -19,18 +21,18 @@ namespace SharpCompress.Common.Rar.Headers
{
private uint _fileCrc;
public FileHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType)
: base(header, reader, headerType)
public FileHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType)
: base(header, reader, headerType)
{
}
protected override void ReadFinish(MarkingBinaryReader reader)
protected override void ReadFinish(MarkingBinaryReader reader)
{
if (IsRar5)
if (IsRar5)
{
ReadFromReaderV5(reader);
}
else
}
else
{
ReadFromReaderV4(reader);
}
@@ -47,11 +49,13 @@ namespace SharpCompress.Common.Rar.Headers
FileAttributes = reader.ReadRarVIntUInt32();
if (HasFlag(FileFlagsV5.HAS_MOD_TIME)) {
if (HasFlag(FileFlagsV5.HAS_MOD_TIME))
{
FileLastModifiedTime = Utility.UnixTimeToDateTime(reader.ReadUInt32());
}
if (HasFlag(FileFlagsV5.HAS_CRC32)) {
if (HasFlag(FileFlagsV5.HAS_CRC32))
{
FileCrc = reader.ReadUInt32();
}
@@ -63,7 +67,7 @@ namespace SharpCompress.Common.Rar.Headers
// but it was already used in RAR 1.5 and Unpack needs to distinguish
// them.
CompressionAlgorithm = (byte)((compressionInfo & 0x3f) + 50);
// 7th bit (0x0040) defines the solid flag. If it is set, RAR continues to use the compression dictionary left after processing preceding files.
// It can be set only for file headers and is never set for service headers.
IsSolid = (compressionInfo & 0x40) == 0x40;
@@ -72,7 +76,7 @@ namespace SharpCompress.Common.Rar.Headers
CompressionMethod = (byte)((compressionInfo >> 7) & 0x7);
// Bits 11 - 14 (0x3c00) define the minimum size of dictionary size required to extract data. Value 0 means 128 KB, 1 - 256 KB, ..., 14 - 2048 MB, 15 - 4096 MB.
WindowSize = IsDirectory ? 0 : ((size_t)0x20000) << ((compressionInfo>>10) & 0xf);
WindowSize = IsDirectory ? 0 : ((size_t)0x20000) << ((compressionInfo >> 10) & 0xf);
HostOs = reader.ReadRarVIntByte();
@@ -99,18 +103,20 @@ namespace SharpCompress.Common.Rar.Headers
FileName = ConvertPathV5(Encoding.UTF8.GetString(b, 0, b.Length));
// extra size seems to be redudant since we know the total header size
if (ExtraSize != RemainingHeaderBytes(reader))
if (ExtraSize != RemainingHeaderBytes(reader))
{
throw new InvalidFormatException("rar5 header size / extra size inconsistency");
}
isEncryptedRar5 = false;
while (RemainingHeaderBytes(reader) > 0) {
while (RemainingHeaderBytes(reader) > 0)
{
var size = reader.ReadRarVIntUInt16();
int n = RemainingHeaderBytes(reader);
var type = reader.ReadRarVIntUInt16();
switch (type) {
switch (type)
{
//TODO
case 1: // file encryption
{
@@ -118,7 +124,7 @@ namespace SharpCompress.Common.Rar.Headers
//var version = reader.ReadRarVIntByte();
//if (version != 0) throw new InvalidFormatException("unknown encryption algorithm " + version);
}
}
break;
// case 2: // file hash
// {
@@ -129,38 +135,41 @@ namespace SharpCompress.Common.Rar.Headers
{
ushort flags = reader.ReadRarVIntUInt16();
var isWindowsTime = (flags & 1) == 0;
if ((flags & 0x2) == 0x2) {
if ((flags & 0x2) == 0x2)
{
FileLastModifiedTime = ReadExtendedTimeV5(reader, isWindowsTime);
}
if ((flags & 0x4) == 0x4) {
if ((flags & 0x4) == 0x4)
{
FileCreatedTime = ReadExtendedTimeV5(reader, isWindowsTime);
}
if ((flags & 0x8) == 0x8) {
if ((flags & 0x8) == 0x8)
{
FileLastAccessedTime = ReadExtendedTimeV5(reader, isWindowsTime);
}
}
break;
//TODO
// case 4: // file version
// {
//
// }
// break;
// case 5: // file system redirection
// {
//
// }
// break;
// case 6: // unix owner
// {
//
// }
// break;
// case 7: // service data
// {
//
// }
// break;
//TODO
// case 4: // file version
// {
//
// }
// break;
// case 5: // file system redirection
// {
//
// }
// break;
// case 6: // unix owner
// {
//
// }
// break;
// case 7: // service data
// {
//
// }
// break;
default:
// skip unknown record types to allow new record types to be added in the future
@@ -169,25 +178,26 @@ namespace SharpCompress.Common.Rar.Headers
// drain any trailing bytes of extra record
int did = n - RemainingHeaderBytes(reader);
int drain = size - did;
if (drain > 0)
if (drain > 0)
{
reader.ReadBytes(drain);
}
}
if (AdditionalDataSize != 0) {
if (AdditionalDataSize != 0)
{
CompressedSize = AdditionalDataSize;
}
}
private static DateTime ReadExtendedTimeV5(MarkingBinaryReader reader, bool isWindowsTime)
private static DateTime ReadExtendedTimeV5(MarkingBinaryReader reader, bool isWindowsTime)
{
if (isWindowsTime)
if (isWindowsTime)
{
return DateTime.FromFileTime(reader.ReadInt64());
}
else
}
else
{
return Utility.UnixTimeToDateTime(reader.ReadUInt32());
}
@@ -199,7 +209,7 @@ namespace SharpCompress.Common.Rar.Headers
{
// replace embedded \\ with valid filename char
return path.Replace('\\', '-').Replace('/', '\\');
}
}
return path;
}
@@ -374,20 +384,22 @@ namespace SharpCompress.Common.Rar.Headers
private ushort Flags { get; set; }
private bool HasFlag(ushort flag)
private bool HasFlag(ushort flag)
{
return (Flags & flag) == flag;
}
internal uint FileCrc
{
get {
if (IsRar5 && !HasFlag(FileFlagsV5.HAS_CRC32)) {
//!!! rar5:
internal uint FileCrc
{
get
{
if (IsRar5 && !HasFlag(FileFlagsV5.HAS_CRC32))
{
//!!! rar5:
throw new InvalidOperationException("TODO rar5");
}
return _fileCrc;
}
return _fileCrc;
}
private set => _fileCrc = value;
}
@@ -407,7 +419,7 @@ namespace SharpCompress.Common.Rar.Headers
//case 29: // rar 3.x compression
//case 50: // RAR 5.0 compression algorithm.
internal byte CompressionAlgorithm { get; private set; }
public bool IsSolid { get; private set; }
// unused for UnpackV1 implementation (limitation)
@@ -425,13 +437,14 @@ namespace SharpCompress.Common.Rar.Headers
internal long DataStartPosition { get; set; }
public Stream PackedStream { get; set; }
public bool IsSplitBefore => IsRar5 ? HasHeaderFlag(HeaderFlagsV5.SPLIT_BEFORE) : HasFlag(FileFlagsV4.SPLIT_BEFORE);
public bool IsSplitAfter => IsRar5 ? HasHeaderFlag(HeaderFlagsV5.SPLIT_AFTER) : HasFlag(FileFlagsV4.SPLIT_AFTER);
public bool IsDirectory => HasFlag(IsRar5 ? FileFlagsV5.DIRECTORY : FileFlagsV4.DIRECTORY);
private bool isEncryptedRar5 = false;
public bool IsEncrypted => IsRar5 ? isEncryptedRar5: HasFlag(FileFlagsV4.PASSWORD);
public bool IsEncrypted => IsRar5 ? isEncryptedRar5 : HasFlag(FileFlagsV4.PASSWORD);
internal DateTime? FileLastModifiedTime { get; private set; }
internal DateTime? FileCreatedTime { get; private set; }

View File

@@ -42,10 +42,10 @@ namespace SharpCompress.Common.Rar.Headers
}
internal static class EncryptionFlagsV5
{
{
// RAR 5.0 archive encryption header specific flags.
public const uint CHFL_CRYPT_PSWCHECK = 0x01; // Password check data is present.
public const uint FHEXTRA_CRYPT_PSWCHECK = 0x01; // Password check data is present.
public const uint FHEXTRA_CRYPT_HASHMAC = 0x02;
}

View File

@@ -1,6 +1,6 @@
namespace SharpCompress.Common.Rar.Headers
{
internal interface IRarHeader
internal interface IRarHeader
{
HeaderType HeaderType { get; }
}

View File

@@ -11,39 +11,39 @@ namespace SharpCompress.Common.Rar.Headers
public bool IsRar5 { get; }
private MarkHeader(bool isRar5)
{
private MarkHeader(bool isRar5)
{
IsRar5 = isRar5;
}
public HeaderType HeaderType => HeaderType.Mark;
private static byte GetByte(Stream stream)
private static byte GetByte(Stream stream)
{
var b = stream.ReadByte();
if (b != -1)
if (b != -1)
{
return (byte)b;
}
throw new EndOfStreamException();
}
public static MarkHeader Read(Stream stream, bool leaveStreamOpen, bool lookForHeader)
public static MarkHeader Read(Stream stream, bool leaveStreamOpen, bool lookForHeader)
{
int maxScanIndex = lookForHeader ? MAX_SFX_SIZE : 0;
try
{
int start = -1;
var b = GetByte(stream); start++;
while (start <= maxScanIndex)
while (start <= maxScanIndex)
{
// Rar old signature: 52 45 7E 5E
// Rar4 signature: 52 61 72 21 1A 07 00
// Rar5 signature: 52 61 72 21 1A 07 01 00
if (b == 0x52)
if (b == 0x52)
{
b = GetByte(stream); start++;
if (b == 0x61)
if (b == 0x61)
{
b = GetByte(stream); start++;
if (b != 0x72)
@@ -70,7 +70,7 @@ namespace SharpCompress.Common.Rar.Headers
}
b = GetByte(stream); start++;
if (b == 1)
if (b == 1)
{
b = GetByte(stream); start++;
if (b != 0)
@@ -79,13 +79,13 @@ namespace SharpCompress.Common.Rar.Headers
}
return new MarkHeader(true); // Rar5
}
else if (b == 0)
}
else if (b == 0)
{
return new MarkHeader(false); // Rar4
}
}
else if (b == 0x45)
}
}
else if (b == 0x45)
{
b = GetByte(stream); start++;
if (b != 0x7e)
@@ -100,9 +100,9 @@ namespace SharpCompress.Common.Rar.Headers
}
throw new InvalidFormatException("Rar format version pre-4 is unsupported.");
}
}
else
}
}
else
{
b = GetByte(stream); start++;
}

View File

@@ -2,23 +2,23 @@
namespace SharpCompress.Common.Rar.Headers
{
internal class NewSubHeaderType : IEquatable<NewSubHeaderType>
internal sealed class NewSubHeaderType : IEquatable<NewSubHeaderType>
{
internal static readonly NewSubHeaderType SUBHEAD_TYPE_CMT = new NewSubHeaderType('C', 'M', 'T');
internal static readonly NewSubHeaderType SUBHEAD_TYPE_CMT = new('C', 'M', 'T');
//internal static final NewSubHeaderType SUBHEAD_TYPE_ACL = new NewSubHeaderType(new byte[]{'A','C','L'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_ACL = new (new byte[]{'A','C','L'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_STREAM = new NewSubHeaderType(new byte[]{'S','T','M'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_STREAM = new (new byte[]{'S','T','M'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_UOWNER = new NewSubHeaderType(new byte[]{'U','O','W'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_UOWNER = new (new byte[]{'U','O','W'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_AV = new NewSubHeaderType(new byte[]{'A','V'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_AV = new (new byte[]{'A','V'});
internal static readonly NewSubHeaderType SUBHEAD_TYPE_RR = new NewSubHeaderType('R', 'R');
internal static readonly NewSubHeaderType SUBHEAD_TYPE_RR = new('R', 'R');
//internal static final NewSubHeaderType SUBHEAD_TYPE_OS2EA = new NewSubHeaderType(new byte[]{'E','A','2'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_OS2EA = new (new byte[]{'E','A','2'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_BEOSEA = new NewSubHeaderType(new byte[]{'E','A','B','E'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_BEOSEA = new (new byte[]{'E','A','B','E'});
private readonly byte[] _bytes;
@@ -37,19 +37,13 @@ namespace SharpCompress.Common.Rar.Headers
{
return false;
}
for (int i = 0; i < bytes.Length; ++i)
{
if (_bytes[i] != bytes[i])
{
return false;
}
}
return true;
return _bytes.AsSpan().SequenceEqual(bytes);
}
public bool Equals(NewSubHeaderType other)
public bool Equals(NewSubHeaderType? other)
{
return Equals(other._bytes);
return other is not null && Equals(other._bytes);
}
}
}

View File

@@ -3,10 +3,10 @@
namespace SharpCompress.Common.Rar.Headers
{
// ProtectHeader is part of the Recovery Record feature
internal class ProtectHeader : RarHeader
internal sealed class ProtectHeader : RarHeader
{
public ProtectHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Protect)
public ProtectHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Protect)
{
if (IsRar5)
{
@@ -26,6 +26,6 @@ namespace SharpCompress.Common.Rar.Headers
internal byte Version { get; private set; }
internal ushort RecSectors { get; private set; }
internal uint TotalBlocks { get; private set; }
internal byte[] Mark { get; private set; }
internal byte[]? Mark { get; private set; }
}
}

View File

@@ -11,7 +11,7 @@ namespace SharpCompress.Common.Rar.Headers
private readonly HeaderType _headerType;
private readonly bool _isRar5;
internal static RarHeader TryReadBase(RarCrcBinaryReader reader, bool isRar5, ArchiveEncoding archiveEncoding)
internal static RarHeader? TryReadBase(RarCrcBinaryReader reader, bool isRar5, ArchiveEncoding archiveEncoding)
{
try
{
@@ -23,12 +23,12 @@ namespace SharpCompress.Common.Rar.Headers
}
}
private RarHeader(RarCrcBinaryReader reader, bool isRar5, ArchiveEncoding archiveEncoding)
private RarHeader(RarCrcBinaryReader reader, bool isRar5, ArchiveEncoding archiveEncoding)
{
_headerType = HeaderType.Null;
_isRar5 = isRar5;
ArchiveEncoding = archiveEncoding;
if (IsRar5)
if (IsRar5)
{
HeaderCrc = reader.ReadUInt32();
reader.ResetCrc();
@@ -45,7 +45,9 @@ namespace SharpCompress.Common.Rar.Headers
{
AdditionalDataSize = (long)reader.ReadRarVInt();
}
} else {
}
else
{
reader.Mark();
HeaderCrc = reader.ReadUInt16();
reader.ResetCrc();
@@ -59,7 +61,8 @@ namespace SharpCompress.Common.Rar.Headers
}
}
protected RarHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType) {
protected RarHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType)
{
_headerType = headerType;
_isRar5 = header.IsRar5;
HeaderCrc = header.HeaderCrc;
@@ -80,7 +83,8 @@ namespace SharpCompress.Common.Rar.Headers
VerifyHeaderCrc(reader.GetCrc32());
}
protected int RemainingHeaderBytes(MarkingBinaryReader reader) {
protected int RemainingHeaderBytes(MarkingBinaryReader reader)
{
return checked(HeaderSize - (int)reader.CurrentReadByteCount);
}
@@ -108,7 +112,7 @@ namespace SharpCompress.Common.Rar.Headers
protected ushort HeaderFlags { get; }
protected bool HasHeaderFlag(ushort flag)
protected bool HasHeaderFlag(ushort flag)
{
return (HeaderFlags & flag) == flag;
}

View File

@@ -25,7 +25,7 @@ namespace SharpCompress.Common.Rar.Headers
_isRar5 = markHeader.IsRar5;
yield return markHeader;
RarHeader header;
RarHeader? header;
while ((header = TryReadNextHeader(stream)) != null)
{
yield return header;
@@ -38,16 +38,16 @@ namespace SharpCompress.Common.Rar.Headers
}
}
private RarHeader TryReadNextHeader(Stream stream)
private RarHeader? TryReadNextHeader(Stream stream)
{
RarCrcBinaryReader reader;
if (!IsEncrypted)
if (!IsEncrypted)
{
reader = new RarCrcBinaryReader(stream);
}
else
}
else
{
if (Options.Password == null)
if (Options.Password is null)
{
throw new CryptographicException("Encrypted Rar archive has no password specified.");
}
@@ -55,7 +55,7 @@ namespace SharpCompress.Common.Rar.Headers
}
var header = RarHeader.TryReadBase(reader, _isRar5, Options.ArchiveEncoding);
if (header == null)
if (header is null)
{
return null;
}
@@ -65,7 +65,7 @@ namespace SharpCompress.Common.Rar.Headers
case HeaderCodeV.RAR4_ARCHIVE_HEADER:
{
var ah = new ArchiveHeader(header, reader);
if (ah.IsEncrypted == true)
if (ah.IsEncrypted == true)
{
//!!! rar5 we don't know yet
IsEncrypted = true;
@@ -127,13 +127,13 @@ namespace SharpCompress.Common.Rar.Headers
case StreamingMode.Streaming:
{
var ms = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
if (fh.R4Salt == null)
if (fh.R4Salt is null)
{
fh.PackedStream = ms;
}
else
{
fh.PackedStream = new RarCryptoWrapper(ms, Options.Password, fh.R4Salt);
fh.PackedStream = new RarCryptoWrapper(ms, Options.Password!, fh.R4Salt);
}
}
break;
@@ -150,11 +150,11 @@ namespace SharpCompress.Common.Rar.Headers
return new EndArchiveHeader(header, reader);
}
case HeaderCodeV.RAR5_ARCHIVE_ENCRYPTION_HEADER:
{
var ch = new ArchiveCryptHeader(header, reader);
IsEncrypted = true;
return ch;
}
{
var ch = new ArchiveCryptHeader(header, reader);
IsEncrypted = true;
return ch;
}
default:
{
throw new InvalidFormatException("Unknown Rar Header: " + header.HeaderCode);
@@ -162,21 +162,26 @@ namespace SharpCompress.Common.Rar.Headers
}
}
private void SkipData(FileHeader fh, RarCrcBinaryReader reader) {
switch (StreamingMode) {
case StreamingMode.Seekable: {
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
private void SkipData(FileHeader fh, RarCrcBinaryReader reader)
{
switch (StreamingMode)
{
case StreamingMode.Seekable:
{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming: {
//skip the data because it's useless?
reader.BaseStream.Skip(fh.CompressedSize);
}
case StreamingMode.Streaming:
{
//skip the data because it's useless?
reader.BaseStream.Skip(fh.CompressedSize);
}
break;
default: {
throw new InvalidFormatException("Invalid StreamingMode");
}
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
}
}

View File

@@ -5,8 +5,8 @@ namespace SharpCompress.Common.Rar.Headers
internal class SignHeader : RarHeader
{
protected SignHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Sign)
{
: base(header, reader, HeaderType.Sign)
{
if (IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");

View File

@@ -3,7 +3,7 @@ using System.IO;
namespace SharpCompress.Common.Rar
{
internal class RarCryptoBinaryReader : RarCrcBinaryReader
internal sealed class RarCryptoBinaryReader : RarCrcBinaryReader
{
private RarRijndael _rijndael;
private byte[] _salt;
@@ -19,7 +19,9 @@ namespace SharpCompress.Common.Rar
// coderb: not sure why this was being done at this logical point
//SkipQueue();
byte[] salt = ReadBytes(8);
InitializeAes(salt);
_salt = salt;
_rijndael = RarRijndael.InitializeFrom(_password, salt);
}
// track read count ourselves rather than using the underlying stream since we buffer
@@ -39,12 +41,6 @@ namespace SharpCompress.Common.Rar
private bool UseEncryption => _salt != null;
internal void InitializeAes(byte[] salt)
{
_salt = salt;
_rijndael = RarRijndael.InitializeFrom(_password, salt);
}
public override byte ReadByte()
{
if (UseEncryption)

View File

@@ -4,7 +4,7 @@ using System.IO;
namespace SharpCompress.Common.Rar
{
internal class RarCryptoWrapper : Stream
internal sealed class RarCryptoWrapper : Stream
{
private readonly Stream _actualStream;
private readonly byte[] _salt;
@@ -35,7 +35,7 @@ namespace SharpCompress.Common.Rar
public override int Read(byte[] buffer, int offset, int count)
{
if (_salt == null)
if (_salt is null)
{
return _actualStream.Read(buffer, offset, count);
}
@@ -50,11 +50,11 @@ namespace SharpCompress.Common.Rar
if (sizeToRead > 0)
{
int alignedSize = sizeToRead + ((~sizeToRead + 1) & 0xf);
byte[] cipherText = new byte[RarRijndael.CRYPTO_BLOCK_SIZE];
Span<byte> cipherText = stackalloc byte[RarRijndael.CRYPTO_BLOCK_SIZE];
for (int i = 0; i < alignedSize / 16; i++)
{
//long ax = System.currentTimeMillis();
_actualStream.Read(cipherText, 0, RarRijndael.CRYPTO_BLOCK_SIZE);
_actualStream.Read(cipherText);
var readBytes = _rijndael.ProcessBlock(cipherText);
foreach (var readByte in readBytes)
@@ -91,7 +91,7 @@ namespace SharpCompress.Common.Rar
if (_rijndael != null)
{
_rijndael.Dispose();
_rijndael = null;
_rijndael = null!;
}
base.Dispose(disposing);
}

View File

@@ -10,8 +10,8 @@ namespace SharpCompress.Common.Rar
/// <summary>
/// As the V2017 port isn't complete, add this check to use the legacy Rar code.
/// </summary>
internal bool IsRarV3 => FileHeader.CompressionAlgorithm == 29 || FileHeader.CompressionAlgorithm == 36;
internal bool IsRarV3 => FileHeader.CompressionAlgorithm == 20 || FileHeader.CompressionAlgorithm == 26 || FileHeader.CompressionAlgorithm == 29 || FileHeader.CompressionAlgorithm == 36; //Nanook - Added 20+26 as Test arc from WinRar2.8 (algo 20) was failing with 2017 code
/// <summary>
/// The File's 32 bit CRC Hash
/// </summary>
@@ -22,7 +22,7 @@ namespace SharpCompress.Common.Rar
/// </summary>
public override string Key => FileHeader.FileName;
public override string LinkTarget => null;
public override string? LinkTarget => null;
/// <summary>
/// The entry last modified time in the archive, if recorded

View File

@@ -1,4 +1,4 @@
using System.IO;
using System.IO;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Common.Rar
@@ -8,20 +8,21 @@ namespace SharpCompress.Common.Rar
/// </summary>
internal abstract class RarFilePart : FilePart
{
internal RarFilePart(MarkHeader mh, FileHeader fh)
internal RarFilePart(MarkHeader mh, FileHeader fh, int index)
: base(fh.ArchiveEncoding)
{
MarkHeader = mh;
FileHeader = fh;
Index = index;
}
internal MarkHeader MarkHeader { get; }
internal FileHeader FileHeader { get; }
internal override Stream GetRawStream()
internal override Stream? GetRawStream()
{
return null;
}
}
}
}

View File

@@ -1,4 +1,6 @@
using System;
#nullable disable
using System;
using System.Security.Cryptography;
using System.Text;
using SharpCompress.Crypto;
@@ -25,13 +27,13 @@ namespace SharpCompress.Common.Rar
_rijndael = new RijndaelEngine();
_aesInitializationVector = new byte[CRYPTO_BLOCK_SIZE];
int rawLength = 2*_password.Length;
int rawLength = 2 * _password.Length;
byte[] rawPassword = new byte[rawLength + 8];
byte[] passwordBytes = Encoding.UTF8.GetBytes(_password);
for (int i = 0; i < _password.Length; i++)
{
rawPassword[i*2] = passwordBytes[i];
rawPassword[i*2 + 1] = 0;
rawPassword[i * 2] = passwordBytes[i];
rawPassword[i * 2 + 1] = 0;
}
for (int i = 0; i < _salt.Length; i++)
{
@@ -66,11 +68,11 @@ namespace SharpCompress.Common.Rar
{
for (int j = 0; j < 4; j++)
{
aesKey[i*4 + j] = (byte)
(((digest[i*4]*0x1000000) & 0xff000000 |
(uint) ((digest[i*4 + 1]*0x10000) & 0xff0000) |
(uint) ((digest[i*4 + 2]*0x100) & 0xff00) |
(uint) (digest[i*4 + 3] & 0xff)) >> (j*8));
aesKey[i * 4 + j] = (byte)
(((digest[i * 4] * 0x1000000) & 0xff000000 |
(uint)((digest[i * 4 + 1] * 0x10000) & 0xff0000) |
(uint)((digest[i * 4 + 2] * 0x100) & 0xff00) |
(uint)(digest[i * 4 + 3] & 0xff)) >> (j * 8));
}
}
@@ -85,11 +87,11 @@ namespace SharpCompress.Common.Rar
return rijndael;
}
public byte[] ProcessBlock(byte[] cipherText)
public byte[] ProcessBlock(ReadOnlySpan<byte> cipherText)
{
var plainText = new byte[CRYPTO_BLOCK_SIZE];
Span<byte> plainText = stackalloc byte[CRYPTO_BLOCK_SIZE]; // 16 bytes
byte[] decryptedBytes = new byte[CRYPTO_BLOCK_SIZE];
_rijndael.ProcessBlock(cipherText, 0, plainText, 0);
_rijndael.ProcessBlock(cipherText, plainText);
for (int j = 0; j < CRYPTO_BLOCK_SIZE; j++)
{

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -14,14 +14,17 @@ namespace SharpCompress.Common.Rar
public abstract class RarVolume : Volume
{
private readonly RarHeaderFactory _headerFactory;
internal int _maxCompressionAlgorithm;
internal RarVolume(StreamingMode mode, Stream stream, ReaderOptions options)
: base(stream, options)
internal RarVolume(StreamingMode mode, Stream stream, ReaderOptions options, int index = 0)
: base(stream, options, index)
{
_headerFactory = new RarHeaderFactory(mode, options);
}
#nullable disable
internal ArchiveHeader ArchiveHeader { get; private set; }
#nullable enable
internal StreamingMode Mode => _headerFactory.StreamingMode;
@@ -31,26 +34,28 @@ namespace SharpCompress.Common.Rar
internal IEnumerable<RarFilePart> GetVolumeFileParts()
{
MarkHeader lastMarkHeader = null;
MarkHeader? lastMarkHeader = null;
foreach (var header in _headerFactory.ReadHeaders(Stream))
{
switch (header.HeaderType)
{
case HeaderType.Mark:
{
lastMarkHeader = header as MarkHeader;
}
{
lastMarkHeader = (MarkHeader)header;
}
break;
case HeaderType.Archive:
{
ArchiveHeader = header as ArchiveHeader;
}
{
ArchiveHeader = (ArchiveHeader)header;
}
break;
case HeaderType.File:
{
var fh = header as FileHeader;
yield return CreateFilePart(lastMarkHeader, fh);
}
{
var fh = (FileHeader)header;
if (_maxCompressionAlgorithm < fh.CompressionAlgorithm)
_maxCompressionAlgorithm = fh.CompressionAlgorithm;
yield return CreateFilePart(lastMarkHeader!, fh);
}
break;
}
}
@@ -58,7 +63,7 @@ namespace SharpCompress.Common.Rar
private void EnsureArchiveHeaderLoaded()
{
if (ArchiveHeader == null)
if (ArchiveHeader is null)
{
if (Mode == StreamingMode.Streaming)
{
@@ -108,5 +113,37 @@ namespace SharpCompress.Common.Rar
return ArchiveHeader.IsSolid;
}
}
public int MinVersion
{
get
{
EnsureArchiveHeaderLoaded();
if (_maxCompressionAlgorithm >= 50)
return 5; //5-6
else if (_maxCompressionAlgorithm >= 29)
return 3; //3-4
else if (_maxCompressionAlgorithm >= 20)
return 2; //2
else
return 1;
}
}
public int MaxVersion
{
get
{
EnsureArchiveHeaderLoaded();
if (_maxCompressionAlgorithm >= 50)
return 6; //5-6
else if (_maxCompressionAlgorithm >= 29)
return 4; //3-4
else if (_maxCompressionAlgorithm >= 20)
return 2; //2
else
return 1;
}
}
}
}
}

View File

@@ -3,15 +3,16 @@ using SharpCompress.Readers;
namespace SharpCompress.Common
{
public class ReaderExtractionEventArgs<T> : EventArgs
public sealed class ReaderExtractionEventArgs<T> : EventArgs
{
internal ReaderExtractionEventArgs(T entry, ReaderProgress readerProgress = null)
internal ReaderExtractionEventArgs(T entry, ReaderProgress? readerProgress = null)
{
Item = entry;
ReaderProgress = readerProgress;
}
public T Item { get; }
public ReaderProgress ReaderProgress { get; }
public ReaderProgress? ReaderProgress { get; }
}
}

View File

@@ -1,4 +1,6 @@
using System;
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Compressors.LZMA;
@@ -35,7 +37,7 @@ namespace SharpCompress.Common.SevenZip
_packSizes.Clear();
_packCrCs.Clear();
_folders.Clear();
_numUnpackStreamsVector = null;
_numUnpackStreamsVector = null!;
_files.Clear();
_packStreamStartPositions.Clear();
@@ -87,7 +89,7 @@ namespace SharpCompress.Common.SevenZip
{
// v3.13 incorrectly worked with empty folders
// v4.07: Loop for skipping empty folders
for (;;)
for (; ; )
{
if (folderIndex >= _folders.Count)
{
@@ -96,7 +98,7 @@ namespace SharpCompress.Common.SevenZip
_folderStartFileIndex.Add(i); // check it
if (_numUnpackStreamsVector[folderIndex] != 0)
if (_numUnpackStreamsVector![folderIndex] != 0)
{
break;
}
@@ -114,7 +116,7 @@ namespace SharpCompress.Common.SevenZip
indexInFolder++;
if (indexInFolder >= _numUnpackStreamsVector[folderIndex])
if (indexInFolder >= _numUnpackStreamsVector![folderIndex])
{
folderIndex++;
indexInFolder = 0;

View File

@@ -1,4 +1,6 @@
using System;
#nullable disable
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
@@ -88,7 +90,7 @@ namespace SharpCompress.Common.SevenZip
private void WaitAttribute(BlockType attribute)
{
for (;;)
for (; ; )
{
BlockType? type = ReadId();
if (type == attribute)
@@ -450,7 +452,7 @@ namespace SharpCompress.Common.SevenZip
#endif
BlockType? type;
for (;;)
for (; ; )
{
type = ReadId();
if (type == BlockType.End)
@@ -465,7 +467,7 @@ namespace SharpCompress.Common.SevenZip
SkipData();
}
if (packCrCs == null)
if (packCrCs is null)
{
packCrCs = new List<uint?>(numPackStreams);
for (int i = 0; i < numPackStreams; i++)
@@ -506,7 +508,7 @@ namespace SharpCompress.Common.SevenZip
int index = 0;
for (int i = 0; i < numFolders; i++)
{
var f = new CFolder {_firstPackStreamId = index};
var f = new CFolder { _firstPackStreamId = index };
folders.Add(f);
GetNextFolderItem(f);
index += f._packStreams.Count;
@@ -537,7 +539,7 @@ namespace SharpCompress.Common.SevenZip
#endif
}
for (;;)
for (; ; )
{
BlockType? type = ReadId();
if (type == BlockType.End)
@@ -578,7 +580,7 @@ namespace SharpCompress.Common.SevenZip
numUnpackStreamsInFolders = null;
BlockType? type;
for (;;)
for (; ; )
{
type = ReadId();
if (type == BlockType.NumUnpackStream)
@@ -600,7 +602,7 @@ namespace SharpCompress.Common.SevenZip
#endif
continue;
}
if (type == BlockType.Crc || type == BlockType.Size)
if (type is BlockType.Crc or BlockType.Size)
{
break;
}
@@ -611,7 +613,7 @@ namespace SharpCompress.Common.SevenZip
SkipData();
}
if (numUnpackStreamsInFolders == null)
if (numUnpackStreamsInFolders is null)
{
numUnpackStreamsInFolders = new List<int>(folders.Count);
for (int i = 0; i < folders.Count; i++)
@@ -670,7 +672,7 @@ namespace SharpCompress.Common.SevenZip
digests = null;
for (;;)
for (; ; )
{
if (type == BlockType.Crc)
{
@@ -703,7 +705,7 @@ namespace SharpCompress.Common.SevenZip
}
else if (type == BlockType.End)
{
if (digests == null)
if (digests is null)
{
digests = new List<uint?>(numDigestsTotal);
for (int i = 0; i < numDigestsTotal; i++)
@@ -753,7 +755,7 @@ namespace SharpCompress.Common.SevenZip
unpackSizes = null;
digests = null;
for (;;)
for (; ; )
{
switch (ReadId())
{
@@ -789,22 +791,14 @@ namespace SharpCompress.Common.SevenZip
#endif
try
{
long dataStartPos;
List<long> packSizes;
List<uint?> packCrCs;
List<CFolder> folders;
List<int> numUnpackStreamsInFolders;
List<long> unpackSizes;
List<uint?> digests;
ReadStreamsInfo(null,
out dataStartPos,
out packSizes,
out packCrCs,
out folders,
out numUnpackStreamsInFolders,
out unpackSizes,
out digests);
out long dataStartPos,
out List<long> packSizes,
out List<uint?> packCrCs,
out List<CFolder> folders,
out List<int> numUnpackStreamsInFolders,
out List<long> unpackSizes,
out List<uint?> digests);
dataStartPos += baseOffset;
@@ -932,7 +926,7 @@ namespace SharpCompress.Common.SevenZip
BitVector antiFileVector = null;
int numEmptyStreams = 0;
for (;;)
for (; ; )
{
type = ReadId();
if (type == BlockType.End)
@@ -967,7 +961,7 @@ namespace SharpCompress.Common.SevenZip
#if DEBUG
Log.Write("WinAttributes:");
#endif
ReadAttributeVector(dataVector, numFiles, delegate(int i, uint? attr)
ReadAttributeVector(dataVector, numFiles, delegate (int i, uint? attr)
{
// Some third party implementations established an unofficial extension
// of the 7z archive format by placing posix file attributes in the high
@@ -1055,7 +1049,7 @@ namespace SharpCompress.Common.SevenZip
#if DEBUG
Log.Write("StartPos:");
#endif
ReadNumberVector(dataVector, numFiles, delegate(int i, long? startPos)
ReadNumberVector(dataVector, numFiles, delegate (int i, long? startPos)
{
db._files[i].StartPos = startPos;
#if DEBUG
@@ -1070,7 +1064,7 @@ namespace SharpCompress.Common.SevenZip
#if DEBUG
Log.Write("CTime:");
#endif
ReadDateTimeVector(dataVector, numFiles, delegate(int i, DateTime? time)
ReadDateTimeVector(dataVector, numFiles, delegate (int i, DateTime? time)
{
db._files[i].CTime = time;
#if DEBUG
@@ -1085,7 +1079,7 @@ namespace SharpCompress.Common.SevenZip
#if DEBUG
Log.Write("ATime:");
#endif
ReadDateTimeVector(dataVector, numFiles, delegate(int i, DateTime? time)
ReadDateTimeVector(dataVector, numFiles, delegate (int i, DateTime? time)
{
db._files[i].ATime = time;
#if DEBUG
@@ -1100,7 +1094,7 @@ namespace SharpCompress.Common.SevenZip
#if DEBUG
Log.Write("MTime:");
#endif
ReadDateTimeVector(dataVector, numFiles, delegate(int i, DateTime? time)
ReadDateTimeVector(dataVector, numFiles, delegate (int i, DateTime? time)
{
db._files[i].MTime = time;
#if DEBUG
@@ -1254,7 +1248,7 @@ namespace SharpCompress.Common.SevenZip
if (nextHeaderOffset > _streamEnding - db._startPositionAfterHeader)
{
throw new IndexOutOfRangeException();
throw new InvalidOperationException("nextHeaderOffset is invalid");
}
_stream.Seek(nextHeaderOffset, SeekOrigin.Current);
@@ -1443,8 +1437,7 @@ namespace SharpCompress.Common.SevenZip
private Stream GetCachedDecoderStream(ArchiveDatabase db, int folderIndex)
{
Stream s;
if (!_cachedStreams.TryGetValue(folderIndex, out s))
if (!_cachedStreams.TryGetValue(folderIndex, out Stream s))
{
CFolder folderInfo = db._folders[folderIndex];
int packStreamIndex = db._folders[folderIndex]._firstPackStreamId;
@@ -1487,16 +1480,11 @@ namespace SharpCompress.Common.SevenZip
public void Extract(ArchiveDatabase db, int[] indices)
{
int numItems;
bool allFilesMode = (indices == null);
if (allFilesMode)
{
numItems = db._files.Count;
}
else
{
numItems = indices.Length;
}
bool allFilesMode = (indices is null);
int numItems = allFilesMode
? db._files.Count
: indices.Length;
if (numItems == 0)
{
@@ -1529,6 +1517,7 @@ namespace SharpCompress.Common.SevenZip
}
}
byte[] buffer = null;
foreach (CExtractFolderInfo efi in extractFolderInfoVector)
{
int startIndex;
@@ -1565,8 +1554,8 @@ namespace SharpCompress.Common.SevenZip
Stream s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes,
folderInfo, db.PasswordProvider);
byte[] buffer = new byte[4 << 10];
for (;;)
buffer ??= new byte[4 << 10];
for (; ; )
{
int processed = s.Read(buffer, 0, buffer.Length);
if (processed == 0)

View File

@@ -1,4 +1,6 @@
namespace SharpCompress.Common.SevenZip
#nullable disable
namespace SharpCompress.Common.SevenZip
{
internal class CCoderInfo
{

View File

@@ -1,4 +1,6 @@
using System;
#nullable disable
using System;
namespace SharpCompress.Common.SevenZip
{

View File

@@ -30,7 +30,7 @@ namespace SharpCompress.Common.SevenZip
}
}
throw new Exception();
throw new InvalidOperationException();
}
public int GetNumOutStreams()
@@ -185,4 +185,4 @@ namespace SharpCompress.Common.SevenZip
return true;
}
}
}
}

View File

@@ -1,6 +1,6 @@
namespace SharpCompress.Common.SevenZip
{
internal struct CMethodId
internal readonly struct CMethodId
{
public const ulong K_COPY_ID = 0;
public const ulong K_LZMA_ID = 0x030101;
@@ -24,9 +24,9 @@
return _id.GetHashCode();
}
public override bool Equals(object obj)
public override bool Equals(object? obj)
{
return obj is CMethodId && (CMethodId)obj == this;
return obj is CMethodId other && Equals(other);
}
public bool Equals(CMethodId other)

View File

@@ -161,7 +161,7 @@ namespace SharpCompress.Common.SevenZip
{
int ending = Offset;
for (;;)
for (; ; )
{
if (ending + 2 > _ending)
{

View File

@@ -18,7 +18,7 @@ namespace SharpCompress.Common.SevenZip
public override string Key => FilePart.Header.Name;
public override string LinkTarget => null;
public override string? LinkTarget => null;
public override long CompressedSize => 0;
@@ -32,13 +32,13 @@ namespace SharpCompress.Common.SevenZip
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted => false;
public override bool IsEncrypted => FilePart.IsEncrypted;
public override bool IsDirectory => FilePart.Header.IsDir;
public override bool IsSplitAfter => false;
public override int? Attrib => (int)FilePart.Header.Attrib;
public override int? Attrib => FilePart.Header.Attrib.HasValue ? (int?)FilePart.Header.Attrib.Value : null;
internal override IEnumerable<FilePart> Parts => FilePart.AsEnumerable<FilePart>();
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.IO;
using System.Linq;
using SharpCompress.IO;
@@ -25,12 +25,11 @@ namespace SharpCompress.Common.SevenZip
}
internal CFileItem Header { get; }
internal CFolder Folder { get; }
internal int Index { get; }
internal CFolder? Folder { get; }
internal override string FilePartName => Header.Name;
internal override Stream GetRawStream()
internal override Stream? GetRawStream()
{
return null;
}
@@ -39,11 +38,11 @@ namespace SharpCompress.Common.SevenZip
{
if (!Header.HasStream)
{
return null;
return null!;
}
var folderStream = _database.GetFolderStream(_stream, Folder, _database.PasswordProvider);
var folderStream = _database.GetFolderStream(_stream, Folder!, _database.PasswordProvider);
int firstFileIndex = _database._folderStartFileIndex[_database._folders.IndexOf(Folder)];
int firstFileIndex = _database._folderStartFileIndex[_database._folders.IndexOf(Folder!)];
int skipCount = Index - firstFileIndex;
long skipSize = 0;
for (int i = 0; i < skipCount; i++)
@@ -61,7 +60,7 @@ namespace SharpCompress.Common.SevenZip
{
get
{
if (_type == null)
if (_type is null)
{
_type = GetCompression();
}
@@ -82,25 +81,27 @@ namespace SharpCompress.Common.SevenZip
internal CompressionType GetCompression()
{
var coder = Folder._coders.First();
var coder = Folder!._coders.First();
switch (coder._methodId._id)
{
{
case K_LZMA:
case K_LZMA2:
{
return CompressionType.LZMA;
}
{
return CompressionType.LZMA;
}
case K_PPMD:
{
return CompressionType.PPMd;
}
{
return CompressionType.PPMd;
}
case K_B_ZIP2:
{
return CompressionType.BZip2;
}
{
return CompressionType.BZip2;
}
default:
throw new NotImplementedException();
}
}
internal bool IsEncrypted => Folder!._coders.FindIndex(c => c._methodId._id == CMethodId.K_AES_ID) != -1;
}
}
}

View File

@@ -1,13 +1,13 @@
using System.IO;
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common.SevenZip
{
public class SevenZipVolume : Volume
{
public SevenZipVolume(Stream stream, ReaderOptions readerFactoryOptions)
: base(stream, readerFactoryOptions)
public SevenZipVolume(Stream stream, ReaderOptions readerFactoryOptions, int index = 0)
: base(stream, readerFactoryOptions, index)
{
}
}
}
}

View File

@@ -1,11 +1,13 @@
using System;
#nullable disable
using System;
using System.Buffers.Binary;
using System.IO;
using System.Text;
namespace SharpCompress.Common.Tar.Headers
{
internal class TarHeader
internal sealed class TarHeader
{
internal static readonly DateTime EPOCH = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
@@ -17,11 +19,9 @@ namespace SharpCompress.Common.Tar.Headers
internal string Name { get; set; }
internal string LinkName { get; set; }
//internal int Mode { get; set; }
//internal int UserId { get; set; }
//internal string UserName { get; set; }
//internal int GroupId { get; set; }
//internal string GroupName { get; set; }
internal long Mode { get; set; }
internal long UserId { get; set; }
internal long GroupId { get; set; }
internal long Size { get; set; }
internal DateTime LastModifiedTime { get; set; }
internal EntryType EntryType { get; set; }
@@ -95,7 +95,7 @@ namespace SharpCompress.Common.Tar.Headers
{
numPaddingBytes = BLOCK_SIZE;
}
output.Write(new byte[numPaddingBytes], 0, numPaddingBytes);
output.Write(stackalloc byte[numPaddingBytes]);
}
internal bool Read(BinaryReader reader)
@@ -125,9 +125,12 @@ namespace SharpCompress.Common.Tar.Headers
EntryType = ReadEntryType(buffer);
Size = ReadSize(buffer);
//Mode = ReadASCIIInt32Base8(buffer, 100, 7);
//UserId = ReadASCIIInt32Base8(buffer, 108, 7);
//GroupId = ReadASCIIInt32Base8(buffer, 116, 7);
Mode = ReadAsciiInt64Base8(buffer, 100, 7);
if(EntryType == EntryType.Directory)
Mode |= 0b1_000_000_000;
UserId = ReadAsciiInt64Base8(buffer, 108, 7);
GroupId = ReadAsciiInt64Base8(buffer, 116, 7);
long unixTimeStamp = ReadAsciiInt64Base8(buffer, 136, 11);
LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime();
@@ -258,10 +261,16 @@ namespace SharpCompress.Common.Tar.Headers
return Convert.ToInt64(s);
}
private static readonly byte[] eightSpaces = {
(byte)' ', (byte)' ', (byte)' ', (byte)' ',
(byte)' ', (byte)' ', (byte)' ', (byte)' '
};
internal static int RecalculateChecksum(byte[] buf)
{
// Set default value for checksum. That is 8 spaces.
Encoding.UTF8.GetBytes(" ").CopyTo(buf, 148);
eightSpaces.CopyTo(buf, 148);
// Calculate checksum
int headerChecksum = 0;
@@ -274,7 +283,7 @@ namespace SharpCompress.Common.Tar.Headers
internal static int RecalculateAltChecksum(byte[] buf)
{
Encoding.UTF8.GetBytes(" ").CopyTo(buf, 148);
eightSpaces.CopyTo(buf, 148);
int headerChecksum = 0;
foreach (byte b in buf)
{

View File

@@ -1,4 +1,6 @@
using System;
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Tar.Headers;
@@ -42,6 +44,12 @@ namespace SharpCompress.Common.Tar
public override bool IsSplitAfter => false;
public long Mode => _filePart.Header.Mode;
public long UserID => _filePart.Header.UserId;
public long GroupId => _filePart.Header.GroupId;
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
internal static IEnumerable<TarEntry> GetEntries(StreamingMode mode, Stream stream,

View File

@@ -4,7 +4,7 @@ using SharpCompress.IO;
namespace SharpCompress.Common.Tar
{
internal class TarFilePart : FilePart
internal sealed class TarFilePart : FilePart
{
private readonly Stream _seekableStream;
@@ -23,13 +23,13 @@ namespace SharpCompress.Common.Tar
{
if (_seekableStream != null)
{
_seekableStream.Position = Header.DataStartPosition.Value;
return new ReadOnlySubStream(_seekableStream, Header.Size);
_seekableStream.Position = Header.DataStartPosition!.Value;
return new TarReadOnlySubStream(_seekableStream, Header.Size);
}
return Header.PackedStream;
}
internal override Stream GetRawStream()
internal override Stream? GetRawStream()
{
return null;
}

View File

@@ -7,11 +7,11 @@ namespace SharpCompress.Common.Tar
{
internal static class TarHeaderFactory
{
internal static IEnumerable<TarHeader> ReadHeader(StreamingMode mode, Stream stream, ArchiveEncoding archiveEncoding)
internal static IEnumerable<TarHeader?> ReadHeader(StreamingMode mode, Stream stream, ArchiveEncoding archiveEncoding)
{
while (true)
{
TarHeader header = null;
TarHeader? header = null;
try
{
BinaryReader reader = new BinaryReader(stream);

View File

@@ -20,22 +20,24 @@ namespace SharpCompress.Common.Tar
{
return;
}
_isDisposed = true;
if (disposing)
{
long skipBytes = _amountRead % 512;
if (skipBytes == 0)
// Ensure we read all remaining blocks for this entry.
Stream.Skip(BytesLeftToRead);
_amountRead += BytesLeftToRead;
// If the last block wasn't a full 512 bytes, skip the remaining padding bytes.
var bytesInLastBlock = _amountRead % 512;
if (bytesInLastBlock != 0)
{
return;
Stream.Skip(512 - bytesInLastBlock);
}
skipBytes = 512 - skipBytes;
if (skipBytes == 0)
{
return;
}
var buffer = new byte[skipBytes];
Stream.ReadFully(buffer);
}
base.Dispose(disposing);
}

View File

@@ -1,13 +1,13 @@
using System.IO;
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common.Tar
{
public class TarVolume : Volume
{
public TarVolume(Stream stream, ReaderOptions readerOptions)
: base(stream, readerOptions)
public TarVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
: base(stream, readerOptions, index)
{
}
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.IO;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -9,12 +9,13 @@ namespace SharpCompress.Common
{
private readonly Stream _actualStream;
internal Volume(Stream stream, ReaderOptions readerOptions)
internal Volume(Stream stream, ReaderOptions readerOptions, int index = 0)
{
Index = index;
ReaderOptions = readerOptions;
if (readerOptions.LeaveStreamOpen)
{
stream = new NonDisposingStream(stream);
stream = NonDisposingStream.Create(stream);
}
_actualStream = stream;
}
@@ -29,6 +30,10 @@ namespace SharpCompress.Common
/// </summary>
public virtual bool IsFirstVolume => true;
public virtual int Index { get; internal set; }
public string FileName { get { return (_actualStream as FileStream)?.Name!; } }
/// <summary>
/// RarArchive is part of a multi-part archive.
/// </summary>
@@ -48,4 +53,4 @@ namespace SharpCompress.Common
GC.SuppressFinalize(this);
}
}
}
}

View File

@@ -33,7 +33,7 @@ namespace SharpCompress.Common.Zip.Headers
public ushort CommentLength { get; private set; }
public byte[] Comment { get; private set; }
public byte[]? Comment { get; private set; }
public ushort TotalNumberOfEntries { get; private set; }

View File

@@ -32,7 +32,7 @@ namespace SharpCompress.Common.Zip.Headers
byte[] name = reader.ReadBytes(nameLength);
byte[] extra = reader.ReadBytes(extraLength);
byte[] comment = reader.ReadBytes(commentLength);
// According to .ZIP File Format Specification
//
// For example: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
@@ -40,7 +40,7 @@ namespace SharpCompress.Common.Zip.Headers
// Bit 11: Language encoding flag (EFS). If this bit is set,
// the filename and comment fields for this file
// MUST be encoded using UTF-8. (see APPENDIX D)
if (Flags.HasFlag(HeaderFlags.Efs))
{
Name = ArchiveEncoding.DecodeUTF8(name);
@@ -63,6 +63,8 @@ namespace SharpCompress.Common.Zip.Headers
var zip64ExtraData = Extra.OfType<Zip64ExtendedInformationExtraField>().FirstOrDefault();
if (zip64ExtraData != null)
{
zip64ExtraData.Process(UncompressedSize, CompressedSize, RelativeOffsetOfEntryHeader, DiskNumberStart);
if (CompressedSize == uint.MaxValue)
{
CompressedSize = zip64ExtraData.CompressedSize;
@@ -92,6 +94,6 @@ namespace SharpCompress.Common.Zip.Headers
public ushort DiskNumberStart { get; set; }
public string Comment { get; private set; }
public string? Comment { get; private set; }
}
}

View File

@@ -24,7 +24,7 @@ namespace SharpCompress.Common.Zip.Headers
ushort extraLength = reader.ReadUInt16();
byte[] name = reader.ReadBytes(nameLength);
byte[] extra = reader.ReadBytes(extraLength);
// According to .ZIP File Format Specification
//
// For example: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
@@ -32,7 +32,7 @@ namespace SharpCompress.Common.Zip.Headers
// Bit 11: Language encoding flag (EFS). If this bit is set,
// the filename and comment fields for this file
// MUST be encoded using UTF-8. (see APPENDIX D)
if (Flags.HasFlag(HeaderFlags.Efs))
{
Name = ArchiveEncoding.DecodeUTF8(name);
@@ -41,7 +41,7 @@ namespace SharpCompress.Common.Zip.Headers
{
Name = ArchiveEncoding.Decode(name);
}
LoadExtra(extra);
var unicodePathExtra = Extra.FirstOrDefault(u => u.Type == ExtraDataType.UnicodePathExtraField);
@@ -53,6 +53,8 @@ namespace SharpCompress.Common.Zip.Headers
var zip64ExtraData = Extra.OfType<Zip64ExtendedInformationExtraField>().FirstOrDefault();
if (zip64ExtraData != null)
{
zip64ExtraData.Process(UncompressedSize, CompressedSize, 0, 0);
if (CompressedSize == uint.MaxValue)
{
CompressedSize = zip64ExtraData.CompressedSize;

View File

@@ -18,13 +18,25 @@ namespace SharpCompress.Common.Zip.Headers
internal class ExtraData
{
internal ExtraDataType Type { get; set; }
internal ushort Length { get; set; }
internal byte[] DataBytes { get; set; }
public ExtraData(ExtraDataType type, ushort length, byte[] dataBytes)
{
Type = type;
Length = length;
DataBytes = dataBytes;
}
internal ExtraDataType Type { get; }
internal ushort Length { get; }
internal byte[] DataBytes { get; }
}
internal class ExtraUnicodePathExtraField : ExtraData
internal sealed class ExtraUnicodePathExtraField : ExtraData
{
public ExtraUnicodePathExtraField(ExtraDataType type, ushort length, byte[] dataBytes)
: base(type, length, dataBytes)
{
}
internal byte Version => DataBytes[0];
internal byte[] NameCrc32
@@ -49,70 +61,79 @@ namespace SharpCompress.Common.Zip.Headers
}
}
internal class Zip64ExtendedInformationExtraField : ExtraData
internal sealed class Zip64ExtendedInformationExtraField : ExtraData
{
public Zip64ExtendedInformationExtraField(ExtraDataType type, ushort length, byte[] dataBytes)
: base(type, length, dataBytes)
{
Type = type;
Length = length;
DataBytes = dataBytes;
Process();
}
//From the spec values are only in the extradata if the standard
//value is set to 0xFFFF, but if one of the sizes are present, both are.
//Hence if length == 4 volume only
// if length == 8 offset only
// if length == 12 offset + volume
// if length == 16 sizes only
// if length == 20 sizes + volume
// if length == 24 sizes + offset
// if length == 28 everything.
//It is unclear how many of these are used in the wild.
private void Process()
// From the spec, values are only in the extradata if the standard
// value is set to 0xFFFFFFFF (or 0xFFFF for the Disk Start Number).
// Values, if present, must appear in the following order:
// - Original Size
// - Compressed Size
// - Relative Header Offset
// - Disk Start Number
public void Process(long uncompressedFileSize, long compressedFileSize, long relativeHeaderOffset, ushort diskNumber)
{
switch (DataBytes.Length)
var bytesRequired = ((uncompressedFileSize == uint.MaxValue) ? 8 : 0)
+ ((compressedFileSize == uint.MaxValue) ? 8 : 0)
+ ((relativeHeaderOffset == uint.MaxValue) ? 8 : 0)
+ ((diskNumber == ushort.MaxValue) ? 4 : 0);
var currentIndex = 0;
if (bytesRequired > DataBytes.Length)
{
case 4:
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes);
return;
case 8:
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
return;
case 12:
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(8));
return;
case 16:
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
return;
case 20:
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(16));
return;
case 24:
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(16));
return;
case 28:
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(16));
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(24));
return;
default:
throw new ArchiveException("Unexpected size of of Zip64 extended information extra field");
throw new ArchiveException("Zip64 extended information extra field is not large enough for the required information");
}
if (uncompressedFileSize == uint.MaxValue)
{
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(currentIndex));
currentIndex += 8;
}
if (compressedFileSize == uint.MaxValue)
{
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(currentIndex));
currentIndex += 8;
}
if (relativeHeaderOffset == uint.MaxValue)
{
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(currentIndex));
currentIndex += 8;
}
if (diskNumber == ushort.MaxValue)
{
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(currentIndex));
}
}
/// <summary>
/// Uncompressed file size. Only valid after <see cref="Process(long, long, long, ushort)"/> has been called and if the
/// original entry header had a corresponding 0xFFFFFFFF value.
/// </summary>
public long UncompressedSize { get; private set; }
/// <summary>
/// Compressed file size. Only valid after <see cref="Process(long, long, long, ushort)"/> has been called and if the
/// original entry header had a corresponding 0xFFFFFFFF value.
/// </summary>
public long CompressedSize { get; private set; }
/// <summary>
/// Relative offset of the entry header. Only valid after <see cref="Process(long, long, long, ushort)"/> has been called and if the
/// original entry header had a corresponding 0xFFFFFFFF value.
/// </summary>
public long RelativeOffsetOfEntryHeader { get; private set; }
/// <summary>
/// Volume number. Only valid after <see cref="Process(long, long, long, ushort)"/> has been called and if the
/// original entry header had a corresponding 0xFFFF value.
/// </summary>
public uint VolumeNumber { get; private set; }
}
@@ -120,30 +141,12 @@ namespace SharpCompress.Common.Zip.Headers
{
internal static ExtraData Create(ExtraDataType type, ushort length, byte[] extraData)
{
switch (type)
return type switch
{
case ExtraDataType.UnicodePathExtraField:
return new ExtraUnicodePathExtraField
{
Type = type,
Length = length,
DataBytes = extraData
};
case ExtraDataType.Zip64ExtendedInformationExtraField:
return new Zip64ExtendedInformationExtraField
(
type,
length,
extraData
);
default:
return new ExtraData
{
Type = type,
Length = length,
DataBytes = extraData
};
}
ExtraDataType.UnicodePathExtraField => new ExtraUnicodePathExtraField(type, length, extraData),
ExtraDataType.Zip64ExtendedInformationExtraField => new Zip64ExtendedInformationExtraField(type, length, extraData),
_ => new ExtraData(type, length, extraData)
};
}
}
}

View File

@@ -43,6 +43,6 @@ namespace SharpCompress.Common.Zip.Headers
public long DirectoryStartOffsetRelativeToDisk { get; private set; }
public byte[] DataSector { get; private set; }
public byte[]? DataSector { get; private set; }
}
}

View File

@@ -1,4 +1,6 @@
using System;
#nullable disable
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
@@ -18,7 +20,7 @@ namespace SharpCompress.Common.Zip.Headers
{
get
{
if (Name.EndsWith("/"))
if (Name.EndsWith('/'))
{
return true;
}
@@ -26,7 +28,7 @@ namespace SharpCompress.Common.Zip.Headers
//.NET Framework 4.5 : System.IO.Compression::CreateFromDirectory() probably writes backslashes to headers
return CompressedSize == 0
&& UncompressedSize == 0
&& Name.EndsWith("\\");
&& Name.EndsWith('\\');
}
}
@@ -52,7 +54,7 @@ namespace SharpCompress.Common.Zip.Headers
internal PkwareTraditionalEncryptionData ComposeEncryptionData(Stream archiveStream)
{
if (archiveStream == null)
if (archiveStream is null)
{
throw new ArgumentNullException(nameof(archiveStream));
}
@@ -60,7 +62,7 @@ namespace SharpCompress.Common.Zip.Headers
var buffer = new byte[12];
archiveStream.ReadFully(buffer);
PkwareTraditionalEncryptionData encryptionData = PkwareTraditionalEncryptionData.ForRead(Password, this, buffer);
PkwareTraditionalEncryptionData encryptionData = PkwareTraditionalEncryptionData.ForRead(Password!, this, buffer);
return encryptionData;
}
@@ -103,6 +105,6 @@ namespace SharpCompress.Common.Zip.Headers
internal ZipFilePart Part { get; set; }
internal bool IsZip64 => CompressedSize == uint.MaxValue;
internal bool IsZip64 => CompressedSize >= uint.MaxValue;
}
}

View File

@@ -40,7 +40,7 @@ namespace SharpCompress.Common.Zip
throw new NotSupportedException("This stream does not encrypt via Read()");
}
if (buffer == null)
if (buffer is null)
{
throw new ArgumentNullException(nameof(buffer));
}
@@ -64,7 +64,7 @@ namespace SharpCompress.Common.Zip
return;
}
byte[] plaintext = null;
byte[] plaintext;
if (offset != 0)
{
plaintext = new byte[count];

Some files were not shown because too many files have changed in this diff Show More