Compare commits

...

120 Commits
0.21.1 ... 0.25

Author SHA1 Message Date
Adam Hathcock
762497b1c1 Tag for 0.25.0 and update packages 2020-04-03 08:25:43 +01:00
Adam Hathcock
be9edc7512 Merge pull request #500 from Erior/Issue_86
ZipReader/StreamingZipReaderFactory fails for archive entries which are uncompressed files in ZIP format #86
2020-01-17 09:38:19 +00:00
Lars Vahlenberg
9bf9d34d94 Issue86 Proposal 2020-01-16 22:08:48 +01:00
Adam Hathcock
df8405006c Fix workflow name 2020-01-03 09:24:08 +00:00
Adam Hathcock
d135fdce58 Give github actions build a name and use badge 2020-01-03 09:22:51 +00:00
Adam Hathcock
ba570b93bb Merge pull request #496 from Bond-009/allocations
Reduce the amount of allocations
2020-01-03 09:18:17 +00:00
Adam Hathcock
6dfe0c7a96 Merge branch 'master' into allocations 2020-01-03 09:16:46 +00:00
Adam Hathcock
73d4430a65 Merge pull request #498 from adamhathcock/build-netcore3
Build netcore3
2020-01-03 09:15:14 +00:00
Adam Hathcock
ce6fd9b976 JUst one target 2020-01-03 09:12:10 +00:00
Adam Hathcock
ae7e8c03f2 Put wrong SDK 2020-01-03 09:07:34 +00:00
Adam Hathcock
22e2526f4c Update cake and dependencies 2020-01-03 09:06:13 +00:00
Adam Hathcock
50283d9411 Add new build targets for netcore3 2020-01-03 09:02:04 +00:00
Bond-009
d2c2b58f3b Fix language version and add netstandard2.1 2020-01-02 17:43:58 +01:00
Bond_009
50d4b39ca0 Fix test 2019-12-30 22:17:45 +01:00
Bond_009
1ed675e960 Minor improvement 2019-12-30 19:19:05 +01:00
Bond_009
80b0671844 Reduce the amount of allocations
* Directly fill an array instead of filling a List and copying that to
an array
* Use own buffer when writing bytes to a stream
* Remove DataConverter class, replaced by BinaryPrimitives
2019-12-30 18:58:25 +01:00
Bond-009
6f387336c0 Use functions from System.Memory instead of selfmade ones (#495)
* Use functions from System.Memory instead of selfmade ones

* Update SharpCompress.Test.csproj
2019-12-30 15:19:46 +00:00
Adam Hathcock
9540b01bcc NET Standard 1.3 and 2.0 only (#482)
* Remove NET35, NET45 and NET Standard 1.0

* Update README and memset

* Remove NETCORE build flag

* NET 46 too?

* Update packages and usage
2019-10-10 09:24:41 +01:00
Adam Hathcock
446d6914c1 Merge pull request #483 from Bond-009/nameof
Use nameof for param names
2019-09-17 14:21:04 +01:00
Bond_009
637223aa53 Use nameof for param names 2019-09-17 13:28:44 +02:00
Adam Hathcock
17d5565120 Merge pull request #478 from Bond-009/buffers
Use System.Buffers Nuget package
2019-09-17 10:05:29 +01:00
Bond_009
4b54187b4c Fix build 2019-09-11 21:33:57 +02:00
Bond_009
cfb1421367 Use System.Buffers Nuget package 2019-09-11 20:06:50 +02:00
Adam Hathcock
5072a0f6f5 Merge pull request #471 from adamhathcock/release-024
Bump version and dependencies
2019-08-20 20:36:38 +01:00
Adam Hathcock
357dff1403 Bump version and dependencies 2019-08-20 14:29:47 -05:00
Adam Hathcock
a2bd66ded8 Merge pull request #460 from itn3000/tar-fix-garbled2
fix filename garbling in tar(#414)
2019-06-27 12:16:53 +01:00
itn3000
6bfa3c25a4 add more comments 2019-06-27 20:01:40 +09:00
itn3000
1ea9ab72c1 add comment for subtracting 2019-06-27 19:59:16 +09:00
itn3000
07c42b8725 replace magic number 2019-06-27 10:59:21 +09:00
itn3000
70392c32e2 use Buffer.BlockCopy for performance 2019-06-27 09:47:26 +09:00
itn3000
9b4b2a9f7c considering encoding in processing filename(#414)
modify test tar archive because it was not expected one.
(expected "тест.txt" in encoding 866, but actual is omitted upper byte)
2019-06-26 17:34:12 +09:00
Adam Hathcock
d3dd708b58 Merge pull request #457 from DannyBoyk/issue_456_zip_bounded_substreams_data_descriptors
Return a bounded substream when data descriptors are used in seekable zips
2019-06-04 13:42:24 +01:00
Daniel Nash
af264cdc58 Return a bounded substream when data descriptors are used in seekable zips 2019-06-04 08:31:42 -04:00
Adam Hathcock
cfd6df976f Merge pull request #455 from DannyBoyk/issue_454_zip_bad_extra_field
Handle a bad extra field in a local file header in zip files
2019-06-04 09:24:55 +01:00
Daniel Nash
b2bd20b47e Handle a bad extra field in a local file header in zip files 2019-06-03 13:02:28 -04:00
Adam Hathcock
ffea093e95 Merge pull request #453 from Lssikkes/master
Fix for clients failing on just having a 64 bit offset in ZIP64
2019-05-24 19:33:59 +01:00
Leroy Sikkes
78eb8fcf92 Fix for clients that don't support ZIP64 standard correctly in case headers are only pointed to in ZIP64 directory structure 2019-05-24 18:27:49 +02:00
Adam Hathcock
a052956881 Merge pull request #452 from Lssikkes/master
Various fixes for ZIP64 writer (zero byte entries, 32 bit where supported)
2019-05-24 16:17:48 +01:00
Lssikkes
9319ea6992 Updated ZIP64 writer to write 32 bit values to directory entries for better compatibility.
Support for zero byte files without corruption errors from WinRAR/7-zip
2019-05-24 16:14:30 +02:00
Adam Hathcock
4e5b70dbfa Merge pull request #444 from eugeny-trunin/mem-opt
Memory and speed optimization
2019-03-20 15:13:00 +00:00
evgeny
c68eaa8397 Memory and speed optimization 2019-03-20 17:46:57 +03:00
Adam Hathcock
bbb7c85ba7 Merge pull request #442 from turbolocust/master
Fix: ArchiveEncoding was ignored in TarWriterOptions
2019-03-19 08:31:31 +00:00
Matthias Fussenegger
8174359228 Fix: ArchiveEncoding was ignored in TarWriterOptions 2019-03-18 18:25:00 +01:00
Adam Hathcock
acf66c5195 Merge pull request #440 from adamhathcock/release-023
Bump release for 0.23 and update dependencies
2019-02-27 12:57:30 +00:00
Adam Hathcock
880c9fa97a Bump release and update dependencies 2019-02-27 12:55:16 +00:00
Adam Hathcock
e5c111f2be Merge pull request #436 from Numpsy/populate_zip_volume_comment
Changes to populate ZipArchive.ZipVolumne.Comment
2019-01-14 08:14:03 +00:00
Richard Webb
4e9cd064dd Unit test to show reading of a Zip volume/archive comment 2019-01-13 21:05:55 +00:00
Richard Webb
12a6d3977e Return the DirectoryEndHeader from SeekableZipHeaderFactory.ReadSeekable so that it can be used by ZipArchive 2018-12-14 22:44:44 +00:00
Adam Hathcock
a95bbaf820 Merge pull request #423 from markfinal/tarsymlink
Tar symlink support
2018-11-05 08:31:48 +00:00
Mark Final
70bafa653b Tar symlink extraction
Extended SharpCompress.Common.ExtractionOptions with a delegate to write symbolic links. If not is null, and a symbolic link is encountered, an exception is thrown.
Removed Mono.Posix.NETStandard from the library, but added to the .NET Core 2.1 test application.
Extended the test to implement the delegate.
2018-11-03 09:45:12 +00:00
Mark Final
3f4338489c Removed unnecessary code 2018-11-01 21:57:49 +00:00
Mark Final
d91e58f2cc Refs #248. Refs #132. Added a test case of a tar containing symlinks
This is a source archive of the MoltenVK project from github, which is my use-case for SharpCompress.
I added a test case in the project, which should extract the tar, and validate any symlink targets with what the tar thinks it ought to be.
2018-11-01 21:51:14 +00:00
Mark Final
192b9c1e8b Ref #248. Ref #132. Tar reader support for symlinks for .NET standard 2 and Posix platforms
Extracts linkname from the tar header, and exposes this on IEntry as the LinkTarget (string) property. If an entry is not a symlink, then that property is null.

Uses Mono.Posix.NETStandard nuget to create a symlink. However, this is only applicable to .NET standard 2.0+. So far, unable to find a nuget that works for older versions.

Also, not sure what to do on Windows.
2018-11-01 21:48:51 +00:00
Adam Hathcock
0941239454 Merge pull request #417 from KyotoFox/fix-entrystream-flush
Allow Flush on EntryStream
2018-10-04 12:48:08 +01:00
Knut Ørland
53ad00cdc4 Use soft tabs 2018-10-04 13:13:14 +02:00
Knut Ørland
6dd5da48f7 Added test that calls EntryStream.Flush() 2018-10-04 13:08:53 +02:00
Knut Ørland
efae8328a9 Don't throw an exception when flushing an EntryStream
From Microsoft docs: “In a class derived from Stream that doesn't
support writing, Flush is typically implemented as an empty method to
ensure full compatibility with other Stream types since it's valid to
flush a read-only stream.”
2018-10-04 13:05:36 +02:00
Adam Hathcock
f1facc51de Merge pull request #409 from G-Research/RecogniseEmptyTarArchives
Recognise empty tar archives.
2018-09-25 13:20:59 +01:00
Adam Hathcock
a471ca6a76 Use Cake tool on circle. Update test packages 2018-08-31 09:27:04 +01:00
Elliot Prior
83f6690576 Recognise empty tar archives.
Currently, when ArchiveFactory.Open is called on an empty tar archive, it throws due to being unable to determine the stream type. This fix allows it to recognise empty tar files by checking for whether the filename is empty, the size is empty and the entry type is defined. Add a test to try opening an empty archive.
2018-08-16 10:25:47 +01:00
Adam Hathcock
1850ea67f6 Merge pull request #408 from majoreffort/master
Test and fix for #407
2018-07-24 09:42:03 +01:00
majoreffort
2fd6178aa9 Fixed length in TarHeader#WriteStringBytes 2018-07-23 19:58:37 +02:00
majoreffort
ec044e6f42 Added Tar test for 100 char filename issue. 2018-07-23 19:48:01 +02:00
Adam Hathcock
bd96279649 Merge pull request #404 from MattKotsenas/bugfix/idisposable
Enable parallel test execution
2018-07-12 19:53:50 +01:00
Matt Kotsenas
f7ad595945 Enable test parallelization and remove garbage collection workaround
Now that the sources of file locking are fixed, enable test parallelization
and the forced garbage collection workaround.

Lastly, remove the `IsLocked` check because it doesn't work in a
parallel test running world - the file may be locked due to another test
running.
2018-07-12 10:33:19 -07:00
Matt Kotsenas
93c0b91de9 Refactor TestSharpCompressWithEmptyStream
Refactor `TestSharpCompressWithEmptyStream` so it asserts that the files
and bytes are the same.
2018-07-12 10:32:08 -07:00
Matt Kotsenas
138038b08f Move RarReaderTests over to user ReaderFactory
- Refactor `RarReaderTests` to use `ReaderFactory`
- Update `ReaderTests.Read` to support Rar tests
2018-07-12 10:32:08 -07:00
Matt Kotsenas
e9a6fed607 FIXUP decouple UseReader from VerifyFiles 2018-07-11 16:53:34 -07:00
Matt Kotsenas
87a1440382 Decouple UseReader from VerifyFiles 2018-07-11 16:49:49 -07:00
Matt Kotsenas
3c2f4ebe9b Combine ForwardOnlyStream and NonSeekableStream
Delete `NonSeekableStream` used in Zip64 tests in favor
of `ForwardOnlyStream` used in Mocks.

Additionally, delete the `ForwardOnlyStream.ReadByte` implementation
as the implementation on the base Stream is sufficient.
2018-07-11 16:42:03 -07:00
Matt Kotsenas
933ffe7828 Remove unused code from ArchiveTests 2018-07-11 16:33:46 -07:00
Matt Kotsenas
7d20ba5243 Simplify RarHeaderTests 2018-07-11 16:21:19 -07:00
Matt Kotsenas
44dc36af48 Update ReaderTests base class to validate Dispose
Update the `ReaderTests` base class to validate that `Dispose` is
called appropriately in both the close and the leave open cases.
2018-07-11 16:21:19 -07:00
Matt Kotsenas
98558c5ba9 Refactor TestStream constructor
Refactor the `TestStream` constructor so by default it defers to
the underlying Stream
2018-07-11 16:21:19 -07:00
Matt Kotsenas
6c25322465 Follow best-practices for Dispose in Volume and ForwardOnlyStream
Update `Volume` and `ForwardOnlyStream` to follow the project's
general pattern and best-practices for `Dispose`
2018-07-11 16:21:19 -07:00
Matt Kotsenas
6d1d62fd32 Delete AppendingStream
`AppendingStream` is unused, so rather than refactor it, just delete it.
2018-07-11 16:21:19 -07:00
Matt Kotsenas
ee4ae661d7 Refactor ListeningStream
Refactor `ListeningStream`:

- Override of `WriteByte` was redundant and removed
- Make `Dispose` delegate to base class
2018-07-11 16:21:19 -07:00
Matt Kotsenas
0473ec1626 Open test archives as read
Update `RarHeaderFactoryTests` and `GZipArchiveTests` to open the test
readers as `FileAccess.Read` and `FileShare.Read` to prevent issues with
multiple test from trying to open exclusive access to files.
2018-07-11 16:21:19 -07:00
Matt Kotsenas
c6cf0d40ee Simplify ReaderTests
The `IEnumerable<string>` version of `ReaderTests` is unused, so delete
it to simplify the code.
2018-07-11 16:21:19 -07:00
Matt Kotsenas
4cd80e96f3 Simplify GZip bad compression test 2018-07-11 16:21:19 -07:00
Matt Kotsenas
16524717ba Fix Stream leak in ArchiveFactory
`ArchiveFactory.Open` has two overloads that take `string` or
`FileInfo` (string delegates to FileInfo). Both of these implementations
open a `Stream` with the default `ReaderOptions`, which leaves the
stream open, resulting in a leak.

The fix is to set `LeaveOpen` to `false` if no options were provided.
Note that if a user was provding options and `LeaveOpen` was set to
`true`, the code did and will still leak.
2018-07-11 16:21:19 -07:00
Matt Kotsenas
cab1ce3d0c Update sub-streams to uniformly inherit from NonDisposingStream
Update the sub-stream classes to all inherit from `NonDisposingStream`.
This allows them to correctly implement the `Dispose` pattern, and delegate
the actual disposal to `NonDisposingStream`.

In doing so, we need to remove some redundant overrides from
`NonDisposingStream`, otherwise `BufferedSubStream` would use the
overrides inherited from `NonDisposingStream` instead of the ones
inherited from `Stream` (i.e. delegate `ReadByte` to `Read`).
2018-07-11 16:17:49 -07:00
Matt Kotsenas
6c2e5e1164 Cleanup NonDisposingStream for reuse
- Remove the duplicate `GC.SuppressFinalization` call
(called in `System.IO.Stream)
- Improve the `ThrowOnDispose` error message
2018-07-11 12:19:34 -07:00
Matt Kotsenas
c2bf540057 Close verification streams in TestBase.CompareArchivesByPath 2018-07-11 12:12:30 -07:00
Matt Kotsenas
a35c66e166 Move RewindableStreamTest.cs to the Streams/ folder 2018-07-10 12:07:33 -07:00
Matt Kotsenas
084c5e2686 Rename StreamTests.cs --> LzmaStreamTests.cs 2018-07-10 12:07:32 -07:00
Matt Kotsenas
6ae715b153 Move the TestStream and ForwardOnlyStream to Mocks folder
Move the `TestStream` and `ForwardOnlyStream` to Mocks/ to separate them
from the test classes.
2018-07-10 12:07:32 -07:00
Adam Hathcock
9c8692806a Merge pull request #403 from MattKotsenas/bugfix/parallel-tests
Fix and re-enable tests
2018-07-10 20:01:20 +01:00
Matt Kotsenas
2d87351d2a Add tests back to AppVeyor 2018-07-10 11:52:00 -07:00
Matt Kotsenas
3114afde0e Add workaround for in-use files
The `TestBase` is not always able to delete the scratch folder in
`Dispose()` because sometimes the files are still in use.

This problem appears to be leaked file handles (likely due to incorrect
handling of `IDisposable`). To avoid the problem for now, force a
garbage collection prior to deleting the scratch folder.
2018-07-10 11:49:38 -07:00
Matt Kotsenas
7b338511cc Create unique scratch path per test
Tests fail in Visual Studio because they try to reuse the same scratch
working space, and each test is responsible for resetting the space. To
simplify the test code:

1. Make `TestBase` `IDisposable` and have it create the scratch space
2. Remove `ResetScratch()` as it is now handled by the base class
3. Add a unique ID to each scrach space folder to prevent collisions
2018-07-10 11:46:44 -07:00
Adam Hathcock
09c27681e1 Merge pull request #402 from a764578566/master
file search support linq Pattern
2018-07-10 13:21:09 +01:00
zhoujr
4ebc1f82b7 file search support linq Pattern 2018-07-10 19:58:59 +08:00
Adam Hathcock
4640ca497a Merge pull request #400 from MattKotsenas/feature/avoid-exception-in-readerfactory
Avoid throwing NotSupportedException in ReaderFactory hot path
2018-07-10 08:47:13 +01:00
Matt Kotsenas
bebccaae28 Avoid throwing NotSupportedException in ReaderFactory hot path
`ReaderFactory.Open()` calls `ZipArchive.IsZipFile()` to determine if
the `Stream` is a zip archive, which calls into
`ZipHeaderFactory.ReadHeader()`, which throws a `NotSupportedException`
when the `Stream` is not a zip archive.

To be clear, this exception is caught and `IsZipFile()` returns `false`,
but when called in a hot-path, these exceptions can become expensive.

To address this issue, `ReadHeader` now returns `null` in the default
cause instead of throwing. All callsites were already checking for and
handling `null`, so no behavior changes.
2018-07-09 18:44:46 -07:00
Adam Hathcock
7ee53373c6 Remove tests as AppVeyor can’t handle them at the moment 2018-07-09 09:05:10 +01:00
Adam Hathcock
d577fe1ac6 Merge pull request #385 from twirpx/master
Fixed EFS flag handling
2018-07-09 08:48:34 +01:00
Adam Hathcock
9f976aaf78 Merge branch 'master' into master 2018-07-09 08:48:26 +01:00
Adam Hathcock
8a7d7e366f Merge pull request #396 from andreas-eriksson/Rar5IsEncrypted
Correctly set IsEncrypted for entries in Rar5.
2018-07-09 08:48:12 +01:00
Adam Hathcock
540ab1c6fa Merge branch 'master' into master 2018-07-09 08:47:32 +01:00
Adam Hathcock
6792afbdb1 Merge branch 'master' into Rar5IsEncrypted 2018-07-09 08:44:32 +01:00
Adam Hathcock
e5a7185671 Mark for 0.22 2018-07-09 08:42:45 +01:00
Adam Hathcock
cdaf453b2d Update dependencies and tests to .NET Core 2.1 2018-07-09 08:39:37 +01:00
Andreas Eriksson
f9cc80e1de Correctly set IsEncrypted for entries in Rar5. 2018-06-29 15:51:40 +02:00
Adam Hathcock
7beff9e83c Merge pull request #395 from adamhathcock/zip-slip-readers
Zip slip for Readers
2018-06-28 11:56:44 +01:00
Adam Hathcock
8f49f1b6f8 Merge remote-tracking branch 'origin/master' into zip-slip-readers 2018-06-28 11:52:43 +01:00
Adam Hathcock
7e336a0247 Slip in new SDK 2018-06-28 11:51:17 +01:00
Adam Hathcock
e37e8bdadc Move path handling for extraction to be common
Reader and Archive now share more extraction logic
2018-06-28 11:46:51 +01:00
Adam Hathcock
40bd61b16b Merge pull request #389 from frankyjuang/patch-1
Fix comment
2018-06-08 08:59:52 +01:00
Juang, Yi-Lin
87fbb45099 Fix comment 2018-06-08 11:27:43 +08:00
twirpx
e822f9a95c Tests fixed to use explicit use of 866 encoding because of usage file named in Russian in several tests 2018-05-30 22:17:27 +05:00
twirpx
8a5a9159e1 Fixed DirectoryEntryHeader Name/Comment decoding in case of EFS flags set 2018-05-30 21:47:31 +05:00
twirpx
73b3c6b419 Merge branch 'master' of https://github.com/adamhathcock/sharpcompress 2018-05-30 20:28:15 +05:00
Adam Hathcock
f9bd7ebdb0 Merge pull request #384 from MrJul/perf-readbyte
Implemented ReadByte/WriteByte on streams to improve performance
2018-05-28 09:21:28 +01:00
Julien Lebosquain
540618c062 Implemented ReadByte/WriteByte on streams to improve performance 2018-05-27 16:31:44 +02:00
Adam Hathcock
9e96dec8c9 Merge pull request #383 from itn3000/add-filename-encoding-example
add example for custom file encoding
2018-05-23 09:14:46 +01:00
itn3000
7b7af612ba add example for custom file encoding 2018-05-23 09:46:36 +09:00
Adam Hathcock
3a747ba87e Update USAGE with new stream handling 2018-05-16 08:51:33 +01:00
twirpx
149f5e4fb5 Minor fixes 2017-08-22 11:46:32 +05:00
twirpx
1793fc949d Fixed bug: Passing default ReaderOptions when creating ZipReader for solid extraction 2017-08-16 08:57:36 +05:00
151 changed files with 1594 additions and 3305 deletions

View File

@@ -1,15 +0,0 @@
version: 2
jobs:
build:
docker:
- image: microsoft/dotnet:2.0.7-sdk-2.1.200
steps:
- checkout
- run:
name: Install unzip
command: |
apt-get update
apt-get install -y unzip
- run:
name: Build
command: ./build.sh

17
.github/workflows/dotnetcore.yml vendored Normal file
View File

@@ -0,0 +1,17 @@
name: SharpCompress
on: [push]
jobs:
build:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [windows-latest, ubuntu-latest]
steps:
- uses: actions/checkout@v1
- uses: actions/setup-dotnet@v1
with:
dotnet-version: 3.1.100
- name: Run the Cake script
uses: ecampidoglio/cake-action@master

View File

@@ -1,14 +1,14 @@
# SharpCompress
SharpCompress is a compression library in pure C# for .NET 3.5, 4.5, .NET Standard 1.0, 1.3 that can unrar, un7zip, unzip, untar unbzip2 and ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip are implemented.
SharpCompress is a compression library in pure C# for .NET Standard 1.3 and 2.0 that can unrar, un7zip, unzip, untar unbzip2 and ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip are implemented.
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
AppVeyor Build -
[![Build status](https://ci.appveyor.com/api/projects/status/voxg971oemmvxh1e/branch/master?svg=true)](https://ci.appveyor.com/project/adamhathcock/sharpcompress/branch/master)
Circle CI Build -
[![CircleCI](https://circleci.com/gh/adamhathcock/sharpcompress.svg?style=svg)](https://circleci.com/gh/adamhathcock/sharpcompress)
GitHub Actions Build -
[![GitHubActions](https://github.com/adamhathcock/sharpcompress/workflows/SharpCompress/badge.svg)](https://circleci.com/gh/adamhathcock/sharpcompress)
## Need Help?

View File

@@ -122,6 +122,7 @@
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpKeepExistingMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpPlaceEmbeddedOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpRenamePlacementToArrangementMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpUseContinuousIndentInsideBracesMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EAddAccessorOwnerDeclarationBracesMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002ECSharpPlaceAttributeOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateBlankLinesAroundFieldToBlankLinesAroundProperty/@EntryIndexedValue">True</s:Boolean>

View File

@@ -1,8 +1,8 @@
# SharpCompress Usage
## Stream Rules
## Stream Rules (changed with 0.21)
When dealing with Streams, the rule should be that you don't close a stream you didn't create. This, in effect, should mean you should always put a Stream in a using block to dispose it.
When dealing with Streams, the rule should be that you don't close a stream you didn't create. This, in effect, should mean you should always put a Stream in a using block to dispose it.
However, the .NET Framework often has classes that will dispose streams by default to make things "easy" like the following:
@@ -23,13 +23,12 @@ using (var reader = new StreamReader(fileStream))
}
```
To deal with the "correct" rules as well as the expectations of users, I've decided on this:
* When writing, leave streams open.
* When reading, close streams
To deal with the "correct" rules as well as the expectations of users, I've decided to always close wrapped streams as of 0.21.
To be explicit though, consider always using the overloads that use `ReaderOptions` or `WriterOptions` and explicitly set `LeaveStreamOpen` the way you want.
If using Compression Stream classes directly and you don't want the wrapped stream to be closed. Use the `NonDisposingStream` as a wrapped to prevent the stream being disposed. The change in 0.21 simplified a lot even though the usage is a bit more convoluted.
## Samples
Also, look over the tests for more thorough [examples](https://github.com/adamhathcock/sharpcompress/tree/master/tests/SharpCompress.Test)
@@ -128,3 +127,20 @@ using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, new WriterOption
writer.WriteAll("D:\\temp", "*", SearchOption.AllDirectories);
}
```
### Extract zip which has non-utf8 encoded filename(cp932)
```C#
var opts = new SharpCompress.Readers.ReaderOptions();
var encoding = Encoding.GetEncoding(932);
opts.ArchiveEncoding = new SharpCompress.Common.ArchiveEncoding();
opts.ArchiveEncoding.CustomDecoder = (data, x, y) =>
{
return encoding.GetString(data);
};
var tr = SharpCompress.Archives.Zip.ZipArchive.Open("test.zip", opts);
foreach(var entry in tr.Entries)
{
Console.WriteLine($"{entry.Key}");
}
```

View File

@@ -1,5 +1,5 @@
version: '{build}'
image: Visual Studio 2017
image: Visual Studio 2019
pull_requests:
do_not_increment_build_number: true

View File

@@ -17,24 +17,24 @@ Task("Build")
{
c.SetConfiguration("Release")
.SetVerbosity(Verbosity.Minimal)
.UseToolVersion(MSBuildToolVersion.VS2017);
.UseToolVersion(MSBuildToolVersion.VS2019);
});
}
else
{
var settings = new DotNetCoreBuildSettings
{
Framework = "netstandard1.0",
Framework = "netstandard1.3",
Configuration = "Release",
NoRestore = true
};
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
settings.Framework = "netstandard1.3";
settings.Framework = "netstandard2.0";
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
settings.Framework = "netstandard2.0";
settings.Framework = "netstandard2.1";
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
}
});
@@ -49,7 +49,7 @@ Task("Test")
var settings = new DotNetCoreTestSettings
{
Configuration = "Release",
Framework = "netcoreapp2.0"
Framework = "netcoreapp3.1"
};
DotNetCoreTest(file.ToString(), settings);
}
@@ -64,7 +64,7 @@ Task("Pack")
MSBuild("src/SharpCompress/SharpCompress.csproj", c => c
.SetConfiguration("Release")
.SetVerbosity(Verbosity.Minimal)
.UseToolVersion(MSBuildToolVersion.VS2017)
.UseToolVersion(MSBuildToolVersion.VS2019)
.WithProperty("NoBuild", "true")
.WithTarget("Pack"));
}

View File

@@ -24,7 +24,6 @@ namespace SharpCompress.Archives
private bool disposed;
#if !NO_FILE
internal AbstractArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerOptions)
{
Type = type;
@@ -40,7 +39,6 @@ namespace SharpCompress.Archives
protected abstract IEnumerable<TVolume> LoadVolumes(FileInfo file);
#endif
internal AbstractArchive(ArchiveType type, IEnumerable<Stream> streams, ReaderOptions readerOptions)
{
@@ -142,12 +140,12 @@ namespace SharpCompress.Archives
/// <summary>
/// Use this method to extract all entries in an archive in order.
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
/// extracted sequentially for the best performance.
///
///
/// This method will load all entry information from the archive.
///
/// WARNING: this will reuse the underlying stream for the archive. Errors may
///
/// WARNING: this will reuse the underlying stream for the archive. Errors may
/// occur if this is used at the same time as other extraction methods on this instance.
/// </summary>
/// <returns></returns>
@@ -176,4 +174,4 @@ namespace SharpCompress.Archives
}
}
}
}
}

View File

@@ -28,12 +28,10 @@ namespace SharpCompress.Archives
{
}
#if !NO_FILE
internal AbstractWritableArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerFactoryOptions)
: base(type, fileInfo, readerFactoryOptions)
{
}
#endif
public override ICollection<TEntry> Entries
{
@@ -144,4 +142,4 @@ namespace SharpCompress.Archives
modifiedEntries.Cast<Entry>().ForEach(x => x.Close());
}
}
}
}

View File

@@ -6,7 +6,6 @@ using SharpCompress.Archives.SevenZip;
using SharpCompress.Archives.Tar;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Readers;
namespace SharpCompress.Archives
@@ -21,7 +20,7 @@ namespace SharpCompress.Archives
/// <returns></returns>
public static IArchive Open(Stream stream, ReaderOptions readerOptions = null)
{
stream.CheckNotNull("stream");
stream.CheckNotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException("Stream should be readable and seekable");
@@ -82,8 +81,6 @@ namespace SharpCompress.Archives
}
}
#if !NO_FILE
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
@@ -91,8 +88,8 @@ namespace SharpCompress.Archives
/// <param name="options"></param>
public static IArchive Open(string filePath, ReaderOptions options = null)
{
filePath.CheckNotNullOrEmpty("filePath");
return Open(new FileInfo(filePath), options ?? new ReaderOptions());
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), options);
}
/// <summary>
@@ -102,37 +99,32 @@ namespace SharpCompress.Archives
/// <param name="options"></param>
public static IArchive Open(FileInfo fileInfo, ReaderOptions options = null)
{
fileInfo.CheckNotNull("fileInfo");
options = options ?? new ReaderOptions();
fileInfo.CheckNotNull(nameof(fileInfo));
options = options ?? new ReaderOptions { LeaveStreamOpen = false };
using (var stream = fileInfo.OpenRead())
{
if (ZipArchive.IsZipFile(stream, null))
{
stream.Dispose();
return ZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (SevenZipArchive.IsSevenZipFile(stream))
{
stream.Dispose();
return SevenZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (GZipArchive.IsGZipFile(stream))
{
stream.Dispose();
return GZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (RarArchive.IsRarFile(stream, options))
{
stream.Dispose();
return RarArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (TarArchive.IsTarFile(stream))
{
stream.Dispose();
return TarArchive.Open(fileInfo, options);
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
@@ -153,6 +145,5 @@ namespace SharpCompress.Archives
}
}
}
#endif
}
}
}

View File

@@ -13,8 +13,6 @@ namespace SharpCompress.Archives.GZip
{
public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
{
#if !NO_FILE
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
@@ -22,7 +20,7 @@ namespace SharpCompress.Archives.GZip
/// <param name="readerOptions"></param>
public static GZipArchive Open(string filePath, ReaderOptions readerOptions = null)
{
filePath.CheckNotNullOrEmpty("filePath");
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
@@ -33,10 +31,9 @@ namespace SharpCompress.Archives.GZip
/// <param name="readerOptions"></param>
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
{
fileInfo.CheckNotNull("fileInfo");
fileInfo.CheckNotNull(nameof(fileInfo));
return new GZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
#endif
/// <summary>
/// Takes a seekable Stream as a source
@@ -45,7 +42,7 @@ namespace SharpCompress.Archives.GZip
/// <param name="readerOptions"></param>
public static GZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
{
stream.CheckNotNull("stream");
stream.CheckNotNull(nameof(stream));
return new GZipArchive(stream, readerOptions ?? new ReaderOptions());
}
@@ -54,8 +51,6 @@ namespace SharpCompress.Archives.GZip
return new GZipArchive();
}
#if !NO_FILE
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -100,7 +95,6 @@ namespace SharpCompress.Archives.GZip
SaveTo(stream, new WriterOptions(CompressionType.GZip));
}
}
#endif
public static bool IsGZipFile(Stream stream)
{
@@ -185,4 +179,4 @@ namespace SharpCompress.Archives.GZip
return GZipReader.Open(stream);
}
}
}
}

View File

@@ -1,7 +1,6 @@
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives
{
@@ -37,59 +36,15 @@ namespace SharpCompress.Archives
}
streamListener.FireEntryExtractionEnd(archiveEntry);
}
#if !NO_FILE
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(this IArchiveEntry entry, string destinationDirectory,
ExtractionOptions options = null)
{
string destinationFileName;
string file = Path.GetFileName(entry.Key);
string fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
options = options ?? new ExtractionOptions()
{
Overwrite = true
};
if (options.ExtractFullPath)
{
string folder = Path.GetDirectoryName(entry.Key);
string destdir = Path.GetFullPath(
Path.Combine(fullDestinationDirectoryPath, folder)
);
if (!Directory.Exists(destdir))
{
if (!destdir.StartsWith(fullDestinationDirectoryPath))
{
throw new ExtractionException("Entry is trying to create a directory outside of the destination directory.");
}
Directory.CreateDirectory(destdir);
}
destinationFileName = Path.Combine(destdir, file);
}
else
{
destinationFileName = Path.Combine(fullDestinationDirectoryPath, file);
}
if (!entry.IsDirectory)
{
destinationFileName = Path.GetFullPath(destinationFileName);
if (!destinationFileName.StartsWith(fullDestinationDirectoryPath))
{
throw new ExtractionException("Entry is trying to write a file outside of the destination directory.");
}
entry.WriteToFile(destinationFileName, options);
}
ExtractionMethods.WriteEntryToDirectory(entry, destinationDirectory, options,
entry.WriteToFile);
}
/// <summary>
@@ -98,24 +53,15 @@ namespace SharpCompress.Archives
public static void WriteToFile(this IArchiveEntry entry, string destinationFileName,
ExtractionOptions options = null)
{
FileMode fm = FileMode.Create;
options = options ?? new ExtractionOptions()
{
Overwrite = true
};
if (!options.Overwrite)
{
fm = FileMode.CreateNew;
}
using (FileStream fs = File.Open(destinationFileName, fm))
{
entry.WriteTo(fs);
}
entry.PreserveExtractionOptions(destinationFileName, options);
ExtractionMethods.WriteEntryToFile(entry, destinationFileName, options,
(x, fm) =>
{
using (FileStream fs = File.Open(destinationFileName, fm))
{
entry.WriteTo(fs);
}
});
}
#endif
}
}

View File

@@ -1,18 +1,13 @@
#if !NO_FILE
using System.Linq;
using SharpCompress.Readers;
#endif
using System.Linq;
using SharpCompress.Common;
namespace SharpCompress.Archives
{
public static class IArchiveExtensions
{
#if !NO_FILE
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(this IArchive archive, string destinationDirectory,
ExtractionOptions options = null)
{
@@ -21,6 +16,5 @@ namespace SharpCompress.Archives
entry.WriteToDirectory(destinationDirectory, options);
}
}
#endif
}
}

View File

@@ -1,6 +1,4 @@
#if !NO_FILE
using System;
#endif
using System;
using System.IO;
using SharpCompress.Writers;
@@ -8,8 +6,6 @@ namespace SharpCompress.Archives
{
public static class IWritableArchiveExtensions
{
#if !NO_FILE
public static void AddEntry(this IWritableArchive writableArchive,
string entryPath, string filePath)
{
@@ -39,11 +35,7 @@ namespace SharpCompress.Archives
this IWritableArchive writableArchive,
string filePath, string searchPattern = "*.*", SearchOption searchOption = SearchOption.AllDirectories)
{
#if NET35
foreach (var path in Directory.GetFiles(filePath, searchPattern, searchOption))
#else
foreach (var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption))
#endif
{
var fileInfo = new FileInfo(path);
writableArchive.AddEntry(path.Substring(filePath.Length), fileInfo.OpenRead(), true, fileInfo.Length,
@@ -58,6 +50,5 @@ namespace SharpCompress.Archives
}
return writableArchive.AddEntry(key, fileInfo.OpenRead(), true, fileInfo.Length, fileInfo.LastWriteTime);
}
#endif
}
}

View File

@@ -1,7 +1,6 @@
#if !NO_FILE
using System.Collections.Generic;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
@@ -18,7 +17,7 @@ namespace SharpCompress.Archives.Rar
: base(StreamingMode.Seekable, fileInfo.OpenRead(), FixOptions(options))
{
FileInfo = fileInfo;
FileParts = GetVolumeFileParts().ToReadOnly();
FileParts = GetVolumeFileParts().ToArray().ToReadOnly();
}
private static ReaderOptions FixOptions(ReaderOptions options)
@@ -43,4 +42,3 @@ namespace SharpCompress.Archives.Rar
}
}
}
#endif

View File

@@ -1,6 +1,4 @@
#if !NO_FILE
using System.IO;
using System.IO;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Archives.Rar
@@ -25,4 +23,3 @@ namespace SharpCompress.Archives.Rar
}
}
}
#endif

View File

@@ -15,8 +15,6 @@ namespace SharpCompress.Archives.Rar
internal Lazy<IRarUnpack> UnpackV2017 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV2017.Unpack());
internal Lazy<IRarUnpack> UnpackV1 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV1.Unpack());
#if !NO_FILE
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -31,7 +29,6 @@ namespace SharpCompress.Archives.Rar
{
return RarArchiveVolumeFactory.GetParts(file, ReaderOptions);
}
#endif
/// <summary>
/// Takes multiple seekable Streams for a multi-part archive
@@ -63,9 +60,6 @@ namespace SharpCompress.Archives.Rar
public override bool IsSolid => Volumes.First().IsSolidArchive;
#region Creation
#if !NO_FILE
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -73,7 +67,7 @@ namespace SharpCompress.Archives.Rar
/// <param name="options"></param>
public static RarArchive Open(string filePath, ReaderOptions options = null)
{
filePath.CheckNotNullOrEmpty("filePath");
filePath.CheckNotNullOrEmpty(nameof(filePath));
return new RarArchive(new FileInfo(filePath), options ?? new ReaderOptions());
}
@@ -84,10 +78,9 @@ namespace SharpCompress.Archives.Rar
/// <param name="options"></param>
public static RarArchive Open(FileInfo fileInfo, ReaderOptions options = null)
{
fileInfo.CheckNotNull("fileInfo");
fileInfo.CheckNotNull(nameof(fileInfo));
return new RarArchive(fileInfo, options ?? new ReaderOptions());
}
#endif
/// <summary>
/// Takes a seekable Stream as a source
@@ -96,7 +89,7 @@ namespace SharpCompress.Archives.Rar
/// <param name="options"></param>
public static RarArchive Open(Stream stream, ReaderOptions options = null)
{
stream.CheckNotNull("stream");
stream.CheckNotNull(nameof(stream));
return Open(stream.AsEnumerable(), options ?? new ReaderOptions());
}
@@ -107,11 +100,10 @@ namespace SharpCompress.Archives.Rar
/// <param name="options"></param>
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions options = null)
{
streams.CheckNotNull("streams");
streams.CheckNotNull(nameof(streams));
return new RarArchive(streams, options ?? new ReaderOptions());
}
#if !NO_FILE
public static bool IsRarFile(string filePath)
{
return IsRarFile(new FileInfo(filePath));
@@ -128,7 +120,6 @@ namespace SharpCompress.Archives.Rar
return IsRarFile(stream);
}
}
#endif
public static bool IsRarFile(Stream stream, ReaderOptions options = null)
{
@@ -145,4 +136,4 @@ namespace SharpCompress.Archives.Rar
#endregion
}
}
}

View File

@@ -3,11 +3,9 @@ using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Readers;
#if !NO_FILE
using System.Linq;
using System.Text;
using SharpCompress.Common.Rar.Headers;
#endif
namespace SharpCompress.Archives.Rar
{
@@ -25,8 +23,7 @@ namespace SharpCompress.Archives.Rar
yield return part;
}
}
#if !NO_FILE
internal static IEnumerable<RarVolume> GetParts(FileInfo fileInfo, ReaderOptions options)
{
FileInfoRarArchiveVolume part = new FileInfoRarArchiveVolume(fileInfo, options);
@@ -141,7 +138,5 @@ namespace SharpCompress.Archives.Rar
throw new ArgumentException("Filename invalid or next archive could not be found:"
+ fileInfo.FullName);
}
#endif
}
}

View File

@@ -19,12 +19,10 @@ namespace SharpCompress.Archives.Rar
internal override Stream GetCompressedStream()
{
stream.Position = FileHeader.DataStartPosition;
#if !NO_CRYPTO
if (FileHeader.R4Salt != null)
{
return new RarCryptoWrapper(stream, password, FileHeader.R4Salt);
}
#endif
return stream;
}

View File

@@ -13,8 +13,6 @@ namespace SharpCompress.Archives.SevenZip
public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVolume>
{
private ArchiveDatabase database;
#if !NO_FILE
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
@@ -36,7 +34,6 @@ namespace SharpCompress.Archives.SevenZip
fileInfo.CheckNotNull("fileInfo");
return new SevenZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
#endif
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
@@ -48,7 +45,6 @@ namespace SharpCompress.Archives.SevenZip
return new SevenZipArchive(stream, readerOptions ?? new ReaderOptions());
}
#if !NO_FILE
internal SevenZipArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.SevenZip, fileInfo, readerOptions)
{
@@ -75,7 +71,6 @@ namespace SharpCompress.Archives.SevenZip
return IsSevenZipFile(stream);
}
}
#endif
internal SevenZipArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.SevenZip, stream.AsEnumerable(), readerOptions)
@@ -140,7 +135,7 @@ namespace SharpCompress.Archives.SevenZip
{
BinaryReader reader = new BinaryReader(stream);
byte[] signatureBytes = reader.ReadBytes(6);
return signatureBytes.BinaryEquals(SIGNATURE);
return signatureBytes.SequenceEqual(SIGNATURE);
}
protected override IReader CreateReaderForSolidExtraction()
@@ -206,7 +201,7 @@ namespace SharpCompress.Archives.SevenZip
return CreateEntryStream(new ReadOnlySubStream(currentStream, currentItem.Size));
}
}
private class PasswordProvider : IPasswordProvider
{
private readonly string _password;
@@ -214,7 +209,6 @@ namespace SharpCompress.Archives.SevenZip
public PasswordProvider(string password)
{
_password = password;
}
public string CryptoGetTextPassword()

View File

@@ -15,8 +15,6 @@ namespace SharpCompress.Archives.Tar
{
public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
{
#if !NO_FILE
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
@@ -24,7 +22,7 @@ namespace SharpCompress.Archives.Tar
/// <param name="readerOptions"></param>
public static TarArchive Open(string filePath, ReaderOptions readerOptions = null)
{
filePath.CheckNotNullOrEmpty("filePath");
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
@@ -35,10 +33,9 @@ namespace SharpCompress.Archives.Tar
/// <param name="readerOptions"></param>
public static TarArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
{
fileInfo.CheckNotNull("fileInfo");
fileInfo.CheckNotNull(nameof(fileInfo));
return new TarArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
#endif
/// <summary>
/// Takes a seekable Stream as a source
@@ -47,12 +44,10 @@ namespace SharpCompress.Archives.Tar
/// <param name="readerOptions"></param>
public static TarArchive Open(Stream stream, ReaderOptions readerOptions = null)
{
stream.CheckNotNull("stream");
stream.CheckNotNull(nameof(stream));
return new TarArchive(stream, readerOptions ?? new ReaderOptions());
}
#if !NO_FILE
public static bool IsTarFile(string filePath)
{
return IsTarFile(new FileInfo(filePath));
@@ -69,24 +64,22 @@ namespace SharpCompress.Archives.Tar
return IsTarFile(stream);
}
}
#endif
public static bool IsTarFile(Stream stream)
{
try
{
TarHeader tar = new TarHeader(new ArchiveEncoding());
tar.Read(new BinaryReader(stream));
return tar.Name.Length > 0 && Enum.IsDefined(typeof(EntryType), tar.EntryType);
TarHeader tarHeader = new TarHeader(new ArchiveEncoding());
bool readSucceeded = tarHeader.Read(new BinaryReader(stream));
bool isEmptyArchive = tarHeader.Name.Length == 0 && tarHeader.Size == 0 && Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;
}
catch
{
}
return false;
}
#if !NO_FILE
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -101,7 +94,6 @@ namespace SharpCompress.Archives.Tar
{
return new TarVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
}
#endif
/// <summary>
/// Takes multiple seekable Streams for a multi-part archive
@@ -202,4 +194,4 @@ namespace SharpCompress.Archives.Tar
return TarReader.Open(stream);
}
}
}
}

View File

@@ -22,9 +22,7 @@ namespace SharpCompress.Archives.Zip
/// if the compression method is set to deflate
/// </summary>
public CompressionLevel DeflateCompressionLevel { get; set; }
#if !NO_FILE
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
@@ -32,7 +30,7 @@ namespace SharpCompress.Archives.Zip
/// <param name="readerOptions"></param>
public static ZipArchive Open(string filePath, ReaderOptions readerOptions = null)
{
filePath.CheckNotNullOrEmpty("filePath");
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
@@ -43,10 +41,9 @@ namespace SharpCompress.Archives.Zip
/// <param name="readerOptions"></param>
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
{
fileInfo.CheckNotNull("fileInfo");
fileInfo.CheckNotNull(nameof(fileInfo));
return new ZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
#endif
/// <summary>
/// Takes a seekable Stream as a source
@@ -55,12 +52,10 @@ namespace SharpCompress.Archives.Zip
/// <param name="readerOptions"></param>
public static ZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
{
stream.CheckNotNull("stream");
stream.CheckNotNull(nameof(stream));
return new ZipArchive(stream, readerOptions ?? new ReaderOptions());
}
#if !NO_FILE
public static bool IsZipFile(string filePath, string password = null)
{
return IsZipFile(new FileInfo(filePath), password);
@@ -77,7 +72,6 @@ namespace SharpCompress.Archives.Zip
return IsZipFile(stream, password);
}
}
#endif
public static bool IsZipFile(Stream stream, string password = null)
{
@@ -101,9 +95,7 @@ namespace SharpCompress.Archives.Zip
return false;
}
}
#if !NO_FILE
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -119,7 +111,6 @@ namespace SharpCompress.Archives.Zip
{
return new ZipVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
}
#endif
internal ZipArchive()
: base(ArchiveType.Zip)
@@ -211,4 +202,4 @@ namespace SharpCompress.Archives.Zip
return ZipReader.Open(stream, ReaderOptions);
}
}
}
}

View File

@@ -5,7 +5,6 @@ using System.Runtime.CompilerServices;
[assembly: AssemblyTitle("SharpCompress")]
[assembly: AssemblyProduct("SharpCompress")]
[assembly: InternalsVisibleTo("SharpCompress.Test" + SharpCompress.AssemblyInfo.PublicKeySuffix)]
[assembly: InternalsVisibleTo("SharpCompress.Test.Portable" + SharpCompress.AssemblyInfo.PublicKeySuffix)]
[assembly: CLSCompliant(true)]
namespace SharpCompress

View File

@@ -1,119 +0,0 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#if NETCORE
using System.Runtime.CompilerServices;
using System.Threading;
namespace SharpCompress.Buffers
{
/// <summary>
/// Provides a resource pool that enables reusing instances of type <see cref="T:T[]"/>.
/// </summary>
/// <remarks>
/// <para>
/// Renting and returning buffers with an <see cref="ArrayPool{T}"/> can increase performance
/// in situations where arrays are created and destroyed frequently, resulting in significant
/// memory pressure on the garbage collector.
/// </para>
/// <para>
/// This class is thread-safe. All members may be used by multiple threads concurrently.
/// </para>
/// </remarks>
internal abstract class ArrayPool<T>
{
/// <summary>The lazily-initialized shared pool instance.</summary>
private static ArrayPool<T> s_sharedInstance = null;
/// <summary>
/// Retrieves a shared <see cref="ArrayPool{T}"/> instance.
/// </summary>
/// <remarks>
/// The shared pool provides a default implementation of <see cref="ArrayPool{T}"/>
/// that's intended for general applicability. It maintains arrays of multiple sizes, and
/// may hand back a larger array than was actually requested, but will never hand back a smaller
/// array than was requested. Renting a buffer from it with <see cref="Rent"/> will result in an
/// existing buffer being taken from the pool if an appropriate buffer is available or in a new
/// buffer being allocated if one is not available.
/// </remarks>
public static ArrayPool<T> Shared
{
[MethodImpl(MethodImplOptions.AggressiveInlining)]
get { return Volatile.Read(ref s_sharedInstance) ?? EnsureSharedCreated(); }
}
/// <summary>Ensures that <see cref="s_sharedInstance"/> has been initialized to a pool and returns it.</summary>
[MethodImpl(MethodImplOptions.NoInlining)]
private static ArrayPool<T> EnsureSharedCreated()
{
Interlocked.CompareExchange(ref s_sharedInstance, Create(), null);
return s_sharedInstance;
}
/// <summary>
/// Creates a new <see cref="ArrayPool{T}"/> instance using default configuration options.
/// </summary>
/// <returns>A new <see cref="ArrayPool{T}"/> instance.</returns>
public static ArrayPool<T> Create()
{
return new DefaultArrayPool<T>();
}
/// <summary>
/// Creates a new <see cref="ArrayPool{T}"/> instance using custom configuration options.
/// </summary>
/// <param name="maxArrayLength">The maximum length of array instances that may be stored in the pool.</param>
/// <param name="maxArraysPerBucket">
/// The maximum number of array instances that may be stored in each bucket in the pool. The pool
/// groups arrays of similar lengths into buckets for faster access.
/// </param>
/// <returns>A new <see cref="ArrayPool{T}"/> instance with the specified configuration options.</returns>
/// <remarks>
/// The created pool will group arrays into buckets, with no more than <paramref name="maxArraysPerBucket"/>
/// in each bucket and with those arrays not exceeding <paramref name="maxArrayLength"/> in length.
/// </remarks>
public static ArrayPool<T> Create(int maxArrayLength, int maxArraysPerBucket)
{
return new DefaultArrayPool<T>(maxArrayLength, maxArraysPerBucket);
}
/// <summary>
/// Retrieves a buffer that is at least the requested length.
/// </summary>
/// <param name="minimumLength">The minimum length of the array needed.</param>
/// <returns>
/// An <see cref="T:T[]"/> that is at least <paramref name="minimumLength"/> in length.
/// </returns>
/// <remarks>
/// This buffer is loaned to the caller and should be returned to the same pool via
/// <see cref="Return"/> so that it may be reused in subsequent usage of <see cref="Rent"/>.
/// It is not a fatal error to not return a rented buffer, but failure to do so may lead to
/// decreased application performance, as the pool may need to create a new buffer to replace
/// the one lost.
/// </remarks>
public abstract T[] Rent(int minimumLength);
/// <summary>
/// Returns to the pool an array that was previously obtained via <see cref="Rent"/> on the same
/// <see cref="ArrayPool{T}"/> instance.
/// </summary>
/// <param name="array">
/// The buffer previously obtained from <see cref="Rent"/> to return to the pool.
/// </param>
/// <param name="clearArray">
/// If <c>true</c> and if the pool will store the buffer to enable subsequent reuse, <see cref="Return"/>
/// will clear <paramref name="array"/> of its contents so that a subsequent consumer via <see cref="Rent"/>
/// will not see the previous consumer's content. If <c>false</c> or if the pool will release the buffer,
/// the array's contents are left unchanged.
/// </param>
/// <remarks>
/// Once a buffer has been returned to the pool, the caller gives up all ownership of the buffer
/// and must not use it. The reference returned from a given call to <see cref="Rent"/> must only be
/// returned via <see cref="Return"/> once. The default <see cref="ArrayPool{T}"/>
/// may hold onto the returned buffer in order to rent it again, or it may release the returned buffer
/// if it's determined that the pool already has enough buffers stored.
/// </remarks>
public abstract void Return(T[] array, bool clearArray = false);
}
}
#endif

View File

@@ -1,144 +0,0 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#if NETCORE
using System;
namespace SharpCompress.Buffers
{
internal sealed partial class DefaultArrayPool<T> : ArrayPool<T>
{
/// <summary>The default maximum length of each array in the pool (2^20).</summary>
private const int DefaultMaxArrayLength = 1024 * 1024;
/// <summary>The default maximum number of arrays per bucket that are available for rent.</summary>
private const int DefaultMaxNumberOfArraysPerBucket = 50;
/// <summary>Lazily-allocated empty array used when arrays of length 0 are requested.</summary>
private static T[] s_emptyArray; // we support contracts earlier than those with Array.Empty<T>()
private readonly Bucket[] _buckets;
internal DefaultArrayPool() : this(DefaultMaxArrayLength, DefaultMaxNumberOfArraysPerBucket)
{
}
internal DefaultArrayPool(int maxArrayLength, int maxArraysPerBucket)
{
if (maxArrayLength <= 0)
{
throw new ArgumentOutOfRangeException(nameof(maxArrayLength));
}
if (maxArraysPerBucket <= 0)
{
throw new ArgumentOutOfRangeException(nameof(maxArraysPerBucket));
}
// Our bucketing algorithm has a min length of 2^4 and a max length of 2^30.
// Constrain the actual max used to those values.
const int MinimumArrayLength = 0x10, MaximumArrayLength = 0x40000000;
if (maxArrayLength > MaximumArrayLength)
{
maxArrayLength = MaximumArrayLength;
}
else if (maxArrayLength < MinimumArrayLength)
{
maxArrayLength = MinimumArrayLength;
}
// Create the buckets.
int poolId = Id;
int maxBuckets = Utilities.SelectBucketIndex(maxArrayLength);
var buckets = new Bucket[maxBuckets + 1];
for (int i = 0; i < buckets.Length; i++)
{
buckets[i] = new Bucket(Utilities.GetMaxSizeForBucket(i), maxArraysPerBucket, poolId);
}
_buckets = buckets;
}
/// <summary>Gets an ID for the pool to use with events.</summary>
private int Id => GetHashCode();
public override T[] Rent(int minimumLength)
{
// Arrays can't be smaller than zero. We allow requesting zero-length arrays (even though
// pooling such an array isn't valuable) as it's a valid length array, and we want the pool
// to be usable in general instead of using `new`, even for computed lengths.
if (minimumLength < 0)
{
throw new ArgumentOutOfRangeException(nameof(minimumLength));
}
else if (minimumLength == 0)
{
// No need for events with the empty array. Our pool is effectively infinite
// and we'll never allocate for rents and never store for returns.
return s_emptyArray ?? (s_emptyArray = new T[0]);
}
T[] buffer = null;
int index = Utilities.SelectBucketIndex(minimumLength);
if (index < _buckets.Length)
{
// Search for an array starting at the 'index' bucket. If the bucket is empty, bump up to the
// next higher bucket and try that one, but only try at most a few buckets.
const int MaxBucketsToTry = 2;
int i = index;
do
{
// Attempt to rent from the bucket. If we get a buffer from it, return it.
buffer = _buckets[i].Rent();
if (buffer != null)
{
return buffer;
}
}
while (++i < _buckets.Length && i != index + MaxBucketsToTry);
// The pool was exhausted for this buffer size. Allocate a new buffer with a size corresponding
// to the appropriate bucket.
buffer = new T[_buckets[index]._bufferLength];
}
else
{
// The request was for a size too large for the pool. Allocate an array of exactly the requested length.
// When it's returned to the pool, we'll simply throw it away.
buffer = new T[minimumLength];
}
return buffer;
}
public override void Return(T[] array, bool clearArray = false)
{
if (array == null)
{
throw new ArgumentNullException(nameof(array));
}
else if (array.Length == 0)
{
// Ignore empty arrays. When a zero-length array is rented, we return a singleton
// rather than actually taking a buffer out of the lowest bucket.
return;
}
// Determine with what bucket this array length is associated
int bucket = Utilities.SelectBucketIndex(array.Length);
// If we can tell that the buffer was allocated, drop it. Otherwise, check if we have space in the pool
if (bucket < _buckets.Length)
{
// Clear the array if the user requests
if (clearArray)
{
Array.Clear(array, 0, array.Length);
}
// Return the buffer to its bucket. In the future, we might consider having Return return false
// instead of dropping a bucket, in which case we could try to return to a lower-sized bucket,
// just as how in Rent we allow renting from a higher-sized bucket.
_buckets[bucket].Return(array);
}
}
}
}
#endif

View File

@@ -1,111 +0,0 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#if NETCORE
using System;
using System.Diagnostics;
using System.Threading;
namespace SharpCompress.Buffers
{
internal sealed partial class DefaultArrayPool<T> : ArrayPool<T>
{
/// <summary>Provides a thread-safe bucket containing buffers that can be Rent'd and Return'd.</summary>
private sealed class Bucket
{
internal readonly int _bufferLength;
private readonly T[][] _buffers;
private readonly int _poolId;
private SpinLock _lock; // do not make this readonly; it's a mutable struct
private int _index;
/// <summary>
/// Creates the pool with numberOfBuffers arrays where each buffer is of bufferLength length.
/// </summary>
internal Bucket(int bufferLength, int numberOfBuffers, int poolId)
{
_lock = new SpinLock(Debugger.IsAttached); // only enable thread tracking if debugger is attached; it adds non-trivial overheads to Enter/Exit
_buffers = new T[numberOfBuffers][];
_bufferLength = bufferLength;
_poolId = poolId;
}
/// <summary>Gets an ID for the bucket to use with events.</summary>
internal int Id => GetHashCode();
/// <summary>Takes an array from the bucket. If the bucket is empty, returns null.</summary>
internal T[] Rent()
{
T[][] buffers = _buffers;
T[] buffer = null;
// While holding the lock, grab whatever is at the next available index and
// update the index. We do as little work as possible while holding the spin
// lock to minimize contention with other threads. The try/finally is
// necessary to properly handle thread aborts on platforms which have them.
bool lockTaken = false, allocateBuffer = false;
try
{
_lock.Enter(ref lockTaken);
if (_index < buffers.Length)
{
buffer = buffers[_index];
buffers[_index++] = null;
allocateBuffer = buffer == null;
}
}
finally
{
if (lockTaken) _lock.Exit(false);
}
// While we were holding the lock, we grabbed whatever was at the next available index, if
// there was one. If we tried and if we got back null, that means we hadn't yet allocated
// for that slot, in which case we should do so now.
if (allocateBuffer)
{
buffer = new T[_bufferLength];
}
return buffer;
}
/// <summary>
/// Attempts to return the buffer to the bucket. If successful, the buffer will be stored
/// in the bucket and true will be returned; otherwise, the buffer won't be stored, and false
/// will be returned.
/// </summary>
internal void Return(T[] array)
{
// Check to see if the buffer is the correct size for this bucket
if (array.Length != _bufferLength)
{
throw new ArgumentException("Buffer not from pool", nameof(array));
}
// While holding the spin lock, if there's room available in the bucket,
// put the buffer into the next available slot. Otherwise, we just drop it.
// The try/finally is necessary to properly handle thread aborts on platforms
// which have them.
bool lockTaken = false;
try
{
_lock.Enter(ref lockTaken);
if (_index != 0)
{
_buffers[--_index] = array;
}
}
finally
{
if (lockTaken) _lock.Exit(false);
}
}
}
}
}
#endif

View File

@@ -1,38 +0,0 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#if NETCORE
using System.Diagnostics;
using System.Runtime.CompilerServices;
namespace SharpCompress.Buffers
{
internal static class Utilities
{
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static int SelectBucketIndex(int bufferSize)
{
Debug.Assert(bufferSize > 0);
uint bitsRemaining = ((uint)bufferSize - 1) >> 4;
int poolIndex = 0;
if (bitsRemaining > 0xFFFF) { bitsRemaining >>= 16; poolIndex = 16; }
if (bitsRemaining > 0xFF) { bitsRemaining >>= 8; poolIndex += 8; }
if (bitsRemaining > 0xF) { bitsRemaining >>= 4; poolIndex += 4; }
if (bitsRemaining > 0x3) { bitsRemaining >>= 2; poolIndex += 2; }
if (bitsRemaining > 0x1) { bitsRemaining >>= 1; poolIndex += 1; }
return poolIndex + (int)bitsRemaining;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static int GetMaxSizeForBucket(int binIndex)
{
int maxSize = 16 << binIndex;
Debug.Assert(maxSize >= 0);
return maxSize;
}
}
}
#endif

View File

@@ -28,42 +28,30 @@ namespace SharpCompress.Common
public ArchiveEncoding()
{
Default = Encoding.UTF8;
Password = Encoding.UTF8;
Default = Encoding.GetEncoding(437);
Password = Encoding.GetEncoding(437);
}
#if NETSTANDARD1_3 || NETSTANDARD2_0
static ArchiveEncoding()
{
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
}
#endif
public string Decode(byte[] bytes)
{
return Decode(bytes, 0, bytes.Length);
}
public string Decode437(byte[] bytes)
{
#if NETSTANDARD1_0
return Decode(bytes, 0, bytes.Length);
#else
//allow forced and custom to override this.
if (CustomDecoder != null || Forced != null)
{
return Decode(bytes, 0, bytes.Length);
}
var extendedAsciiEncoding = Encoding.GetEncoding(437);
return extendedAsciiEncoding.GetString(bytes, 0, bytes.Length);
#endif
}
public string Decode(byte[] bytes, int start, int length)
{
return GetDecoder().Invoke(bytes, start, length);
}
public string DecodeUTF8(byte[] bytes)
{
return Encoding.UTF8.GetString(bytes, 0, bytes.Length);
}
public byte[] Encode(string str)
{
return GetEncoding().GetBytes(str);
@@ -79,4 +67,4 @@ namespace SharpCompress.Common
return CustomDecoder ?? ((bytes, index, count) => GetEncoding().GetString(bytes, index, count));
}
}
}
}

View File

@@ -15,6 +15,11 @@ namespace SharpCompress.Common
/// </summary>
public abstract string Key { get; }
/// <summary>
/// The target of a symlink entry internal to the Archive. Will be null if not a symlink.
/// </summary>
public abstract string LinkTarget { get; }
/// <summary>
/// The compressed file size
/// </summary>
@@ -56,7 +61,7 @@ namespace SharpCompress.Common
public abstract bool IsEncrypted { get; }
/// <summary>
/// Entry is password protected and encrypted and cannot be extracted.
/// Entry is directory.
/// </summary>
public abstract bool IsDirectory { get; }
@@ -83,4 +88,4 @@ namespace SharpCompress.Common
/// </summary>
public virtual int? Attrib => throw new NotImplementedException();
}
}
}

View File

@@ -47,9 +47,7 @@ namespace SharpCompress.Common
public override bool CanWrite => false;
public override void Flush()
{
throw new NotSupportedException();
public override void Flush() {
}
public override long Length => _stream.Length;
@@ -66,6 +64,16 @@ namespace SharpCompress.Common
return read;
}
public override int ReadByte()
{
int value = _stream.ReadByte();
if (value == -1)
{
_completed = true;
}
return value;
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException();

View File

@@ -0,0 +1,93 @@
using System;
using System.IO;

namespace SharpCompress.Common
{
    /// <summary>
    /// Shared helpers for extracting archive entries to disk, including
    /// containment checks that prevent "Zip-Slip" style path traversal.
    /// </summary>
    internal static class ExtractionMethods
    {
        /// <summary>
        /// Extract to specific directory, retaining filename.
        /// </summary>
        /// <param name="entry">The archive entry being extracted.</param>
        /// <param name="destinationDirectory">Root directory extraction must stay within.</param>
        /// <param name="options">Extraction options; defaults to Overwrite = true when null.</param>
        /// <param name="write">Callback that writes the entry to the resolved file path.</param>
        /// <exception cref="ExtractionException">
        /// Thrown when the entry's path would escape <paramref name="destinationDirectory"/>.
        /// </exception>
        public static void WriteEntryToDirectory(IEntry entry, string destinationDirectory,
                                                 ExtractionOptions options, Action<string, ExtractionOptions> write)
        {
            string destinationFileName;
            string file = Path.GetFileName(entry.Key);
            string fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);

            options = options ?? new ExtractionOptions()
            {
                Overwrite = true
            };

            if (options.ExtractFullPath)
            {
                string folder = Path.GetDirectoryName(entry.Key);
                string destdir = Path.GetFullPath(
                    Path.Combine(fullDestinationDirectoryPath, folder)
                );

                if (!Directory.Exists(destdir))
                {
                    if (!IsWithinDirectory(destdir, fullDestinationDirectoryPath))
                    {
                        throw new ExtractionException("Entry is trying to create a directory outside of the destination directory.");
                    }

                    Directory.CreateDirectory(destdir);
                }
                destinationFileName = Path.Combine(destdir, file);
            }
            else
            {
                destinationFileName = Path.Combine(fullDestinationDirectoryPath, file);
            }

            if (!entry.IsDirectory)
            {
                destinationFileName = Path.GetFullPath(destinationFileName);

                if (!IsWithinDirectory(destinationFileName, fullDestinationDirectoryPath))
                {
                    throw new ExtractionException("Entry is trying to write a file outside of the destination directory.");
                }
                write(destinationFileName, options);
            }
            else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
            {
                Directory.CreateDirectory(destinationFileName);
            }
        }

        /// <summary>
        /// Ordinal, separator-aware containment check. A bare StartsWith would let a
        /// sibling path such as "C:\dest-evil" pass a check against root "C:\dest";
        /// requiring the root to end in a directory separator closes that hole.
        /// </summary>
        private static bool IsWithinDirectory(string candidatePath, string rootPath)
        {
            if (string.Equals(candidatePath, rootPath, StringComparison.Ordinal))
            {
                return true;
            }
            string rootWithSeparator =
                rootPath.EndsWith(Path.DirectorySeparatorChar.ToString(), StringComparison.Ordinal)
                    ? rootPath
                    : rootPath + Path.DirectorySeparatorChar;
            return candidatePath.StartsWith(rootWithSeparator, StringComparison.Ordinal);
        }

        /// <summary>
        /// Writes a single entry to <paramref name="destinationFileName"/>, dispatching
        /// symlink entries to the configured symlink writer.
        /// </summary>
        /// <param name="entry">The archive entry being extracted.</param>
        /// <param name="destinationFileName">Fully resolved target path.</param>
        /// <param name="options">Extraction options; defaults to Overwrite = true when null.</param>
        /// <param name="openAndWrite">Callback that opens the file with the given mode and writes the data.</param>
        /// <exception cref="ExtractionException">
        /// Thrown for a symlink entry when no WriteSymbolicLink delegate is configured.
        /// </exception>
        public static void WriteEntryToFile(IEntry entry, string destinationFileName,
                                            ExtractionOptions options,
                                            Action<string, FileMode> openAndWrite)
        {
            // Apply the default BEFORE branching: the symlink branch also reads
            // options, and a null options there used to throw NullReferenceException.
            options = options ?? new ExtractionOptions()
            {
                Overwrite = true
            };

            if (entry.LinkTarget != null)
            {
                if (null == options.WriteSymbolicLink)
                {
                    throw new ExtractionException("Entry is a symbolic link but ExtractionOptions.WriteSymbolicLink delegate is null");
                }
                options.WriteSymbolicLink(destinationFileName, entry.LinkTarget);
            }
            else
            {
                FileMode fm = FileMode.Create;
                if (!options.Overwrite)
                {
                    fm = FileMode.CreateNew;
                }
                openAndWrite(destinationFileName, fm);
                entry.PreserveExtractionOptions(destinationFileName, options);
            }
        }
    }
}

View File

@@ -1,4 +1,4 @@
namespace SharpCompress.Readers
namespace SharpCompress.Common
{
public class ExtractionOptions
{
@@ -21,5 +21,14 @@
/// preserve windows file attributes
/// </summary>
public bool PreserveAttributes { get; set; }
/// <summary>
/// Delegate for writing symbolic links to disk.
/// sourcePath is where the symlink is created.
/// targetPath is what the symlink refers to.
/// </summary>
public delegate void SymbolicLinkWriterDelegate(string sourcePath, string targetPath);
public SymbolicLinkWriterDelegate WriteSymbolicLink;
}
}

View File

@@ -10,11 +10,11 @@ namespace SharpCompress.Common
}
internal ArchiveEncoding ArchiveEncoding { get; }
internal abstract string FilePartName { get; }
internal abstract Stream GetCompressedStream();
internal abstract Stream GetRawStream();
internal bool Skipped { get; set; }
}
}
}

View File

@@ -20,6 +20,8 @@ namespace SharpCompress.Common.GZip
public override string Key => _filePart.FilePartName;
public override string LinkTarget => null;
public override long CompressedSize => 0;
public override long Size => 0;

View File

@@ -1,11 +1,10 @@
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Converters;
using System.Text;
namespace SharpCompress.Common.GZip
{
@@ -60,7 +59,7 @@ namespace SharpCompress.Common.GZip
throw new ZlibException("Bad GZIP header.");
}
Int32 timet = DataConverter.LittleEndian.GetInt32(header, 4);
int timet = BinaryPrimitives.ReadInt32LittleEndian(header.AsSpan(4));
DateModified = TarHeader.EPOCH.AddSeconds(timet);
if ((header[3] & 0x04) == 0x04)
{
@@ -69,7 +68,7 @@ namespace SharpCompress.Common.GZip
Int16 extraLength = (Int16)(header[0] + header[1] * 256);
byte[] extra = new byte[extraLength];
if (!stream.ReadFully(extra))
{
throw new ZlibException("Unexpected end-of-file reading GZIP header.");
@@ -117,4 +116,4 @@ namespace SharpCompress.Common.GZip
return ArchiveEncoding.Decode(buffer);
}
}
}
}

View File

@@ -10,13 +10,11 @@ namespace SharpCompress.Common.GZip
{
}
#if !NO_FILE
/// <summary>
/// Creates a volume over the given file, opening it for read access.
/// </summary>
/// <param name="fileInfo">The gzip file on disk to read.</param>
/// <param name="options">Reader options; mutated here so the opened stream is closed on dispose.</param>
public GZipVolume(FileInfo fileInfo, ReaderOptions options)
: base(fileInfo.OpenRead(), options)
{
    // This volume owns the FileStream it just opened, so it must not be left open.
    options.LeaveStreamOpen = false;
}
#endif
public override bool IsFirstVolume => true;

View File

@@ -1,6 +1,4 @@
#if !NO_FILE
using System.IO;
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common
@@ -48,4 +46,3 @@ namespace SharpCompress.Common
}
}
}
#endif

View File

@@ -10,6 +10,7 @@ namespace SharpCompress.Common
long Crc { get; }
DateTime? CreatedTime { get; }
string Key { get; }
string LinkTarget { get; }
bool IsDirectory { get; }
bool IsEncrypted { get; }
bool IsSplitAfter { get; }

View File

@@ -1,9 +1,5 @@
using System;
#if !NO_FILE
using System.IO;
#endif
namespace SharpCompress.Common
{
public interface IVolume : IDisposable

View File

@@ -104,24 +104,27 @@ namespace SharpCompress.Common.Rar.Headers
throw new InvalidFormatException("rar5 header size / extra size inconsistency");
}
isEncryptedRar5 = false;
while (RemainingHeaderBytes(reader) > 0) {
var size = reader.ReadRarVIntUInt16();
int n = RemainingHeaderBytes(reader);
var type = reader.ReadRarVIntUInt16();
switch (type) {
//TODO
// case 1: // file encryption
// {
// var version = reader.ReadRarVIntByte();
// if (version != 0) throw new InvalidFormatException("unknown encryption algorithm "+ version);
//
// }
// break;
// case 2: // file hash
// {
//
// }
// break;
//TODO
case 1: // file encryption
{
isEncryptedRar5 = true;
//var version = reader.ReadRarVIntByte();
//if (version != 0) throw new InvalidFormatException("unknown encryption algorithm " + version);
}
break;
// case 2: // file hash
// {
//
// }
// break;
case 3: // file time
{
ushort flags = reader.ReadRarVIntUInt16();
@@ -192,17 +195,12 @@ namespace SharpCompress.Common.Rar.Headers
private static string ConvertPathV5(string path)
{
#if NO_FILE
// not sure what to do here
throw new NotImplementedException("TODO");
#else
if (Path.DirectorySeparatorChar == '\\')
{
// replace embedded \\ with valid filename char
return path.Replace('\\', '-').Replace('/', '\\');
}
return path;
#endif
}
@@ -358,9 +356,6 @@ namespace SharpCompress.Common.Rar.Headers
private static string ConvertPathV4(string path)
{
#if NO_FILE
return path.Replace('\\', '/');
#else
if (Path.DirectorySeparatorChar == '/')
{
return path.Replace('\\', '/');
@@ -370,7 +365,6 @@ namespace SharpCompress.Common.Rar.Headers
return path.Replace('/', '\\');
}
return path;
#endif
}
public override string ToString()
@@ -435,8 +429,8 @@ namespace SharpCompress.Common.Rar.Headers
public bool IsDirectory => HasFlag(IsRar5 ? FileFlagsV5.DIRECTORY : FileFlagsV4.DIRECTORY);
//!!! TODO rar5
public bool IsEncrypted => HasFlag(FileFlagsV4.PASSWORD);
private bool isEncryptedRar5 = false;
public bool IsEncrypted => IsRar5 ? isEncryptedRar5: HasFlag(FileFlagsV4.PASSWORD);
internal DateTime? FileLastModifiedTime { get; private set; }

View File

@@ -48,15 +48,11 @@ namespace SharpCompress.Common.Rar.Headers
}
else
{
#if !NO_CRYPTO
if (Options.Password == null)
{
throw new CryptographicException("Encrypted Rar archive has no password specified.");
}
reader = new RarCryptoBinaryReader(stream, Options.Password);
#else
throw new CryptographicException("Rar encryption unsupported on this platform");
#endif
}
var header = RarHeader.TryReadBase(reader, _isRar5, Options.ArchiveEncoding);
@@ -138,11 +134,7 @@ namespace SharpCompress.Common.Rar.Headers
}
else
{
#if !NO_CRYPTO
fh.PackedStream = new RarCryptoWrapper(ms, Options.Password, fh.R4Salt);
#else
throw new NotSupportedException("RarCrypto not supported");
#endif
}
}
break;

View File

@@ -1,5 +1,4 @@
#if !NO_CRYPTO
using System.Collections.Generic;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Common.Rar
@@ -111,5 +110,4 @@ namespace SharpCompress.Common.Rar
ClearQueue();
}
}
}
#endif
}

View File

@@ -1,5 +1,3 @@
#if !NO_CRYPTO
using System;
using System.Collections.Generic;
using System.IO;
@@ -52,10 +50,10 @@ namespace SharpCompress.Common.Rar
if (sizeToRead > 0)
{
int alignedSize = sizeToRead + ((~sizeToRead + 1) & 0xf);
byte[] cipherText = new byte[RarRijndael.CRYPTO_BLOCK_SIZE];
for (int i = 0; i < alignedSize / 16; i++)
{
//long ax = System.currentTimeMillis();
byte[] cipherText = new byte[RarRijndael.CRYPTO_BLOCK_SIZE];
_actualStream.Read(cipherText, 0, RarRijndael.CRYPTO_BLOCK_SIZE);
var readBytes = _rijndael.ProcessBlock(cipherText);
@@ -96,4 +94,3 @@ namespace SharpCompress.Common.Rar
}
}
}
#endif

View File

@@ -22,6 +22,8 @@ namespace SharpCompress.Common.Rar
/// </summary>
public override string Key => FileHeader.FileName;
public override string LinkTarget => null;
/// <summary>
/// The entry last modified time in the archive, if recorded
/// </summary>

View File

@@ -1,7 +1,4 @@
#if !NO_CRYPTO
using System;
using System.Collections.Generic;
using System.Linq;
using System;
using System.Security.Cryptography;
using System.Text;
using SharpCompress.Crypto;
@@ -23,12 +20,6 @@ namespace SharpCompress.Common.Rar
_salt = salt;
}
private byte[] ComputeHash(byte[] input)
{
var sha = SHA1.Create();
return sha.ComputeHash(input);
}
private void Initialize()
{
@@ -47,28 +38,27 @@ namespace SharpCompress.Common.Rar
rawPassword[i + rawLength] = _salt[i];
}
const int noOfRounds = (1 << 18);
IList<byte> bytes = new List<byte>();
const int iblock = 3;
byte[] digest;
byte[] data = new byte[(rawPassword.Length + iblock) * noOfRounds];
//TODO slow code below, find ways to optimize
for (int i = 0; i < noOfRounds; i++)
{
bytes.AddRange(rawPassword);
rawPassword.CopyTo(data, i * (rawPassword.Length + iblock));
bytes.AddRange(new[]
data[i * (rawPassword.Length + iblock) + rawPassword.Length + 0] = (byte)i;
data[i * (rawPassword.Length + iblock) + rawPassword.Length + 1] = (byte)(i >> 8);
data[i * (rawPassword.Length + iblock) + rawPassword.Length + 2] = (byte)(i >> CRYPTO_BLOCK_SIZE);
if (i % (noOfRounds / CRYPTO_BLOCK_SIZE) == 0)
{
(byte) i, (byte) (i >> 8), (byte) (i >> CRYPTO_BLOCK_SIZE)
});
if (i%(noOfRounds/CRYPTO_BLOCK_SIZE) == 0)
{
digest = ComputeHash(bytes.ToArray());
_aesInitializationVector[i/(noOfRounds/CRYPTO_BLOCK_SIZE)] = digest[19];
digest = SHA1.Create().ComputeHash(data, 0, (i + 1) * (rawPassword.Length + iblock));
_aesInitializationVector[i / (noOfRounds / CRYPTO_BLOCK_SIZE)] = digest[19];
}
}
digest = ComputeHash(bytes.ToArray());
digest = SHA1.Create().ComputeHash(data);
//slow code ends
byte[] aesKey = new byte[CRYPTO_BLOCK_SIZE];
@@ -98,19 +88,20 @@ namespace SharpCompress.Common.Rar
public byte[] ProcessBlock(byte[] cipherText)
{
var plainText = new byte[CRYPTO_BLOCK_SIZE];
var decryptedBytes = new List<byte>();
byte[] decryptedBytes = new byte[CRYPTO_BLOCK_SIZE];
_rijndael.ProcessBlock(cipherText, 0, plainText, 0);
for (int j = 0; j < plainText.Length; j++)
for (int j = 0; j < CRYPTO_BLOCK_SIZE; j++)
{
decryptedBytes.Add((byte) (plainText[j] ^ _aesInitializationVector[j%16])); //32:114, 33:101
decryptedBytes[j] = (byte)(plainText[j] ^ _aesInitializationVector[j % 16]); //32:114, 33:101
}
for (int j = 0; j < _aesInitializationVector.Length; j++)
{
_aesInitializationVector[j] = cipherText[j];
}
return decryptedBytes.ToArray();
return decryptedBytes;
}
public void Dispose()
@@ -118,4 +109,3 @@ namespace SharpCompress.Common.Rar
}
}
}
#endif

View File

@@ -22,7 +22,7 @@ namespace SharpCompress.Common.SevenZip
internal List<long> _packStreamStartPositions = new List<long>();
internal List<int> _folderStartFileIndex = new List<int>();
internal List<int> _fileIndexToFolderIndexMap = new List<int>();
internal IPasswordProvider PasswordProvider { get; }
public ArchiveDatabase(IPasswordProvider passwordProvider)
@@ -152,13 +152,14 @@ namespace SharpCompress.Common.SevenZip
{
int packStreamIndex = folder._firstPackStreamId;
long folderStartPackPos = GetFolderStreamPos(folder, 0);
List<long> packSizes = new List<long>();
for (int j = 0; j < folder._packStreams.Count; j++)
int count = folder._packStreams.Count;
long[] packSizes = new long[count];
for (int j = 0; j < count; j++)
{
packSizes.Add(_packSizes[packStreamIndex + j]);
packSizes[j] = _packSizes[packStreamIndex + j];
}
return DecoderStreamHelper.CreateDecoderStream(stream, folderStartPackPos, packSizes.ToArray(), folder, pw);
return DecoderStreamHelper.CreateDecoderStream(stream, folderStartPackPos, packSizes, folder, pw);
}
private long GetFolderPackStreamSize(int folderIndex, int streamIndex)
@@ -179,4 +180,4 @@ namespace SharpCompress.Common.SevenZip
return 0;
}
}
}
}

View File

@@ -1449,13 +1449,14 @@ namespace SharpCompress.Common.SevenZip
CFolder folderInfo = db._folders[folderIndex];
int packStreamIndex = db._folders[folderIndex]._firstPackStreamId;
long folderStartPackPos = db.GetFolderStreamPos(folderInfo, 0);
List<long> packSizes = new List<long>();
for (int j = 0; j < folderInfo._packStreams.Count; j++)
var count = folderInfo._packStreams.Count;
long[] packSizes = new long[count];
for (int j = 0; j < count; j++)
{
packSizes.Add(db._packSizes[packStreamIndex + j]);
packSizes[j] = db._packSizes[packStreamIndex + j];
}
s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes.ToArray(), folderInfo,
s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes, folderInfo,
db.PasswordProvider);
_cachedStreams.Add(folderIndex, s);
}
@@ -1553,15 +1554,16 @@ namespace SharpCompress.Common.SevenZip
int packStreamIndex = db._folders[folderIndex]._firstPackStreamId;
long folderStartPackPos = db.GetFolderStreamPos(folderInfo, 0);
List<long> packSizes = new List<long>();
for (int j = 0; j < folderInfo._packStreams.Count; j++)
var count = folderInfo._packStreams.Count;
long[] packSizes = new long[count];
for (int j = 0; j < count; j++)
{
packSizes.Add(db._packSizes[packStreamIndex + j]);
packSizes[j] = db._packSizes[packStreamIndex + j];
}
// TODO: If the decoding fails the last file may be extracted incompletely. Delete it?
Stream s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes.ToArray(),
Stream s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes,
folderInfo, db.PasswordProvider);
byte[] buffer = new byte[4 << 10];
for (;;)

View File

@@ -18,6 +18,8 @@ namespace SharpCompress.Common.SevenZip
public override string Key => FilePart.Header.Name;
public override string LinkTarget => null;
public override long CompressedSize => 0;
public override long Size => FilePart.Header.Size;

View File

@@ -1,7 +1,7 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Common.Tar.Headers
{
@@ -15,6 +15,7 @@ namespace SharpCompress.Common.Tar.Headers
}
internal string Name { get; set; }
internal string LinkName { get; set; }
//internal int Mode { get; set; }
//internal int UserId { get; set; }
@@ -38,16 +39,17 @@ namespace SharpCompress.Common.Tar.Headers
WriteOctalBytes(0, buffer, 116, 8); // group ID
//ArchiveEncoding.UTF8.GetBytes("magic").CopyTo(buffer, 257);
if (Name.Length > 100)
var nameByteCount = ArchiveEncoding.GetEncoding().GetByteCount(Name);
if (nameByteCount > 100)
{
// Set mock filename and filetype to indicate the next block is the actual name of the file
WriteStringBytes("././@LongLink", buffer, 0, 100);
buffer[156] = (byte)EntryType.LongName;
WriteOctalBytes(Name.Length + 1, buffer, 124, 12);
WriteOctalBytes(nameByteCount + 1, buffer, 124, 12);
}
else
{
WriteStringBytes(Name, buffer, 0, 100);
WriteStringBytes(ArchiveEncoding.Encode(Name), buffer, 100);
WriteOctalBytes(Size, buffer, 124, 12);
var time = (long)(LastModifiedTime.ToUniversalTime() - EPOCH).TotalSeconds;
WriteOctalBytes(time, buffer, 136, 12);
@@ -55,11 +57,10 @@ namespace SharpCompress.Common.Tar.Headers
if (Size >= 0x1FFFFFFFF)
{
byte[] bytes = DataConverter.BigEndian.GetBytes(Size);
var bytes12 = new byte[12];
bytes.CopyTo(bytes12, 12 - bytes.Length);
Span<byte> bytes12 = stackalloc byte[12];
BinaryPrimitives.WriteInt64BigEndian(bytes12.Slice(4), Size);
bytes12[0] |= 0x80;
bytes12.CopyTo(buffer, 124);
bytes12.CopyTo(buffer.AsSpan(124));
}
}
@@ -68,10 +69,17 @@ namespace SharpCompress.Common.Tar.Headers
output.Write(buffer, 0, buffer.Length);
if (Name.Length > 100)
if (nameByteCount > 100)
{
WriteLongFilenameHeader(output);
Name = Name.Substring(0, 100);
// truncate to a short name of fewer than 100 bytes minus [max bytes of one character].
// subtracting bytes is needed because preventing infinite loop(example code is here).
//
// var bytes = Encoding.UTF8.GetBytes(new string(0x3042, 100));
// var truncated = Encoding.UTF8.GetBytes(Encoding.UTF8.GetString(bytes, 0, 100));
//
// and then infinite recursion occurs in WriteLongFilenameHeader because truncated.Length is 102.
Name = ArchiveEncoding.Decode(ArchiveEncoding.Encode(Name), 0, 100 - ArchiveEncoding.GetEncoding().GetMaxByteCount(1));
Write(output);
}
}
@@ -98,6 +106,12 @@ namespace SharpCompress.Common.Tar.Headers
return false;
}
// for symlinks, additionally read the linkname
if (ReadEntryType(buffer) == EntryType.SymLink)
{
LinkName = ArchiveEncoding.Decode(buffer, 157, 100).TrimNulls();
}
if (ReadEntryType(buffer) == EntryType.LongName)
{
Name = ReadLongName(reader, buffer);
@@ -161,8 +175,9 @@ namespace SharpCompress.Common.Tar.Headers
{
if ((buffer[124] & 0x80) == 0x80) // if size in binary
{
return DataConverter.BigEndian.GetInt64(buffer, 0x80);
return BinaryPrimitives.ReadInt64BigEndian(buffer.AsSpan(0x80));
}
return ReadAsciiInt64Base8(buffer, 124, 11);
}
@@ -177,11 +192,18 @@ namespace SharpCompress.Common.Tar.Headers
return buffer;
}
/// <summary>
/// Copies <paramref name="name"/> into the start of <paramref name="buffer"/> and
/// zero-fills the remainder of the <paramref name="length"/>-byte field.
/// The copy is clamped to <paramref name="length"/>: the original copied the whole
/// name first, so an over-long name would spill into the following tar header
/// fields before the Slice call failed with a negative length.
/// </summary>
private static void WriteStringBytes(ReadOnlySpan<byte> name, Span<byte> buffer, int length)
{
    int count = Math.Min(length, name.Length);
    name.Slice(0, count).CopyTo(buffer);
    buffer.Slice(count, length - count).Fill(0);
}
private static void WriteStringBytes(string name, byte[] buffer, int offset, int length)
{
int i;
for (i = 0; i < length - 1 && i < name.Length; ++i)
for (i = 0; i < length && i < name.Length; ++i)
{
buffer[offset + i] = (byte)name[i];
}
@@ -272,4 +294,4 @@ namespace SharpCompress.Common.Tar.Headers
public string Magic { get; set; }
}
}
}

View File

@@ -23,6 +23,8 @@ namespace SharpCompress.Common.Tar
public override string Key => _filePart.Header.Name;
public override string LinkTarget => _filePart.Header.LinkName;
public override long CompressedSize => _filePart.Header.Size;
public override long Size => _filePart.Header.Size;

View File

@@ -1,16 +1,16 @@
using System;
using SharpCompress.IO;
using System;
using System.IO;
namespace SharpCompress.Common.Tar
{
internal class TarReadOnlySubStream : Stream
internal class TarReadOnlySubStream : NonDisposingStream
{
private bool _isDisposed;
private long _amountRead;
public TarReadOnlySubStream(Stream stream, long bytesToRead)
public TarReadOnlySubStream(Stream stream, long bytesToRead) : base(stream, throwOnDispose: false)
{
Stream = stream;
BytesLeftToRead = bytesToRead;
}
@@ -36,12 +36,11 @@ namespace SharpCompress.Common.Tar
var buffer = new byte[skipBytes];
Stream.ReadFully(buffer);
}
base.Dispose(disposing);
}
private long BytesLeftToRead { get; set; }
public Stream Stream { get; }
public override bool CanRead => true;
public override bool CanSeek => false;
@@ -72,6 +71,22 @@ namespace SharpCompress.Common.Tar
return read;
}
/// <summary>
/// Reads one byte from the underlying stream, bounded by the sub-stream window.
/// </summary>
public override int ReadByte()
{
    // Window exhausted: report end-of-stream without touching the base stream.
    if (BytesLeftToRead <= 0)
    {
        return -1;
    }
    int result = Stream.ReadByte();
    if (result == -1)
    {
        return result;
    }
    --BytesLeftToRead;
    ++_amountRead;
    return result;
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException();

View File

@@ -1,4 +1,5 @@
using System.IO;
using System;
using System.IO;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -33,15 +34,18 @@ namespace SharpCompress.Common
/// </summary>
public virtual bool IsMultiVolume => true;
private bool _disposed;
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
_actualStream.Dispose();
}
}
public void Dispose()
{
if (!_disposed)
{
_actualStream.Dispose();
_disposed = true;
}
Dispose(true);
GC.SuppressFinalize(this);
}
}
}

View File

@@ -34,15 +34,23 @@ namespace SharpCompress.Common.Zip.Headers
byte[] extra = reader.ReadBytes(extraLength);
byte[] comment = reader.ReadBytes(commentLength);
// According to .ZIP File Format Specification
//
// For example: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
//
// Bit 11: Language encoding flag (EFS). If this bit is set,
// the filename and comment fields for this file
// MUST be encoded using UTF-8. (see APPENDIX D)
if (Flags.HasFlag(HeaderFlags.Efs))
{
Name = ArchiveEncoding.Decode(name);
Comment = ArchiveEncoding.Decode(comment);
Name = ArchiveEncoding.DecodeUTF8(name);
Comment = ArchiveEncoding.DecodeUTF8(comment);
}
else
{
Name = ArchiveEncoding.Decode437(name);
Comment = ArchiveEncoding.Decode437(comment);
Name = ArchiveEncoding.Decode(name);
Comment = ArchiveEncoding.Decode(comment);
}
LoadExtra(extra);

View File

@@ -26,14 +26,21 @@ namespace SharpCompress.Common.Zip.Headers
byte[] name = reader.ReadBytes(nameLength);
byte[] extra = reader.ReadBytes(extraLength);
// According to .ZIP File Format Specification
//
// For example: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
//
// Bit 11: Language encoding flag (EFS). If this bit is set,
// the filename and comment fields for this file
// MUST be encoded using UTF-8. (see APPENDIX D)
if (Flags.HasFlag(HeaderFlags.Efs))
{
Name = ArchiveEncoding.Decode(name);
Name = ArchiveEncoding.DecodeUTF8(name);
}
else
{
// Use IBM Code Page 437 (IBM PC character encoding set)
Name = ArchiveEncoding.Decode437(name);
Name = ArchiveEncoding.Decode(name);
}
LoadExtra(extra);

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Common.Zip.Headers
{
@@ -76,34 +76,34 @@ namespace SharpCompress.Common.Zip.Headers
switch (DataBytes.Length)
{
case 4:
VolumeNumber = DataConverter.LittleEndian.GetUInt32(DataBytes, 0);
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes);
return;
case 8:
RelativeOffsetOfEntryHeader = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
return;
case 12:
RelativeOffsetOfEntryHeader = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
VolumeNumber = DataConverter.LittleEndian.GetUInt32(DataBytes, 8);
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(8));
return;
case 16:
UncompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
CompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 8);
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
return;
case 20:
UncompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
CompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 8);
VolumeNumber = DataConverter.LittleEndian.GetUInt32(DataBytes, 16);
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(16));
return;
case 24:
UncompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
CompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 8);
RelativeOffsetOfEntryHeader = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 16);
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(16));
return;
case 28:
UncompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
CompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 8);
RelativeOffsetOfEntryHeader = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 16);
VolumeNumber = DataConverter.LittleEndian.GetUInt32(DataBytes, 24);
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(16));
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(24));
return;
default:
throw new ArchiveException("Unexpected size of of Zip64 extended information extra field");
@@ -132,7 +132,7 @@ namespace SharpCompress.Common.Zip.Headers
case ExtraDataType.Zip64ExtendedInformationExtraField:
return new Zip64ExtendedInformationExtraField
(
type,
type,
length,
extraData
);
@@ -146,4 +146,4 @@ namespace SharpCompress.Common.Zip.Headers
}
}
}
}
}

View File

@@ -1,8 +1,7 @@
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Common.Zip.Headers
{
@@ -30,7 +29,7 @@ namespace SharpCompress.Common.Zip.Headers
&& Name.EndsWith("\\");
}
}
internal Stream PackedStream { get; set; }
internal ArchiveEncoding ArchiveEncoding { get; }
@@ -66,9 +65,7 @@ namespace SharpCompress.Common.Zip.Headers
return encryptionData;
}
#if !NO_CRYPTO
internal WinzipAesEncryptionData WinzipAesEncryptionData { get; set; }
#endif
internal ushort LastModifiedDate { get; set; }
@@ -80,13 +77,22 @@ namespace SharpCompress.Common.Zip.Headers
{
for (int i = 0; i < extra.Length - 4;)
{
ExtraDataType type = (ExtraDataType)DataConverter.LittleEndian.GetUInt16(extra, i);
ExtraDataType type = (ExtraDataType)BinaryPrimitives.ReadUInt16LittleEndian(extra.AsSpan(i));
if (!Enum.IsDefined(typeof(ExtraDataType), type))
{
type = ExtraDataType.NotImplementedExtraData;
}
ushort length = DataConverter.LittleEndian.GetUInt16(extra, i + 2);
ushort length = BinaryPrimitives.ReadUInt16LittleEndian(extra.AsSpan(i + 2));
// 7zip has this same kind of check to ignore extras blocks that don't conform to the standard 2-byte ID, 2-byte length, N-byte value.
// CPP/7Zip/Zip/ZipIn.cpp: CInArchive::ReadExtra
if (length > extra.Length)
{
// bad extras block
return;
}
byte[] data = new byte[length];
Buffer.BlockCopy(extra, i + 4, data, 0, length);
Extra.Add(LocalEntryHeaderExtraFactory.Create(type, length, data));
@@ -99,4 +105,4 @@ namespace SharpCompress.Common.Zip.Headers
internal bool IsZip64 => CompressedSize == uint.MaxValue;
}
}
}

View File

@@ -1,5 +1,6 @@
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip
{
@@ -7,11 +8,13 @@ namespace SharpCompress.Common.Zip
{
private bool _isLocalHeaderLoaded;
private readonly SeekableZipHeaderFactory _headerFactory;
private readonly DirectoryEntryHeader _directoryEntryHeader;
internal SeekableZipFilePart(SeekableZipHeaderFactory headerFactory, DirectoryEntryHeader header, Stream stream)
: base(header, stream)
{
_headerFactory = headerFactory;
_directoryEntryHeader = header;
}
internal override Stream GetCompressedStream()
@@ -36,6 +39,15 @@ namespace SharpCompress.Common.Zip
protected override Stream CreateBaseStream()
{
BaseStream.Position = Header.DataStartPosition.Value;
if ((Header.CompressedSize == 0)
&& FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor)
&& (_directoryEntryHeader?.HasData == true)
&& (_directoryEntryHeader?.CompressedSize != 0))
{
return new ReadOnlySubStream(BaseStream, _directoryEntryHeader.CompressedSize);
}
return BaseStream;
}
}

View File

@@ -17,7 +17,7 @@ namespace SharpCompress.Common.Zip
{
}
internal IEnumerable<DirectoryEntryHeader> ReadSeekableHeader(Stream stream)
internal IEnumerable<ZipHeader> ReadSeekableHeader(Stream stream)
{
var reader = new BinaryReader(stream);
@@ -51,16 +51,22 @@ namespace SharpCompress.Common.Zip
{
stream.Position = position;
uint signature = reader.ReadUInt32();
var directoryEntryHeader = ReadHeader(signature, reader, _zip64) as DirectoryEntryHeader;
var nextHeader = ReadHeader(signature, reader, _zip64);
position = stream.Position;
if (directoryEntryHeader == null)
{
yield break;
}
//entry could be zero bytes so we need to know that.
directoryEntryHeader.HasData = directoryEntryHeader.CompressedSize != 0;
yield return directoryEntryHeader;
if (nextHeader == null)
yield break;
if (nextHeader is DirectoryEntryHeader entryHeader)
{
//entry could be zero bytes so we need to know that.
entryHeader.HasData = entryHeader.CompressedSize != 0;
yield return entryHeader;
}
else if (nextHeader is DirectoryEndHeader endHeader)
{
yield return endHeader;
}
}
}

View File

@@ -49,18 +49,35 @@ namespace SharpCompress.Common.Zip
_lastEntryHeader = null;
uint headerBytes = reader.ReadUInt32();
header = ReadHeader(headerBytes, reader);
if (header == null) { yield break; }
//entry could be zero bytes so we need to know that.
if (header.ZipHeaderType == ZipHeaderType.LocalEntry)
{
bool isRecording = rewindableStream.IsRecording;
if (!isRecording)
var local_header = ((LocalEntryHeader)header);
// If we have CompressedSize, there is data to be read
if( local_header.CompressedSize > 0 )
{
rewindableStream.StartRecording();
header.HasData = true;
} // Check if zip is streaming ( Length is 0 and is declared in PostDataDescriptor )
else if( local_header.Flags.HasFlag(HeaderFlags.UsePostDataDescriptor) )
{
bool isRecording = rewindableStream.IsRecording;
if (!isRecording)
{
rewindableStream.StartRecording();
}
uint nextHeaderBytes = reader.ReadUInt32();
// Check if next data is PostDataDescriptor, streamed file with 0 length
header.HasData = !IsHeader(nextHeaderBytes);
rewindableStream.Rewind(!isRecording);
}
else // We are not streaming and compressed size is 0, we have no data
{
header.HasData = false;
}
uint nextHeaderBytes = reader.ReadUInt32();
header.HasData = !IsHeader(nextHeaderBytes);
rewindableStream.Rewind(!isRecording);
}
yield return header;
}

View File

@@ -1,9 +1,7 @@
#if !NO_CRYPTO
using System;
using System.Buffers.Binary;
using System.IO;
using System.Security.Cryptography;
using SharpCompress.Converters;
namespace SharpCompress.Common.Zip
{
@@ -120,7 +118,7 @@ namespace SharpCompress.Common.Zip
: bytesRemaining;
// update the counter
DataConverter.LittleEndian.PutBytes(_counter, 0, _nonce++);
BinaryPrimitives.WriteInt32LittleEndian(_counter, _nonce++);
// Determine if this is the final block
if ((bytesToRead == bytesRemaining) && (_totalBytesLeftToRead == 0))
@@ -181,4 +179,3 @@ namespace SharpCompress.Common.Zip
}
}
}
#endif

View File

@@ -1,8 +1,6 @@
#if !NO_CRYPTO
using System;
using System.Buffers.Binary;
using System.Security.Cryptography;
using SharpCompress.Converters;
namespace SharpCompress.Common.Zip
{
@@ -64,10 +62,10 @@ namespace SharpCompress.Common.Zip
IvBytes = rfc2898.GetBytes(KeySizeInBytes);
_generatedVerifyValue = rfc2898.GetBytes(2);
short verify = DataConverter.LittleEndian.GetInt16(_passwordVerifyValue, 0);
short verify = BinaryPrimitives.ReadInt16LittleEndian(_passwordVerifyValue);
if (_password != null)
{
short generated = DataConverter.LittleEndian.GetInt16(_generatedVerifyValue, 0);
short generated = BinaryPrimitives.ReadInt16LittleEndian(_generatedVerifyValue);
if (verify != generated)
{
throw new InvalidFormatException("bad password");
@@ -76,4 +74,3 @@ namespace SharpCompress.Common.Zip
}
}
}
#endif

View File

@@ -60,6 +60,8 @@ namespace SharpCompress.Common.Zip
public override string Key => _filePart.Header.Name;
public override string LinkTarget => null;
public override long CompressedSize => _filePart.Header.CompressedSize;
public override long Size => _filePart.Header.UncompressedSize;

View File

@@ -1,4 +1,5 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Linq;
using SharpCompress.Common.Zip.Headers;
@@ -8,7 +9,6 @@ using SharpCompress.Compressors.Deflate;
using SharpCompress.Compressors.Deflate64;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.PPMd;
using SharpCompress.Converters;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip
@@ -108,19 +108,19 @@ namespace SharpCompress.Common.Zip
{
throw new InvalidFormatException("Winzip data length is not 7.");
}
ushort compressedMethod = DataConverter.LittleEndian.GetUInt16(data.DataBytes, 0);
ushort compressedMethod = BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes);
if (compressedMethod != 0x01 && compressedMethod != 0x02)
{
throw new InvalidFormatException("Unexpected vendor version number for WinZip AES metadata");
}
ushort vendorId = DataConverter.LittleEndian.GetUInt16(data.DataBytes, 2);
ushort vendorId = BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes.AsSpan(2));
if (vendorId != 0x4541)
{
throw new InvalidFormatException("Unexpected vendor ID for WinZip AES metadata");
}
return CreateDecompressionStream(stream, (ZipCompressionMethod)DataConverter.LittleEndian.GetUInt16(data.DataBytes, 5));
return CreateDecompressionStream(stream, (ZipCompressionMethod)BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes.AsSpan(5)));
}
default:
{
@@ -142,7 +142,7 @@ namespace SharpCompress.Common.Zip
&& FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor))
|| Header.IsZip64)
{
plainStream = new NonDisposingStream(plainStream); //make sure AES doesn't close
plainStream = new NonDisposingStream(plainStream); //make sure AES doesn't close
}
else
{
@@ -165,12 +165,10 @@ namespace SharpCompress.Common.Zip
case ZipCompressionMethod.WinzipAes:
{
#if !NO_FILE
if (Header.WinzipAesEncryptionData != null)
{
return new WinzipAesCryptoStream(plainStream, Header.WinzipAesEncryptionData, Header.CompressedSize - 10);
}
#endif
return plainStream;
}
@@ -184,4 +182,4 @@ namespace SharpCompress.Common.Zip
return plainStream;
}
}
}
}

View File

@@ -1,8 +1,6 @@
using System;
using System.IO;
#if !NO_CRYPTO
using System.Linq;
#endif
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
using System.Text;
@@ -91,7 +89,7 @@ namespace SharpCompress.Common.Zip
return entry;
}
default:
throw new NotSupportedException("Unknown header: " + headerBytes);
return null;
}
}
@@ -132,10 +130,6 @@ namespace SharpCompress.Common.Zip
if (entryHeader.CompressionMethod == ZipCompressionMethod.WinzipAes)
{
#if NO_CRYPTO
throw new NotSupportedException("Cannot decrypt Winzip AES with Silverlight or WP7.");
#else
ExtraData data = entryHeader.Extra.SingleOrDefault(x => x.Type == ExtraDataType.WinZipAes);
if (data != null)
{
@@ -150,7 +144,6 @@ namespace SharpCompress.Common.Zip
entryHeader.CompressedSize -= (uint)(salt.Length + 2);
}
#endif
}
}

View File

@@ -65,16 +65,16 @@ namespace SharpCompress.Compressors.ADC
}
}
private static int GetOffset(byte[] chunk, int position)
private static int GetOffset(ReadOnlySpan<byte> chunk)
{
switch (GetChunkType(chunk[position]))
switch (GetChunkType(chunk[0]))
{
case PLAIN:
return 0;
case TWO_BYTE:
return ((chunk[position] & 0x03) << 8) + chunk[position + 1];
return ((chunk[0] & 0x03) << 8) + chunk[1];
case THREE_BYTE:
return (chunk[position + 1] << 8) + chunk[position + 2];
return (chunk[1] << 8) + chunk[2];
default:
return -1;
}
@@ -116,7 +116,7 @@ namespace SharpCompress.Compressors.ADC
byte[] buffer = new byte[bufferSize];
int outPosition = 0;
bool full = false;
MemoryStream tempMs;
Span<byte> temp = stackalloc byte[3];
while (position < input.Length)
{
@@ -142,11 +142,10 @@ namespace SharpCompress.Compressors.ADC
position += chunkSize + 1;
break;
case TWO_BYTE:
tempMs = new MemoryStream();
chunkSize = GetChunkSize((byte)readByte);
tempMs.WriteByte((byte)readByte);
tempMs.WriteByte((byte)input.ReadByte());
offset = GetOffset(tempMs.ToArray(), 0);
temp[0] = (byte)readByte;
temp[1] = (byte)input.ReadByte();
offset = GetOffset(temp);
if (outPosition + chunkSize > bufferSize)
{
full = true;
@@ -173,12 +172,11 @@ namespace SharpCompress.Compressors.ADC
}
break;
case THREE_BYTE:
tempMs = new MemoryStream();
chunkSize = GetChunkSize((byte)readByte);
tempMs.WriteByte((byte)readByte);
tempMs.WriteByte((byte)input.ReadByte());
tempMs.WriteByte((byte)input.ReadByte());
offset = GetOffset(tempMs.ToArray(), 0);
temp[0] = (byte)readByte;
temp[1] = (byte)input.ReadByte();
temp[2] = (byte)input.ReadByte();
offset = GetOffset(temp);
if (outPosition + chunkSize > bufferSize)
{
full = true;
@@ -217,4 +215,4 @@ namespace SharpCompress.Compressors.ADC
return position - start;
}
}
}
}

View File

@@ -67,6 +67,11 @@ namespace SharpCompress.Compressors.BZip2
return stream.Read(buffer, offset, count);
}
public override int ReadByte()
{
return stream.ReadByte();
}
public override long Seek(long offset, SeekOrigin origin)
{
return stream.Seek(offset, origin);
@@ -82,6 +87,11 @@ namespace SharpCompress.Compressors.BZip2
stream.Write(buffer, offset, count);
}
public override void WriteByte(byte value)
{
stream.WriteByte(value);
}
/// <summary>
/// Consumes two bytes to test if there is a BZip2 header
/// </summary>

View File

@@ -1077,6 +1077,10 @@ namespace SharpCompress.Compressors.BZip2
{
}
public override void WriteByte(byte value)
{
}
public override bool CanRead => true;
public override bool CanSeek => false;

View File

@@ -1929,6 +1929,11 @@ namespace SharpCompress.Compressors.BZip2
return 0;
}
public override int ReadByte()
{
return -1;
}
public override long Seek(long offset, SeekOrigin origin)
{
return 0;

View File

@@ -282,6 +282,15 @@ namespace SharpCompress.Compressors.Deflate
return _baseStream.Read(buffer, offset, count);
}
public override int ReadByte()
{
if (_disposed)
{
throw new ObjectDisposedException("DeflateStream");
}
return _baseStream.ReadByte();
}
/// <summary>
/// Calling this method always throws a <see cref="NotImplementedException"/>.
/// </summary>
@@ -340,6 +349,15 @@ namespace SharpCompress.Compressors.Deflate
_baseStream.Write(buffer, offset, count);
}
public override void WriteByte(byte value)
{
if (_disposed)
{
throw new ObjectDisposedException("DeflateStream");
}
_baseStream.WriteByte(value);
}
#endregion
public MemoryStream InputBuffer => new MemoryStream(_baseStream._z.InputBuffer, _baseStream._z.NextIn,

View File

@@ -27,9 +27,8 @@
// ------------------------------------------------------------------
using System;
using System.Buffers.Binary;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Converters;
using System.Text;
namespace SharpCompress.Compressors.Deflate
@@ -442,7 +441,7 @@ namespace SharpCompress.Compressors.Deflate
}
TimeSpan delta = LastModified.Value - UNIX_EPOCH;
var timet = (Int32)delta.TotalSeconds;
DataConverter.LittleEndian.PutBytes(header, i, timet);
BinaryPrimitives.WriteInt32LittleEndian(header.AsSpan(i), timet);
i += 4;
// xflg
@@ -476,4 +475,4 @@ namespace SharpCompress.Compressors.Deflate
return header.Length; // bytes written
}
}
}
}

View File

@@ -25,11 +25,10 @@
// ------------------------------------------------------------------
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Converters;
using System.Text;
namespace SharpCompress.Compressors.Deflate
@@ -244,10 +243,12 @@ namespace SharpCompress.Compressors.Deflate
if (_wantCompress)
{
// Emit the GZIP trailer: CRC32 and size mod 2^32
int c1 = crc.Crc32Result;
_stream.Write(DataConverter.LittleEndian.GetBytes(c1), 0, 4);
byte[] intBuf = new byte[4];
BinaryPrimitives.WriteInt32LittleEndian(intBuf, crc.Crc32Result);
_stream.Write(intBuf, 0, 4);
int c2 = (Int32)(crc.TotalBytesRead & 0x00000000FFFFFFFF);
_stream.Write(DataConverter.LittleEndian.GetBytes(c2), 0, 4);
BinaryPrimitives.WriteInt32LittleEndian(intBuf, c2);
_stream.Write(intBuf, 0, 4);
}
else
{
@@ -293,9 +294,9 @@ namespace SharpCompress.Compressors.Deflate
Array.Copy(_z.InputBuffer, _z.NextIn, trailer, 0, trailer.Length);
}
Int32 crc32_expected = DataConverter.LittleEndian.GetInt32(trailer, 0);
Int32 crc32_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer);
Int32 crc32_actual = crc.Crc32Result;
Int32 isize_expected = DataConverter.LittleEndian.GetInt32(trailer, 4);
Int32 isize_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer.AsSpan(4));
Int32 isize_actual = (Int32)(_z.TotalBytesOut & 0x00000000FFFFFFFF);
if (crc32_actual != crc32_expected)
@@ -446,7 +447,7 @@ namespace SharpCompress.Compressors.Deflate
throw new ZlibException("Bad GZIP header.");
}
Int32 timet = DataConverter.LittleEndian.GetInt32(header, 4);
Int32 timet = BinaryPrimitives.ReadInt32LittleEndian(header.AsSpan(4));
_GzipMtime = TarHeader.EPOCH.AddSeconds(timet);
totalBytesRead += n;
if ((header[3] & 0x04) == 0x04)
@@ -647,4 +648,4 @@ namespace SharpCompress.Compressors.Deflate
Undefined
}
}
}
}

View File

@@ -270,6 +270,15 @@ namespace SharpCompress.Compressors.Deflate
return _baseStream.Read(buffer, offset, count);
}
public override int ReadByte()
{
if (_disposed)
{
throw new ObjectDisposedException("ZlibStream");
}
return _baseStream.ReadByte();
}
/// <summary>
/// Calling this method always throws a <see cref="NotImplementedException"/>.
/// </summary>
@@ -321,6 +330,15 @@ namespace SharpCompress.Compressors.Deflate
_baseStream.Write(buffer, offset, count);
}
public override void WriteByte(byte value)
{
if (_disposed)
{
throw new ObjectDisposedException("ZlibStream");
}
_baseStream.WriteByte(value);
}
#endregion System.IO.Stream methods
}
}

View File

@@ -1,6 +1,4 @@
#if !NO_CRYPTO
using System;
using System;
using System.IO;
using System.Security.Cryptography;
using System.Text;
@@ -195,7 +193,7 @@ namespace SharpCompress.Compressors.LZMA
}
else
{
#if NETSTANDARD1_3
#if NETSTANDARD1_3 || NETSTANDARD2_0
using (IncrementalHash sha = IncrementalHash.CreateHash(HashAlgorithmName.SHA256))
{
byte[] counter = new byte[8];
@@ -262,5 +260,3 @@ namespace SharpCompress.Compressors.LZMA
#endregion
}
}
#endif

View File

@@ -193,6 +193,22 @@ namespace SharpCompress.Compressors.LZMA
return count;
}
public override int ReadByte()
{
if (_mFinished)
{
return -1;
}
if (!_mIter.MoveNext())
{
_mFinished = true;
return -1;
}
return _mIter.Current;
}
public IEnumerable<byte> Run()
{
const uint kBurstSize = (1u << 18);

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.IO;
using SharpCompress.Converters;
using SharpCompress.Crypto;
using SharpCompress.IO;
@@ -58,16 +58,17 @@ namespace SharpCompress.Compressors.LZMA
crc32Stream.WrappedStream.Dispose();
crc32Stream.Dispose();
var compressedCount = _countingWritableSubStream.Count;
var bytes = DataConverter.LittleEndian.GetBytes(crc32Stream.Crc);
_countingWritableSubStream.Write(bytes, 0, bytes.Length);
bytes = DataConverter.LittleEndian.GetBytes(_writeCount);
_countingWritableSubStream.Write(bytes, 0, bytes.Length);
byte[] intBuf = new byte[8];
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, crc32Stream.Crc);
_countingWritableSubStream.Write(intBuf, 0, 4);
BinaryPrimitives.WriteInt64LittleEndian(intBuf, _writeCount);
_countingWritableSubStream.Write(intBuf, 0, 8);
//total with headers
bytes = DataConverter.LittleEndian.GetBytes(compressedCount + 6 + 20);
_countingWritableSubStream.Write(bytes, 0, bytes.Length);
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, compressedCount + 6 + 20);
_countingWritableSubStream.Write(intBuf, 0, 8);
}
_finished = true;
}
@@ -101,7 +102,7 @@ namespace SharpCompress.Compressors.LZMA
{
_stream.Flush();
}
// TODO: Both Length and Position are sometimes feasible, but would require
// reading the output length when we initialize.
public override long Length => throw new NotImplementedException();
@@ -110,6 +111,8 @@ namespace SharpCompress.Compressors.LZMA
public override int Read(byte[] buffer, int offset, int count) => _stream.Read(buffer, offset, count);
public override int ReadByte() => _stream.ReadByte();
public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
public override void SetLength(long value) => throw new NotImplementedException();
@@ -120,6 +123,12 @@ namespace SharpCompress.Compressors.LZMA
_writeCount += count;
}
public override void WriteByte(byte value)
{
_stream.WriteByte(value);
++_writeCount;
}
#endregion
/// <summary>

View File

@@ -34,34 +34,26 @@ namespace SharpCompress.Compressors.LZMA
if (NEEDS_INDENT)
{
NEEDS_INDENT = false;
#if !NO_FILE
Debug.Write(INDENT.Peek());
#endif
}
}
public static void Write(object value)
{
EnsureIndent();
#if !NO_FILE
Debug.Write(value);
#endif
}
public static void Write(string text)
{
EnsureIndent();
#if !NO_FILE
Debug.Write(text);
#endif
}
public static void Write(string format, params object[] args)
{
EnsureIndent();
#if !NO_FILE
Debug.Write(string.Format(format, args));
#endif
}
public static void WriteLine()

View File

@@ -996,7 +996,7 @@ namespace SharpCompress.Compressors.LZMA
}
}
UInt32 startLen = 2; // speed optimization
UInt32 startLen = 2; // speed optimization
for (UInt32 repIndex = 0; repIndex < Base.K_NUM_REP_DISTANCES; repIndex++)
{
@@ -1571,12 +1571,17 @@ namespace SharpCompress.Compressors.LZMA
public void WriteCoderProperties(Stream outStream)
{
_properties[0] = (Byte)((_posStateBits * 5 + _numLiteralPosStateBits) * 9 + _numLiteralContextBits);
WriteCoderProperties(_properties);
outStream.Write(_properties, 0, K_PROP_SIZE);
}
public void WriteCoderProperties(Span<byte> span)
{
span[0] = (byte)((_posStateBits * 5 + _numLiteralPosStateBits) * 9 + _numLiteralContextBits);
for (int i = 0; i < 4; i++)
{
_properties[1 + i] = (Byte)((_dictionarySize >> (8 * i)) & 0xFF);
span[1 + i] = (byte)((_dictionarySize >> (8 * i)) & 0xFF);
}
outStream.Write(_properties, 0, K_PROP_SIZE);
}
private readonly UInt32[] _tempPrices = new UInt32[Base.K_NUM_FULL_DISTANCES];
@@ -1794,4 +1799,4 @@ namespace SharpCompress.Compressors.LZMA
_trainSize = trainSize;
}
}
}
}

View File

@@ -1,7 +1,7 @@
using System;
using System.Buffers.Binary;
using System.IO;
using SharpCompress.Compressors.LZMA.LZ;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.LZMA
{
@@ -56,7 +56,7 @@ namespace SharpCompress.Compressors.LZMA
if (!isLzma2)
{
_dictionarySize = DataConverter.LittleEndian.GetInt32(properties, 1);
_dictionarySize = BinaryPrimitives.ReadInt32LittleEndian(properties.AsSpan(1));
_outWindow.Create(_dictionarySize);
if (presetDictionary != null)
{
@@ -107,9 +107,9 @@ namespace SharpCompress.Compressors.LZMA
_encoder = new Encoder();
_encoder.SetCoderProperties(properties._propIDs, properties._properties);
MemoryStream propStream = new MemoryStream(5);
_encoder.WriteCoderProperties(propStream);
Properties = propStream.ToArray();
byte[] prop = new byte[5];
_encoder.WriteCoderProperties(prop);
Properties = prop;
_encoder.SetStreams(null, outputStream, -1, -1);
if (presetDictionary != null)
@@ -315,4 +315,4 @@ namespace SharpCompress.Compressors.LZMA
public byte[] Properties { get; } = new byte[5];
}
}
}

View File

@@ -36,10 +36,8 @@ namespace SharpCompress.Compressors.LZMA
case K_LZMA:
case K_LZMA2:
return new LzmaStream(info, inStreams.Single(), -1, limit);
#if !NO_CRYPTO
case CMethodId.K_AES_ID:
return new AesDecoderStream(inStreams.Single(), info, pass, limit);
#endif
case K_BCJ:
return new BCJFilter(false, inStreams.Single());
case K_BCJ2:

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.PPMd.H
{
@@ -19,7 +19,11 @@ namespace SharpCompress.Compressors.PPMd.H
{
}
internal int SummFreq { get => DataConverter.LittleEndian.GetInt16(Memory, Address) & 0xffff; set => DataConverter.LittleEndian.PutBytes(Memory, Address, (short)value); }
internal int SummFreq
{
get => BinaryPrimitives.ReadInt16LittleEndian(Memory.AsSpan(Address)) & 0xffff;
set => BinaryPrimitives.WriteInt16LittleEndian(Memory.AsSpan(Address), (short)value);
}
internal FreqData Initialize(byte[] mem)
{
@@ -28,14 +32,12 @@ namespace SharpCompress.Compressors.PPMd.H
internal void IncrementSummFreq(int dSummFreq)
{
short summFreq = DataConverter.LittleEndian.GetInt16(Memory, Address);
summFreq += (short)dSummFreq;
DataConverter.LittleEndian.PutBytes(Memory, Address, summFreq);
SummFreq += (short)dSummFreq;
}
internal int GetStats()
{
return DataConverter.LittleEndian.GetInt32(Memory, Address + 2);
return BinaryPrimitives.ReadInt32LittleEndian(Memory.AsSpan(Address + 2));
}
internal virtual void SetStats(State state)
@@ -45,7 +47,7 @@ namespace SharpCompress.Compressors.PPMd.H
internal void SetStats(int state)
{
DataConverter.LittleEndian.PutBytes(Memory, Address + 2, state);
BinaryPrimitives.WriteInt32LittleEndian(Memory.AsSpan(Address + 2), state);
}
public override String ToString()
@@ -64,4 +66,4 @@ namespace SharpCompress.Compressors.PPMd.H
return buffer.ToString();
}
}
}
}

View File

@@ -137,7 +137,7 @@ namespace SharpCompress.Compressors.PPMd.H
private void RestartModelRare()
{
Utility.Fill(_charMask, 0);
new Span<int>(_charMask).Fill(0);
SubAlloc.InitSubAllocator();
_initRl = -(_maxOrder < 12 ? _maxOrder : 12) - 1;
int addr = SubAlloc.AllocContext();
@@ -228,7 +228,7 @@ namespace SharpCompress.Compressors.PPMd.H
private void ClearMask()
{
_escCount = 1;
Utility.Fill(_charMask, 0);
new Span<int>(_charMask).Fill(0);
}
internal bool DecodeInit(IRarUnpack unpackRead, int escChar)
@@ -912,4 +912,4 @@ namespace SharpCompress.Compressors.PPMd.H
}
}
}
}
}

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.PPMd.H
{
@@ -22,7 +22,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_numStats = DataConverter.LittleEndian.GetInt16(Memory, Address) & 0xffff;
_numStats = BinaryPrimitives.ReadInt16LittleEndian(Memory.AsSpan(Address)) & 0xffff;
}
return _numStats;
}
@@ -32,7 +32,7 @@ namespace SharpCompress.Compressors.PPMd.H
_numStats = value & 0xffff;
if (Memory != null)
{
DataConverter.LittleEndian.PutBytes(Memory, Address, (short)value);
BinaryPrimitives.WriteInt16LittleEndian(Memory.AsSpan(Address), (short)value);
}
}
}
@@ -109,7 +109,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_suffix = DataConverter.LittleEndian.GetInt32(Memory, Address + 8);
_suffix = BinaryPrimitives.ReadInt32LittleEndian(Memory.AsSpan(Address + 8));
}
return _suffix;
}
@@ -124,7 +124,7 @@ namespace SharpCompress.Compressors.PPMd.H
_suffix = suffix;
if (Memory != null)
{
DataConverter.LittleEndian.PutBytes(Memory, Address + 8, suffix);
BinaryPrimitives.WriteInt32LittleEndian(Memory.AsSpan(Address + 8), suffix);
}
}
@@ -307,7 +307,7 @@ namespace SharpCompress.Compressors.PPMd.H
// byte[] bytes = model.getSubAlloc().getHeap();
// int p1 = state1.Address;
// int p2 = state2.Address;
//
//
// for (int i = 0; i < StatePtr.size; i++) {
// byte temp = bytes[p1+i];
// bytes[p1+i] = bytes[p2+i];
@@ -564,4 +564,4 @@ namespace SharpCompress.Compressors.PPMd.H
UNION_SIZE = Math.Max(FreqData.SIZE, State.SIZE);
}
}
}
}

View File

@@ -1,4 +1,5 @@
using SharpCompress.Converters;
using System;
using System.Buffers.Binary;
namespace SharpCompress.Compressors.PPMd.H
{
@@ -21,7 +22,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_stamp = DataConverter.LittleEndian.GetInt16(Memory, Address) & 0xffff;
_stamp = BinaryPrimitives.ReadInt16LittleEndian(Memory.AsSpan(Address)) & 0xffff;
}
return _stamp;
}
@@ -31,7 +32,7 @@ namespace SharpCompress.Compressors.PPMd.H
_stamp = value;
if (Memory != null)
{
DataConverter.LittleEndian.PutBytes(Memory, Address, (short)value);
BinaryPrimitives.WriteInt16LittleEndian(Memory.AsSpan(Address), (short)value);
}
}
}
@@ -63,7 +64,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_next = DataConverter.LittleEndian.GetInt32(Memory, Address + 4);
_next = BinaryPrimitives.ReadInt32LittleEndian(Memory.AsSpan(Address + 4));
}
return _next;
}
@@ -78,7 +79,7 @@ namespace SharpCompress.Compressors.PPMd.H
_next = next;
if (Memory != null)
{
DataConverter.LittleEndian.PutBytes(Memory, Address + 4, next);
BinaryPrimitives.WriteInt32LittleEndian(Memory.AsSpan(Address + 4), next);
}
}
@@ -86,7 +87,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_nu = DataConverter.LittleEndian.GetInt16(Memory, Address + 2) & 0xffff;
_nu = BinaryPrimitives.ReadInt16LittleEndian(Memory.AsSpan(Address + 2)) & 0xffff;
}
return _nu;
}
@@ -96,7 +97,7 @@ namespace SharpCompress.Compressors.PPMd.H
_nu = nu & 0xffff;
if (Memory != null)
{
DataConverter.LittleEndian.PutBytes(Memory, Address + 2, (short)nu);
BinaryPrimitives.WriteInt16LittleEndian(Memory.AsSpan(Address + 2), (short)nu);
}
}
@@ -104,7 +105,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_prev = DataConverter.LittleEndian.GetInt32(Memory, Address + 8);
_prev = BinaryPrimitives.ReadInt32LittleEndian(Memory.AsSpan(Address + 8));
}
return _prev;
}
@@ -119,8 +120,8 @@ namespace SharpCompress.Compressors.PPMd.H
_prev = prev;
if (Memory != null)
{
DataConverter.LittleEndian.PutBytes(Memory, Address + 8, prev);
BinaryPrimitives.WriteInt32LittleEndian(Memory.AsSpan(Address + 8), prev);
}
}
}
}
}

View File

@@ -1,5 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.PPMd.H
{
@@ -18,7 +19,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_next = DataConverter.LittleEndian.GetInt32(Memory, Address);
_next = BinaryPrimitives.ReadInt32LittleEndian(Memory.AsSpan(Address));
}
return _next;
}
@@ -33,7 +34,7 @@ namespace SharpCompress.Compressors.PPMd.H
_next = next;
if (Memory != null)
{
DataConverter.LittleEndian.PutBytes(Memory, Address, next);
BinaryPrimitives.WriteInt32LittleEndian(Memory.AsSpan(Address), next);
}
}
@@ -51,4 +52,4 @@ namespace SharpCompress.Compressors.PPMd.H
return buffer.ToString();
}
}
}
}

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.PPMd.H
{
@@ -29,7 +29,7 @@ namespace SharpCompress.Compressors.PPMd.H
internal int GetSuccessor()
{
return DataConverter.LittleEndian.GetInt32(Memory, Address + 2);
return BinaryPrimitives.ReadInt32LittleEndian(Memory.AsSpan(Address + 2));
}
internal void SetSuccessor(PpmContext successor)
@@ -39,7 +39,7 @@ namespace SharpCompress.Compressors.PPMd.H
internal void SetSuccessor(int successor)
{
DataConverter.LittleEndian.PutBytes(Memory, Address + 2, successor);
BinaryPrimitives.WriteInt32LittleEndian(Memory.AsSpan(Address + 2), successor);
}
internal void SetValues(StateRef state)
@@ -95,4 +95,4 @@ namespace SharpCompress.Compressors.PPMd.H
return buffer.ToString();
}
}
}
}

View File

@@ -166,7 +166,7 @@ namespace SharpCompress.Compressors.PPMd.H
_freeListPos = _heapStart + allocSize;
//UPGRADE_ISSUE: The following fragment of code could not be parsed and was not converted. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1156'"
//assert(realAllocSize - tempMemBlockPos == RarMemBlock.size): realAllocSize
//assert(realAllocSize - tempMemBlockPos == RarMemBlock.size): realAllocSize
//+ + tempMemBlockPos + + RarMemBlock.size;
// Init freeList
@@ -360,7 +360,7 @@ namespace SharpCompress.Compressors.PPMd.H
public virtual void InitSubAllocator()
{
int i, k;
Utility.Fill(_heap, _freeListPos, _freeListPos + SizeOfFreeList(), (byte)0);
new Span<byte>(_heap, _freeListPos, SizeOfFreeList()).Fill(0);
_pText = _heapStart;
@@ -448,4 +448,4 @@ namespace SharpCompress.Compressors.PPMd.H
UNIT_SIZE = Math.Max(PpmContext.SIZE, RarMemBlock.SIZE);
}
}
}
}

View File

@@ -1,5 +1,6 @@
using SharpCompress.Compressors.PPMd.I1;
using SharpCompress.Converters;
using System;
using System.Buffers.Binary;
using SharpCompress.Compressors.PPMd.I1;
namespace SharpCompress.Compressors.PPMd
{
@@ -25,7 +26,7 @@ namespace SharpCompress.Compressors.PPMd
ModelOrder = modelOrder;
RestorationMethod = modelRestorationMethod;
}
public int ModelOrder { get; }
public PpmdVersion Version { get; } = PpmdVersion.I1;
internal ModelRestorationMethod RestorationMethod { get; }
@@ -34,7 +35,7 @@ namespace SharpCompress.Compressors.PPMd
{
if (properties.Length == 2)
{
ushort props = DataConverter.LittleEndian.GetUInt16(properties, 0);
ushort props = BinaryPrimitives.ReadUInt16LittleEndian(properties);
AllocatorSize = (((props >> 4) & 0xff) + 1) << 20;
ModelOrder = (props & 0x0f) + 1;
RestorationMethod = (ModelRestorationMethod)(props >> 12);
@@ -42,7 +43,7 @@ namespace SharpCompress.Compressors.PPMd
else if (properties.Length == 5)
{
Version = PpmdVersion.H7Z;
AllocatorSize = DataConverter.LittleEndian.GetInt32(properties, 1);
AllocatorSize = BinaryPrimitives.ReadInt32LittleEndian(properties.AsSpan(1));
ModelOrder = properties[0];
}
}
@@ -64,8 +65,16 @@ namespace SharpCompress.Compressors.PPMd
}
}
public byte[] Properties => DataConverter.LittleEndian.GetBytes(
(ushort)
((ModelOrder - 1) + (((AllocatorSize >> 20) - 1) << 4) + ((ushort)RestorationMethod << 12)));
public byte[] Properties
{
get
{
byte[] bytes = new byte[2];
BinaryPrimitives.WriteUInt16LittleEndian(
bytes,
(ushort)((ModelOrder - 1) + (((AllocatorSize >> 20) - 1) << 4) + ((ushort)RestorationMethod << 12)));
return bytes;
}
}
}
}
}

View File

@@ -32,9 +32,9 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
}
}
public bool Suspended {
public bool Suspended {
get => suspended;
set => suspended = value;
set => suspended = value;
}
public int Char
@@ -139,12 +139,12 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
case 36: // alternative hash
Unpack29(fileHeader.IsSolid);
break;
case 50: // rar 5.x compression
Unpack5(fileHeader.IsSolid);
break;
default:
default:
throw new InvalidFormatException("unknown rar compression version " + fileHeader.CompressionAlgorithm);
}
}
@@ -729,13 +729,13 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
if (!solid)
{
tablesRead = false;
Utility.Fill(oldDist, 0); // memset(oldDist,0,sizeof(OldDist));
new Span<int>(oldDist).Fill(0); // memset(oldDist,0,sizeof(OldDist));
oldDistPtr = 0;
lastDist = 0;
lastLength = 0;
Utility.Fill(unpOldTable, (byte)0); // memset(UnpOldTable,0,sizeof(UnpOldTable));
new Span<byte>(unpOldTable).Fill(0); // memset(UnpOldTable,0,sizeof(UnpOldTable));
unpPtr = 0;
wrPtr = 0;
@@ -837,7 +837,7 @@ WriteBorder=Math.Min(MaxWinSize,UNPACK_MAX_WRITE)&MaxWinMask;
if ((bitField & 0x4000) == 0)
{
Utility.Fill(unpOldTable, (byte)0); // memset(UnpOldTable,0,sizeof(UnpOldTable));
new Span<byte>(unpOldTable).Fill(0); // memset(UnpOldTable,0,sizeof(UnpOldTable));
}
AddBits(2);
@@ -1109,7 +1109,7 @@ WriteBorder=Math.Min(MaxWinSize,UNPACK_MAX_WRITE)&MaxWinMask;
oldFilterLengths[FiltPos] = StackFilter.BlockLength;
// memset(StackFilter->Prg.InitR,0,sizeof(StackFilter->Prg.InitR));
Utility.Fill(StackFilter.Program.InitR, 0);
new Span<int>(StackFilter.Program.InitR).Fill(0);
StackFilter.Program.InitR[3] = RarVM.VM_GLOBALMEMADDR; // StackFilter->Prg.InitR[3]=VM_GLOBALMEMADDR;
StackFilter.Program.InitR[4] = StackFilter.BlockLength;
@@ -1267,4 +1267,4 @@ WriteBorder=Math.Min(MaxWinSize,UNPACK_MAX_WRITE)&MaxWinMask;
}
}
}
}
}

View File

@@ -3,7 +3,7 @@
* Original author: Edmund Wagner
* Creation date: 21.06.2007
*
* the unrar licence applies to all junrar source and binary distributions
* the unrar licence applies to all junrar source and binary distributions
* you are not allowed to use this source to re-create the RAR compression algorithm
*/
@@ -652,9 +652,9 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
ChSetC[I] = ((~I + 1) & 0xff) << 8;
}
Utility.Fill(NToPl, 0); // memset(NToPl,0,sizeof(NToPl));
Utility.Fill(NToPlB, 0); // memset(NToPlB,0,sizeof(NToPlB));
Utility.Fill(NToPlC, 0); // memset(NToPlC,0,sizeof(NToPlC));
new Span<int>(NToPl).Fill(0); // memset(NToPl,0,sizeof(NToPl));
new Span<int>(NToPlB).Fill(0); // memset(NToPlB,0,sizeof(NToPlB));
new Span<int>(NToPlC).Fill(0); // memset(NToPlC,0,sizeof(NToPlC));
corrHuff(ChSetB, NToPlB);
}
@@ -670,7 +670,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
// & ~0xff) | I;
}
}
Utility.Fill(NumToPlace, 0); // memset(NumToPlace,0,sizeof(NToPl));
new Span<int>(NumToPlace).Fill(0); // memset(NumToPlace,0,sizeof(NToPl));
for (I = 6; I >= 0; I--)
{
NumToPlace[I] = (7 - I) * 32;
@@ -717,4 +717,4 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
wrPtr = unpPtr;
}
}
}
}

View File

@@ -3,7 +3,7 @@
* Original author: Edmund Wagner
* Creation date: 21.06.2007
*
* the unrar licence applies to all junrar source and binary distributions
* the unrar licence applies to all junrar source and binary distributions
* you are not allowed to use this source to re-create the RAR compression algorithm
*/
@@ -263,7 +263,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
if (0 == (BitField & 0x4000))
{
// memset(UnpOldTable20,0,sizeof(UnpOldTable20));
Utility.Fill(UnpOldTable20, (byte)0);
new Span<byte>(UnpOldTable20).Fill(0);
}
AddBits(2);
@@ -371,7 +371,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
AudV[3] = new AudioVariables();
// memset(UnpOldTable20,0,sizeof(UnpOldTable20));
Utility.Fill(UnpOldTable20, (byte)0);
new Span<byte>(UnpOldTable20).Fill(0);
}
}
@@ -521,4 +521,4 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
return ((byte)Ch);
}
}
}
}

View File

@@ -1,4 +1,5 @@
using SharpCompress.Compressors.Rar.VM;
using System;
using SharpCompress.Compressors.Rar.VM;
namespace SharpCompress.Compressors.Rar.UnpackV1
{
@@ -186,9 +187,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
int i;
long M, N;
Utility.Fill(lenCount, 0); // memset(LenCount,0,sizeof(LenCount));
Utility.Fill(dec.DecodeNum, 0); // memset(Dec->DecodeNum,0,Size*sizeof(*Dec->DecodeNum));
new Span<int>(dec.DecodeNum).Fill(0); // memset(Dec->DecodeNum,0,Size*sizeof(*Dec->DecodeNum));
for (i = 0; i < size; i++)
{
@@ -217,4 +216,4 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
dec.MaxNum = size;
}
}
}
}

View File

@@ -1,4 +1,5 @@
using static SharpCompress.Compressors.Rar.UnpackV2017.Unpack.Unpack15Local;
using System;
using static SharpCompress.Compressors.Rar.UnpackV2017.Unpack.Unpack15Local;
namespace SharpCompress.Compressors.Rar.UnpackV2017
{
@@ -461,9 +462,9 @@ internal static class Unpack15Local {
ChSetA[I]=(ushort)I;
ChSetC[I]=(ushort)(((~I+1) & 0xff)<<8);
}
Utility.Memset(NToPl,0,NToPl.Length);
Utility.Memset(NToPlB,0,NToPlB.Length);
Utility.Memset(NToPlC,0,NToPlC.Length);
new Span<byte>(NToPl).Fill(0);
new Span<byte>(NToPlB).Fill(0);
new Span<byte>(NToPlC).Fill(0);
CorrHuff(ChSetB,NToPlB);
}
@@ -473,7 +474,7 @@ internal static class Unpack15Local {
for (I=7;I>=0;I--)
for (J=0;J<32;J++)
CharSet[J]=(ushort)((CharSet[J] & ~0xff) | I);
Utility.Memset(NumToPlace,0,NToPl.Length);
new Span<byte>(NumToPlace, 0, NToPl.Length).Fill(0);
for (I=6;I>=0;I--)
NumToPlace[I]=(byte)((7-I)*32);
}

View File

@@ -191,7 +191,7 @@ internal static class Unpack20Local {
UnpAudioBlock=(BitField & 0x8000)!=0;
if ((BitField & 0x4000) != 0)
Utility.Memset(UnpOldTable20,0,UnpOldTable20.Length);
new Span<byte>(UnpOldTable20).Fill(0);
Inp.addbits(2);
uint TableSize;
@@ -296,7 +296,7 @@ internal static class Unpack20Local {
//memset(AudV,0,sizeof(AudV));
AudV = new AudioVariables[4];
Utility.Memset(UnpOldTable20, 0, UnpOldTable20.Length);
new Span<byte>(UnpOldTable20).Fill(0);
//memset(MD,0,sizeof(MD));
MD = new DecodeTable[4];
}

View File

@@ -30,7 +30,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
// Check TablesRead5 to be sure that we read tables at least once
// regardless of current block header TablePresent flag.
// So we can safefly use these tables below.
if (!ReadBlockHeader(Inp,ref BlockHeader) ||
if (!ReadBlockHeader(Inp,ref BlockHeader) ||
!ReadTables(Inp,ref BlockHeader, ref BlockTables) || !TablesRead5)
return;
}
@@ -45,8 +45,8 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
// We use 'while', because for empty block containing only Huffman table,
// we'll be on the block border once again just after reading the table.
while (Inp.InAddr>BlockHeader.BlockStart+BlockHeader.BlockSize-1 ||
Inp.InAddr==BlockHeader.BlockStart+BlockHeader.BlockSize-1 &&
while (Inp.InAddr>BlockHeader.BlockStart+BlockHeader.BlockSize-1 ||
Inp.InAddr==BlockHeader.BlockStart+BlockHeader.BlockSize-1 &&
Inp.InBit>=BlockHeader.BlockBitSize)
{
if (BlockHeader.LastBlockInFile)
@@ -415,7 +415,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
// Choose the nearest among WriteBorder and WrPtr actual written border.
// If border is equal to UnpPtr, it means that we have MaxWinSize data ahead.
if (WriteBorder==UnpPtr ||
if (WriteBorder==UnpPtr ||
WrPtr!=UnpPtr && ((WrPtr-UnpPtr)&MaxWinMask)<((WriteBorder-UnpPtr)&MaxWinMask))
WriteBorder=WrPtr;
}
@@ -563,11 +563,11 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
if (!UnpReadBuf())
return false;
Inp.faddbits((uint)((8-Inp.InBit)&7));
byte BlockFlags=(byte)(Inp.fgetbits()>>8);
Inp.faddbits(8);
uint ByteCount=(uint)(((BlockFlags>>3)&3)+1); // Block size byte count.
if (ByteCount==4)
return false;

View File

@@ -196,7 +196,7 @@ public Unpack(/* ComprDataIO *DataIO */)
{
if (!Solid)
{
Utility.Memset<uint>(OldDist, 0, OldDist.Length);
new Span<uint>(OldDist).Fill(0);
OldDistPtr=0;
LastDist=LastLength=0;
// memset(Window,0,MaxWinSize);
@@ -246,7 +246,7 @@ public Unpack(/* ComprDataIO *DataIO */)
// Set the entire DecodeNum to zero.
//memset(Dec->DecodeNum,0,Size*sizeof(*Dec->DecodeNum));
Utility.FillFast<ushort>(Dec.DecodeNum, 0);
new Span<ushort>(Dec.DecodeNum).Fill(0);
// Initialize not really used entry for zero length code.
Dec.DecodePos[0]=0;
@@ -272,7 +272,7 @@ public Unpack(/* ComprDataIO *DataIO */)
Dec.DecodeLen[I]=(uint)LeftAligned;
// Every item of this array contains the sum of all preceding items.
// So it contains the start position in code list for every bit length.
// So it contains the start position in code list for every bit length.
Dec.DecodePos[I]=Dec.DecodePos[I-1]+LengthCount[I-1];
}
@@ -337,7 +337,7 @@ public Unpack(/* ComprDataIO *DataIO */)
uint BitField=Code<<(int)(16-Dec.QuickBits);
// Prepare the table for quick decoding of bit lengths.
// Find the upper limit for current bit field and adjust the bit length
// accordingly if necessary.
while (CurBitLength<Dec.DecodeLen.Length && BitField>=Dec.DecodeLen[CurBitLength])

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.Rar.VM
{
@@ -72,9 +72,9 @@ namespace SharpCompress.Compressors.Rar.VM
}
if (IsVMMem(mem))
{
return DataConverter.LittleEndian.GetInt32(mem, offset);
return BinaryPrimitives.ReadInt32LittleEndian(mem.AsSpan(offset));
}
return DataConverter.BigEndian.GetInt32(mem, offset);
return BinaryPrimitives.ReadInt32BigEndian(mem.AsSpan(offset));
}
private void SetValue(bool byteMode, byte[] mem, int offset, int value)
@@ -94,11 +94,11 @@ namespace SharpCompress.Compressors.Rar.VM
{
if (IsVMMem(mem))
{
DataConverter.LittleEndian.PutBytes(mem, offset, value);
BinaryPrimitives.WriteInt32LittleEndian(mem.AsSpan(offset), value);
}
else
{
DataConverter.BigEndian.PutBytes(mem, offset, value);
BinaryPrimitives.WriteInt32BigEndian(mem.AsSpan(offset), value);
}
}
@@ -120,12 +120,12 @@ namespace SharpCompress.Compressors.Rar.VM
if (cmdOp.Type == VMOpType.VM_OPREGMEM)
{
int pos = (cmdOp.Offset + cmdOp.Base) & VM_MEMMASK;
ret = DataConverter.LittleEndian.GetInt32(Mem, pos);
ret = BinaryPrimitives.ReadInt32LittleEndian(Mem.AsSpan(pos));
}
else
{
int pos = cmdOp.Offset;
ret = DataConverter.LittleEndian.GetInt32(Mem, pos);
ret = BinaryPrimitives.ReadInt32LittleEndian(Mem.AsSpan(pos));
}
return ret;
}
@@ -190,12 +190,12 @@ namespace SharpCompress.Compressors.Rar.VM
{
//prg.GlobalData.Clear();
// ->GlobalData.Add(dataSize+VM_FIXEDGLOBALSIZE);
prg.GlobalData.SetSize(dataSize + VM_FIXEDGLOBALSIZE);
prg.GlobalData.Capacity = dataSize + VM_FIXEDGLOBALSIZE;
for (int i = 0; i < dataSize + VM_FIXEDGLOBALSIZE; i++)
// memcpy(&Prg->GlobalData[0],&Mem[VM_GLOBALMEMADDR],DataSize+VM_FIXEDGLOBALSIZE);
{
prg.GlobalData[i] = Mem[VM_GLOBALMEMADDR + i];
prg.GlobalData.Add(Mem[VM_GLOBALMEMADDR + i]);
}
}
}
@@ -1449,4 +1449,4 @@ namespace SharpCompress.Compressors.Rar.VM
}
//
}
}

View File

@@ -51,5 +51,10 @@ namespace SharpCompress.Compressors.Xz.Filters
{
return BaseStream.Read(buffer, offset, count);
}
public override int ReadByte()
{
return BaseStream.ReadByte();
}
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -47,6 +47,12 @@ namespace SharpCompress.Crypto
hash = CalculateCrc(table, hash, buffer, offset, count);
}
public override void WriteByte(byte value)
{
stream.WriteByte(value);
hash = CalculateCrc(table, hash, value);
}
public override bool CanRead => stream.CanRead;
public override bool CanSeek => false;
public override bool CanWrite => stream.CanWrite;
@@ -98,9 +104,16 @@ namespace SharpCompress.Crypto
unchecked
{
for (int i = offset, end = offset + count; i < end; i++)
crc = (crc >> 8) ^ table[(crc ^ buffer[i]) & 0xFF];
{
crc = CalculateCrc(table, crc, buffer[i]);
}
}
return crc;
}
private static uint CalculateCrc(uint[] table, uint crc, byte b)
{
return (crc >> 8) ^ table[(crc ^ b) & 0xFF];
}
}
}

View File

@@ -1,18 +0,0 @@
#if NET35
using System;
namespace SharpCompress
{
internal static class EnumExtensions
{
public static bool HasFlag(this Enum enumRef, Enum flag)
{
long value = Convert.ToInt64(enumRef);
long flagVal = Convert.ToInt64(flag);
return (value & flagVal) == flagVal;
}
}
}
#endif

Some files were not shown because too many files have changed in this diff Show More