Compare commits

...

102 Commits
0.21 ... 0.24

Author SHA1 Message Date
Adam Hathcock
5072a0f6f5 Merge pull request #471 from adamhathcock/release-024
Bump version and dependencies
2019-08-20 20:36:38 +01:00
Adam Hathcock
357dff1403 Bump version and dependencies 2019-08-20 14:29:47 -05:00
Adam Hathcock
a2bd66ded8 Merge pull request #460 from itn3000/tar-fix-garbled2
fix filename garbling in tar(#414)
2019-06-27 12:16:53 +01:00
itn3000
6bfa3c25a4 add more comments 2019-06-27 20:01:40 +09:00
itn3000
1ea9ab72c1 add comment for subtracting 2019-06-27 19:59:16 +09:00
itn3000
07c42b8725 replace magic number 2019-06-27 10:59:21 +09:00
itn3000
70392c32e2 use Buffer.BlockCopy for performance 2019-06-27 09:47:26 +09:00
itn3000
9b4b2a9f7c considering encoding in processing filename(#414)
modify test tar archive because it was not expected one.
(expected "тест.txt" in encoding 866, but actual is omitted upper byte)
2019-06-26 17:34:12 +09:00
Adam Hathcock
d3dd708b58 Merge pull request #457 from DannyBoyk/issue_456_zip_bounded_substreams_data_descriptors
Return a bounded substream when data descriptors are used in seekable zips
2019-06-04 13:42:24 +01:00
Daniel Nash
af264cdc58 Return a bounded substream when data descriptors are used in seekable zips 2019-06-04 08:31:42 -04:00
Adam Hathcock
cfd6df976f Merge pull request #455 from DannyBoyk/issue_454_zip_bad_extra_field
Handle a bad extra field in a local file header in zip files
2019-06-04 09:24:55 +01:00
Daniel Nash
b2bd20b47e Handle a bad extra field in a local file header in zip files 2019-06-03 13:02:28 -04:00
Adam Hathcock
ffea093e95 Merge pull request #453 from Lssikkes/master
Fix for clients failing on just having a 64 bit offset in ZIP64
2019-05-24 19:33:59 +01:00
Leroy Sikkes
78eb8fcf92 Fix for clients that don't support ZIP64 standard correctly in case headers are only pointed to in ZIP64 directory structure 2019-05-24 18:27:49 +02:00
Adam Hathcock
a052956881 Merge pull request #452 from Lssikkes/master
Various fixes for ZIP64 writer (zero byte entries, 32 bit where supported)
2019-05-24 16:17:48 +01:00
Lssikkes
9319ea6992 Updated ZIP64 writer to write 32 bit values to directory entries for better compatibility.
Support for zero byte files without corruption errors from WinRAR/7-zip
2019-05-24 16:14:30 +02:00
Adam Hathcock
4e5b70dbfa Merge pull request #444 from eugeny-trunin/mem-opt
Memory and speed optimization
2019-03-20 15:13:00 +00:00
evgeny
c68eaa8397 Memory and speed optimization 2019-03-20 17:46:57 +03:00
Adam Hathcock
bbb7c85ba7 Merge pull request #442 from turbolocust/master
Fix: ArchiveEncoding was ignored in TarWriterOptions
2019-03-19 08:31:31 +00:00
Matthias Fussenegger
8174359228 Fix: ArchiveEncoding was ignored in TarWriterOptions 2019-03-18 18:25:00 +01:00
Adam Hathcock
acf66c5195 Merge pull request #440 from adamhathcock/release-023
Bump release for 0.23 and update dependencies
2019-02-27 12:57:30 +00:00
Adam Hathcock
880c9fa97a Bump release and update dependencies 2019-02-27 12:55:16 +00:00
Adam Hathcock
e5c111f2be Merge pull request #436 from Numpsy/populate_zip_volume_comment
Changes to populate ZipArchive.ZipVolumne.Comment
2019-01-14 08:14:03 +00:00
Richard Webb
4e9cd064dd Unit test to show reading of a Zip volume/archive comment 2019-01-13 21:05:55 +00:00
Richard Webb
12a6d3977e Return the DirectoryEndHeader from SeekableZipHeaderFactory.ReadSeekable so that it can be used by ZipArchive 2018-12-14 22:44:44 +00:00
Adam Hathcock
a95bbaf820 Merge pull request #423 from markfinal/tarsymlink
Tar symlink support
2018-11-05 08:31:48 +00:00
Mark Final
70bafa653b Tar symlink extraction
Extended SharpCompress.Common.ExtractionOptions with a delegate to write symbolic links. If it is null, and a symbolic link is encountered, an exception is thrown.
Removed Mono.Posix.NETStandard from the library, but added to the .NET Core 2.1 test application.
Extended the test to implement the delegate.
2018-11-03 09:45:12 +00:00
Mark Final
3f4338489c Removed unnecessary code 2018-11-01 21:57:49 +00:00
Mark Final
d91e58f2cc Refs #248. Refs #132. Added a test case of a tar containing symlinks
This is a source archive of the MoltenVK project from github, which is my use-case for SharpCompress.
I added a test case in the project, which should extract the tar, and validate any symlink targets with what the tar thinks it ought to be.
2018-11-01 21:51:14 +00:00
Mark Final
192b9c1e8b Ref #248. Ref #132. Tar reader support for symlinks for .NET standard 2 and Posix platforms
Extracts linkname from the tar header, and exposes this on IEntry as the LinkTarget (string) property. If an entry is not a symlink, then that property is null.

Uses Mono.Posix.NETStandard nuget to create a symlink. However, this is only applicable to .NET standard 2.0+. So far, unable to find a nuget that works for older versions.

Also, not sure what to do on Windows.
2018-11-01 21:48:51 +00:00
Adam Hathcock
0941239454 Merge pull request #417 from KyotoFox/fix-entrystream-flush
Allow Flush on EntryStream
2018-10-04 12:48:08 +01:00
Knut Ørland
53ad00cdc4 Use soft tabs 2018-10-04 13:13:14 +02:00
Knut Ørland
6dd5da48f7 Added test that calls EntryStream.Flush() 2018-10-04 13:08:53 +02:00
Knut Ørland
efae8328a9 Don't throw an exception when flushing an EntryStream
From Microsoft docs: “In a class derived from Stream that doesn't
support writing, Flush is typically implemented as an empty method to
ensure full compatibility with other Stream types since it's valid to
flush a read-only stream.”
2018-10-04 13:05:36 +02:00
Adam Hathcock
f1facc51de Merge pull request #409 from G-Research/RecogniseEmptyTarArchives
Recognise empty tar archives.
2018-09-25 13:20:59 +01:00
Adam Hathcock
a471ca6a76 Use Cake tool on circle. Update test packages 2018-08-31 09:27:04 +01:00
Elliot Prior
83f6690576 Recognise empty tar archives.
Currently, when ArchiveFactory.Open is called on an empty tar archive, it throws due to being unable to determine the stream type. This fix allows it to recognise empty tar files by checking for whether the filename is empty, the size is empty and the entry type is defined. Add a test to try opening an empty archive.
2018-08-16 10:25:47 +01:00
Adam Hathcock
1850ea67f6 Merge pull request #408 from majoreffort/master
Test and fix for #407
2018-07-24 09:42:03 +01:00
majoreffort
2fd6178aa9 Fixed length in TarHeader#WriteStringBytes 2018-07-23 19:58:37 +02:00
majoreffort
ec044e6f42 Added Tar test for 100 char filename issue. 2018-07-23 19:48:01 +02:00
Adam Hathcock
bd96279649 Merge pull request #404 from MattKotsenas/bugfix/idisposable
Enable parallel test execution
2018-07-12 19:53:50 +01:00
Matt Kotsenas
f7ad595945 Enable test parallelization and remove garbage collection workaround
Now that the sources of file locking are fixed, enable test parallelization
and the forced garbage collection workaround.

Lastly, remove the `IsLocked` check because it doesn't work in a
parallel test running world - the file may be locked due to another test
running.
2018-07-12 10:33:19 -07:00
Matt Kotsenas
93c0b91de9 Refactor TestSharpCompressWithEmptyStream
Refactor `TestSharpCompressWithEmptyStream` so it asserts that the files
and bytes are the same.
2018-07-12 10:32:08 -07:00
Matt Kotsenas
138038b08f Move RarReaderTests over to use ReaderFactory
- Refactor `RarReaderTests` to use `ReaderFactory`
- Update `ReaderTests.Read` to support Rar tests
2018-07-12 10:32:08 -07:00
Matt Kotsenas
e9a6fed607 FIXUP decouple UseReader from VerifyFiles 2018-07-11 16:53:34 -07:00
Matt Kotsenas
87a1440382 Decouple UseReader from VerifyFiles 2018-07-11 16:49:49 -07:00
Matt Kotsenas
3c2f4ebe9b Combine ForwardOnlyStream and NonSeekableStream
Delete `NonSeekableStream` used in Zip64 tests in favor
of `ForwardOnlyStream` used in Mocks.

Additionally, delete the `ForwardOnlyStream.ReadByte` implementation
as the implementation on the base Stream is sufficient.
2018-07-11 16:42:03 -07:00
Matt Kotsenas
933ffe7828 Remove unused code from ArchiveTests 2018-07-11 16:33:46 -07:00
Matt Kotsenas
7d20ba5243 Simplify RarHeaderTests 2018-07-11 16:21:19 -07:00
Matt Kotsenas
44dc36af48 Update ReaderTests base class to validate Dispose
Update the `ReaderTests` base class to validate that `Dispose` is
called appropriately in both the close and the leave open cases.
2018-07-11 16:21:19 -07:00
Matt Kotsenas
98558c5ba9 Refactor TestStream constructor
Refactor the `TestStream` constructor so by default it defers to
the underlying Stream
2018-07-11 16:21:19 -07:00
Matt Kotsenas
6c25322465 Follow best-practices for Dispose in Volume and ForwardOnlyStream
Update `Volume` and `ForwardOnlyStream` to follow the project's
general pattern and best-practices for `Dispose`
2018-07-11 16:21:19 -07:00
Matt Kotsenas
6d1d62fd32 Delete AppendingStream
`AppendingStream` is unused, so rather than refactor it, just delete it.
2018-07-11 16:21:19 -07:00
Matt Kotsenas
ee4ae661d7 Refactor ListeningStream
Refactor `ListeningStream`:

- Override of `WriteByte` was redundant and removed
- Make `Dispose` delegate to base class
2018-07-11 16:21:19 -07:00
Matt Kotsenas
0473ec1626 Open test archives as read
Update `RarHeaderFactoryTests` and `GZipArchiveTests` to open the test
readers as `FileAccess.Read` and `FileShare.Read` to prevent issues with
multiple test from trying to open exclusive access to files.
2018-07-11 16:21:19 -07:00
Matt Kotsenas
c6cf0d40ee Simplify ReaderTests
The `IEnumerable<string>` version of `ReaderTests` is unused, so delete
it to simplify the code.
2018-07-11 16:21:19 -07:00
Matt Kotsenas
4cd80e96f3 Simplify GZip bad compression test 2018-07-11 16:21:19 -07:00
Matt Kotsenas
16524717ba Fix Stream leak in ArchiveFactory
`ArchiveFactory.Open` has two overloads that take `string` or
`FileInfo` (string delegates to FileInfo). Both of these implementations
open a `Stream` with the default `ReaderOptions`, which leaves the
stream open, resulting in a leak.

The fix is to set `LeaveOpen` to `false` if no options were provided.
Note that if a user was providing options and `LeaveOpen` was set to
`true`, the code did and will still leak.
2018-07-11 16:21:19 -07:00
Matt Kotsenas
cab1ce3d0c Update sub-streams to uniformly inherit from NonDisposingStream
Update the sub-stream classes to all inherit from `NonDisposingStream`.
This allows them to correctly implement the `Dispose` pattern, and delegate
the actual disposal to `NonDisposingStream`.

In doing so, we need to remove some redundant overrides from
`NonDisposingStream`, otherwise `BufferedSubStream` would use the
overrides inherited from `NonDisposingStream` instead of the ones
inherited from `Stream` (i.e. delegate `ReadByte` to `Read`).
2018-07-11 16:17:49 -07:00
Matt Kotsenas
6c2e5e1164 Cleanup NonDisposingStream for reuse
- Remove the duplicate `GC.SuppressFinalization` call
(called in `System.IO.Stream`)
- Improve the `ThrowOnDispose` error message
2018-07-11 12:19:34 -07:00
Matt Kotsenas
c2bf540057 Close verification streams in TestBase.CompareArchivesByPath 2018-07-11 12:12:30 -07:00
Matt Kotsenas
a35c66e166 Move RewindableStreamTest.cs to the Streams/ folder 2018-07-10 12:07:33 -07:00
Matt Kotsenas
084c5e2686 Rename StreamTests.cs --> LzmaStreamTests.cs 2018-07-10 12:07:32 -07:00
Matt Kotsenas
6ae715b153 Move the TestStream and ForwardOnlyStream to Mocks folder
Move the `TestStream` and `ForwardOnlyStream` to Mocks/ to separate them
from the test classes.
2018-07-10 12:07:32 -07:00
Adam Hathcock
9c8692806a Merge pull request #403 from MattKotsenas/bugfix/parallel-tests
Fix and re-enable tests
2018-07-10 20:01:20 +01:00
Matt Kotsenas
2d87351d2a Add tests back to AppVeyor 2018-07-10 11:52:00 -07:00
Matt Kotsenas
3114afde0e Add workaround for in-use files
The `TestBase` is not always able to delete the scratch folder in
`Dispose()` because sometimes the files are still in use.

This problem appears to be leaked file handles (likely due to incorrect
handling of `IDisposable`). To avoid the problem for now, force a
garbage collection prior to deleting the scratch folder.
2018-07-10 11:49:38 -07:00
Matt Kotsenas
7b338511cc Create unique scratch path per test
Tests fail in Visual Studio because they try to reuse the same scratch
working space, and each test is responsible for resetting the space. To
simplify the test code:

1. Make `TestBase` `IDisposable` and have it create the scratch space
2. Remove `ResetScratch()` as it is now handled by the base class
3. Add a unique ID to each scratch space folder to prevent collisions
2018-07-10 11:46:44 -07:00
Adam Hathcock
09c27681e1 Merge pull request #402 from a764578566/master
file search support linq Pattern
2018-07-10 13:21:09 +01:00
zhoujr
4ebc1f82b7 file search support linq Pattern 2018-07-10 19:58:59 +08:00
Adam Hathcock
4640ca497a Merge pull request #400 from MattKotsenas/feature/avoid-exception-in-readerfactory
Avoid throwing NotSupportedException in ReaderFactory hot path
2018-07-10 08:47:13 +01:00
Matt Kotsenas
bebccaae28 Avoid throwing NotSupportedException in ReaderFactory hot path
`ReaderFactory.Open()` calls `ZipArchive.IsZipFile()` to determine if
the `Stream` is a zip archive, which calls into
`ZipHeaderFactory.ReadHeader()`, which throws a `NotSupportedException`
when the `Stream` is not a zip archive.

To be clear, this exception is caught and `IsZipFile()` returns `false`,
but when called in a hot-path, these exceptions can become expensive.

To address this issue, `ReadHeader` now returns `null` in the default
case instead of throwing. All callsites were already checking for and
handling `null`, so no behavior changes.
2018-07-09 18:44:46 -07:00
Adam Hathcock
7ee53373c6 Remove tests as AppVeyor can’t handle them at the moment 2018-07-09 09:05:10 +01:00
Adam Hathcock
d577fe1ac6 Merge pull request #385 from twirpx/master
Fixed EFS flag handling
2018-07-09 08:48:34 +01:00
Adam Hathcock
9f976aaf78 Merge branch 'master' into master 2018-07-09 08:48:26 +01:00
Adam Hathcock
8a7d7e366f Merge pull request #396 from andreas-eriksson/Rar5IsEncrypted
Correctly set IsEncrypted for entries in Rar5.
2018-07-09 08:48:12 +01:00
Adam Hathcock
540ab1c6fa Merge branch 'master' into master 2018-07-09 08:47:32 +01:00
Adam Hathcock
6792afbdb1 Merge branch 'master' into Rar5IsEncrypted 2018-07-09 08:44:32 +01:00
Adam Hathcock
e5a7185671 Mark for 0.22 2018-07-09 08:42:45 +01:00
Adam Hathcock
cdaf453b2d Update dependencies and tests to .NET Core 2.1 2018-07-09 08:39:37 +01:00
Andreas Eriksson
f9cc80e1de Correctly set IsEncrypted for entries in Rar5. 2018-06-29 15:51:40 +02:00
Adam Hathcock
7beff9e83c Merge pull request #395 from adamhathcock/zip-slip-readers
Zip slip for Readers
2018-06-28 11:56:44 +01:00
Adam Hathcock
8f49f1b6f8 Merge remote-tracking branch 'origin/master' into zip-slip-readers 2018-06-28 11:52:43 +01:00
Adam Hathcock
7e336a0247 Slip in new SDK 2018-06-28 11:51:17 +01:00
Adam Hathcock
e37e8bdadc Move path handling for extraction to be common
Reader and Archive now share more extraction logic
2018-06-28 11:46:51 +01:00
Adam Hathcock
40bd61b16b Merge pull request #389 from frankyjuang/patch-1
Fix comment
2018-06-08 08:59:52 +01:00
Juang, Yi-Lin
87fbb45099 Fix comment 2018-06-08 11:27:43 +08:00
twirpx
e822f9a95c Tests fixed to use explicit use of 866 encoding because of usage file named in Russian in several tests 2018-05-30 22:17:27 +05:00
twirpx
8a5a9159e1 Fixed DirectoryEntryHeader Name/Comment decoding in case of EFS flags set 2018-05-30 21:47:31 +05:00
twirpx
73b3c6b419 Merge branch 'master' of https://github.com/adamhathcock/sharpcompress 2018-05-30 20:28:15 +05:00
Adam Hathcock
f9bd7ebdb0 Merge pull request #384 from MrJul/perf-readbyte
Implemented ReadByte/WriteByte on streams to improve performance
2018-05-28 09:21:28 +01:00
Julien Lebosquain
540618c062 Implemented ReadByte/WriteByte on streams to improve performance 2018-05-27 16:31:44 +02:00
Adam Hathcock
9e96dec8c9 Merge pull request #383 from itn3000/add-filename-encoding-example
add example for custom file encoding
2018-05-23 09:14:46 +01:00
itn3000
7b7af612ba add example for custom file encoding 2018-05-23 09:46:36 +09:00
Adam Hathcock
3a747ba87e Update USAGE with new stream handling 2018-05-16 08:51:33 +01:00
Adam Hathcock
87e57e3a9a Mark for 0.21.1 2018-05-15 09:14:56 +01:00
Adam Hathcock
785d0dcebf Merge pull request #381 from adamhathcock/issue-380
Allow forced encoding to override default encoding
2018-05-15 09:13:16 +01:00
Adam Hathcock
2314776f55 Also check for CustomDecoder 2018-05-15 08:28:11 +01:00
Adam Hathcock
473f5d8189 Make GetDecoder use GetEncoding for forced 2018-05-14 16:20:57 +01:00
Adam Hathcock
be971cb6f7 Allow forced encoding to override default encoding 2018-05-14 16:08:31 +01:00
twirpx
149f5e4fb5 Minor fixes 2017-08-22 11:46:32 +05:00
twirpx
1793fc949d Fixed bug: Passing default ReaderOptions when creating ZipReader for solid extraction 2017-08-16 08:57:36 +05:00
82 changed files with 1092 additions and 756 deletions

View File

@@ -2,14 +2,15 @@ version: 2
jobs:
build:
docker:
- image: microsoft/dotnet:2.0.7-sdk-2.1.105
- image: microsoft/dotnet:2.2.104-sdk
steps:
- checkout
- run:
name: Install unzip
name: Install Cake
command: |
apt-get update
apt-get install -y unzip
dotnet tool install -g Cake.Tool
echo 'export PATH=$PATH:/root/.dotnet/tools' >> $BASH_ENV
source $BASH_ENV
- run:
name: Build
command: ./build.sh
command: dotnet cake build.cake

View File

@@ -122,6 +122,7 @@
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpKeepExistingMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpPlaceEmbeddedOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpRenamePlacementToArrangementMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpUseContinuousIndentInsideBracesMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EAddAccessorOwnerDeclarationBracesMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002ECSharpPlaceAttributeOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateBlankLinesAroundFieldToBlankLinesAroundProperty/@EntryIndexedValue">True</s:Boolean>

View File

@@ -1,8 +1,8 @@
# SharpCompress Usage
## Stream Rules
## Stream Rules (changed with 0.21)
When dealing with Streams, the rule should be that you don't close a stream you didn't create. This, in effect, should mean you should always put a Stream in a using block to dispose it.
When dealing with Streams, the rule should be that you don't close a stream you didn't create. This, in effect, should mean you should always put a Stream in a using block to dispose it.
However, the .NET Framework often has classes that will dispose streams by default to make things "easy" like the following:
@@ -23,13 +23,12 @@ using (var reader = new StreamReader(fileStream))
}
```
To deal with the "correct" rules as well as the expectations of users, I've decided on this:
* When writing, leave streams open.
* When reading, close streams
To deal with the "correct" rules as well as the expectations of users, I've decided to always close wrapped streams as of 0.21.
To be explicit though, consider always using the overloads that use `ReaderOptions` or `WriterOptions` and explicitly set `LeaveStreamOpen` the way you want.
If using Compression Stream classes directly and you don't want the wrapped stream to be closed. Use the `NonDisposingStream` as a wrapped to prevent the stream being disposed. The change in 0.21 simplified a lot even though the usage is a bit more convoluted.
## Samples
Also, look over the tests for more thorough [examples](https://github.com/adamhathcock/sharpcompress/tree/master/tests/SharpCompress.Test)
@@ -128,3 +127,20 @@ using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, new WriterOption
writer.WriteAll("D:\\temp", "*", SearchOption.AllDirectories);
}
```
### Extract zip which has non-utf8 encoded filename(cp932)
```C#
var opts = new SharpCompress.Readers.ReaderOptions();
var encoding = Encoding.GetEncoding(932);
opts.ArchiveEncoding = new SharpCompress.Common.ArchiveEncoding();
opts.ArchiveEncoding.CustomDecoder = (data, x, y) =>
{
return encoding.GetString(data);
};
var tr = SharpCompress.Archives.Zip.ZipArchive.Open("test.zip", opts);
foreach(var entry in tr.Entries)
{
Console.WriteLine($"{entry.Key}");
}
```

View File

@@ -49,7 +49,7 @@ Task("Test")
var settings = new DotNetCoreTestSettings
{
Configuration = "Release",
Framework = "netcoreapp2.0"
Framework = "netcoreapp2.2"
};
DotNetCoreTest(file.ToString(), settings);
}

View File

@@ -92,7 +92,7 @@ namespace SharpCompress.Archives
public static IArchive Open(string filePath, ReaderOptions options = null)
{
filePath.CheckNotNullOrEmpty("filePath");
return Open(new FileInfo(filePath), options ?? new ReaderOptions());
return Open(new FileInfo(filePath), options);
}
/// <summary>
@@ -103,36 +103,31 @@ namespace SharpCompress.Archives
public static IArchive Open(FileInfo fileInfo, ReaderOptions options = null)
{
fileInfo.CheckNotNull("fileInfo");
options = options ?? new ReaderOptions();
options = options ?? new ReaderOptions { LeaveStreamOpen = false };
using (var stream = fileInfo.OpenRead())
{
if (ZipArchive.IsZipFile(stream, null))
{
stream.Dispose();
return ZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (SevenZipArchive.IsSevenZipFile(stream))
{
stream.Dispose();
return SevenZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (GZipArchive.IsGZipFile(stream))
{
stream.Dispose();
return GZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (RarArchive.IsRarFile(stream, options))
{
stream.Dispose();
return RarArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (TarArchive.IsTarFile(stream))
{
stream.Dispose();
return TarArchive.Open(fileInfo, options);
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");

View File

@@ -1,7 +1,6 @@
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives
{
@@ -46,50 +45,8 @@ namespace SharpCompress.Archives
public static void WriteToDirectory(this IArchiveEntry entry, string destinationDirectory,
ExtractionOptions options = null)
{
string destinationFileName;
string file = Path.GetFileName(entry.Key);
string fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
options = options ?? new ExtractionOptions()
{
Overwrite = true
};
if (options.ExtractFullPath)
{
string folder = Path.GetDirectoryName(entry.Key);
string destdir = Path.GetFullPath(
Path.Combine(fullDestinationDirectoryPath, folder)
);
if (!Directory.Exists(destdir))
{
if (!destdir.StartsWith(fullDestinationDirectoryPath))
{
throw new ExtractionException("Entry is trying to create a directory outside of the destination directory.");
}
Directory.CreateDirectory(destdir);
}
destinationFileName = Path.Combine(destdir, file);
}
else
{
destinationFileName = Path.Combine(fullDestinationDirectoryPath, file);
}
if (!entry.IsDirectory)
{
destinationFileName = Path.GetFullPath(destinationFileName);
if (!destinationFileName.StartsWith(fullDestinationDirectoryPath))
{
throw new ExtractionException("Entry is trying to write a file outside of the destination directory.");
}
entry.WriteToFile(destinationFileName, options);
}
ExtractionMethods.WriteEntryToDirectory(entry, destinationDirectory, options,
entry.WriteToFile);
}
/// <summary>
@@ -98,23 +55,15 @@ namespace SharpCompress.Archives
public static void WriteToFile(this IArchiveEntry entry, string destinationFileName,
ExtractionOptions options = null)
{
FileMode fm = FileMode.Create;
options = options ?? new ExtractionOptions()
{
Overwrite = true
};
if (!options.Overwrite)
{
fm = FileMode.CreateNew;
}
using (FileStream fs = File.Open(destinationFileName, fm))
{
entry.WriteTo(fs);
}
entry.PreserveExtractionOptions(destinationFileName, options);
ExtractionMethods.WriteEntryToFile(entry, destinationFileName, options,
(x, fm) =>
{
using (FileStream fs = File.Open(destinationFileName, fm))
{
entry.WriteTo(fs);
}
});
}
#endif
}

View File

@@ -1,6 +1,6 @@
#if !NO_FILE
using System.Linq;
using SharpCompress.Readers;
using SharpCompress.Common;
#endif

View File

@@ -75,9 +75,10 @@ namespace SharpCompress.Archives.Tar
{
try
{
TarHeader tar = new TarHeader(new ArchiveEncoding());
tar.Read(new BinaryReader(stream));
return tar.Name.Length > 0 && Enum.IsDefined(typeof(EntryType), tar.EntryType);
TarHeader tarHeader = new TarHeader(new ArchiveEncoding());
bool readSucceeded = tarHeader.Read(new BinaryReader(stream));
bool isEmptyArchive = tarHeader.Name.Length == 0 && tarHeader.Size == 0 && Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;
}
catch
{

View File

@@ -28,8 +28,13 @@ namespace SharpCompress.Common
public ArchiveEncoding()
{
Default = Encoding.UTF8;
Password = Encoding.UTF8;
#if NETSTANDARD1_0
Default = Encoding.GetEncoding("cp437");
Password = Encoding.GetEncoding("cp437");
#else
Default = Encoding.GetEncoding(437);
Password = Encoding.GetEncoding(437);
#endif
}
#if NETSTANDARD1_3 || NETSTANDARD2_0
@@ -44,21 +49,16 @@ namespace SharpCompress.Common
return Decode(bytes, 0, bytes.Length);
}
public string Decode437(byte[] bytes)
{
#if NETSTANDARD1_0
return Decode(bytes, 0, bytes.Length);
#else
var extendedAsciiEncoding = Encoding.GetEncoding(437);
return extendedAsciiEncoding.GetString(bytes, 0, bytes.Length);
#endif
}
public string Decode(byte[] bytes, int start, int length)
{
return GetDecoder().Invoke(bytes, start, length);
}
public string DecodeUTF8(byte[] bytes)
{
return Encoding.UTF8.GetString(bytes, 0, bytes.Length);
}
public byte[] Encode(string str)
{
return GetEncoding().GetBytes(str);
@@ -71,7 +71,7 @@ namespace SharpCompress.Common
public Func<byte[], int, int, string> GetDecoder()
{
return CustomDecoder ?? ((bytes, index, count) => (Default ?? Encoding.UTF8).GetString(bytes, index, count));
return CustomDecoder ?? ((bytes, index, count) => GetEncoding().GetString(bytes, index, count));
}
}
}

View File

@@ -15,6 +15,11 @@ namespace SharpCompress.Common
/// </summary>
public abstract string Key { get; }
/// <summary>
/// The target of a symlink entry internal to the Archive. Will be null if not a symlink.
/// </summary>
public abstract string LinkTarget { get; }
/// <summary>
/// The compressed file size
/// </summary>
@@ -56,7 +61,7 @@ namespace SharpCompress.Common
public abstract bool IsEncrypted { get; }
/// <summary>
/// Entry is password protected and encrypted and cannot be extracted.
/// Entry is directory.
/// </summary>
public abstract bool IsDirectory { get; }
@@ -83,4 +88,4 @@ namespace SharpCompress.Common
/// </summary>
public virtual int? Attrib => throw new NotImplementedException();
}
}
}

View File

@@ -47,9 +47,7 @@ namespace SharpCompress.Common
public override bool CanWrite => false;
public override void Flush()
{
throw new NotSupportedException();
public override void Flush() {
}
public override long Length => _stream.Length;
@@ -66,6 +64,16 @@ namespace SharpCompress.Common
return read;
}
public override int ReadByte()
{
int value = _stream.ReadByte();
if (value == -1)
{
_completed = true;
}
return value;
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException();

View File

@@ -0,0 +1,98 @@
#if !NO_FILE
using System;
using System.IO;
#endif
namespace SharpCompress.Common
{
internal static class ExtractionMethods
{
#if !NO_FILE
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteEntryToDirectory(IEntry entry, string destinationDirectory,
ExtractionOptions options, Action<string, ExtractionOptions> write)
{
string destinationFileName;
string file = Path.GetFileName(entry.Key);
string fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
options = options ?? new ExtractionOptions()
{
Overwrite = true
};
if (options.ExtractFullPath)
{
string folder = Path.GetDirectoryName(entry.Key);
string destdir = Path.GetFullPath(
Path.Combine(fullDestinationDirectoryPath, folder)
);
if (!Directory.Exists(destdir))
{
if (!destdir.StartsWith(fullDestinationDirectoryPath))
{
throw new ExtractionException("Entry is trying to create a directory outside of the destination directory.");
}
Directory.CreateDirectory(destdir);
}
destinationFileName = Path.Combine(destdir, file);
}
else
{
destinationFileName = Path.Combine(fullDestinationDirectoryPath, file);
}
if (!entry.IsDirectory)
{
destinationFileName = Path.GetFullPath(destinationFileName);
if (!destinationFileName.StartsWith(fullDestinationDirectoryPath))
{
throw new ExtractionException("Entry is trying to write a file outside of the destination directory.");
}
write(destinationFileName, options);
}
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
{
Directory.CreateDirectory(destinationFileName);
}
}
public static void WriteEntryToFile(IEntry entry, string destinationFileName,
ExtractionOptions options,
Action<string, FileMode> openAndWrite)
{
if (entry.LinkTarget != null)
{
if (null == options.WriteSymbolicLink)
{
throw new ExtractionException("Entry is a symbolic link but ExtractionOptions.WriteSymbolicLink delegate is null");
}
options.WriteSymbolicLink(destinationFileName, entry.LinkTarget);
}
else
{
FileMode fm = FileMode.Create;
options = options ?? new ExtractionOptions()
{
Overwrite = true
};
if (!options.Overwrite)
{
fm = FileMode.CreateNew;
}
openAndWrite(destinationFileName, fm);
entry.PreserveExtractionOptions(destinationFileName, options);
}
}
#endif
}
}

View File

@@ -1,4 +1,4 @@
namespace SharpCompress.Readers
namespace SharpCompress.Common
{
public class ExtractionOptions
{
@@ -21,5 +21,14 @@
/// preserve windows file attributes
/// </summary>
public bool PreserveAttributes { get; set; }
/// <summary>
/// Delegate for writing symbolic links to disk.
/// sourcePath is where the symlink is created.
/// targetPath is what the symlink refers to.
/// </summary>
public delegate void SymbolicLinkWriterDelegate(string sourcePath, string targetPath);
public SymbolicLinkWriterDelegate WriteSymbolicLink;
}
}

View File

@@ -20,6 +20,8 @@ namespace SharpCompress.Common.GZip
public override string Key => _filePart.FilePartName;
public override string LinkTarget => null;
public override long CompressedSize => 0;
public override long Size => 0;

View File

@@ -10,6 +10,7 @@ namespace SharpCompress.Common
long Crc { get; }
DateTime? CreatedTime { get; }
string Key { get; }
string LinkTarget { get; }
bool IsDirectory { get; }
bool IsEncrypted { get; }
bool IsSplitAfter { get; }

View File

@@ -104,24 +104,27 @@ namespace SharpCompress.Common.Rar.Headers
throw new InvalidFormatException("rar5 header size / extra size inconsistency");
}
isEncryptedRar5 = false;
while (RemainingHeaderBytes(reader) > 0) {
var size = reader.ReadRarVIntUInt16();
int n = RemainingHeaderBytes(reader);
var type = reader.ReadRarVIntUInt16();
switch (type) {
//TODO
// case 1: // file encryption
// {
// var version = reader.ReadRarVIntByte();
// if (version != 0) throw new InvalidFormatException("unknown encryption algorithm "+ version);
//
// }
// break;
// case 2: // file hash
// {
//
// }
// break;
//TODO
case 1: // file encryption
{
isEncryptedRar5 = true;
//var version = reader.ReadRarVIntByte();
//if (version != 0) throw new InvalidFormatException("unknown encryption algorithm " + version);
}
break;
// case 2: // file hash
// {
//
// }
// break;
case 3: // file time
{
ushort flags = reader.ReadRarVIntUInt16();
@@ -435,8 +438,8 @@ namespace SharpCompress.Common.Rar.Headers
public bool IsDirectory => HasFlag(IsRar5 ? FileFlagsV5.DIRECTORY : FileFlagsV4.DIRECTORY);
//!!! TODO rar5
public bool IsEncrypted => HasFlag(FileFlagsV4.PASSWORD);
private bool isEncryptedRar5 = false;
public bool IsEncrypted => IsRar5 ? isEncryptedRar5: HasFlag(FileFlagsV4.PASSWORD);
internal DateTime? FileLastModifiedTime { get; private set; }

View File

@@ -22,6 +22,8 @@ namespace SharpCompress.Common.Rar
/// </summary>
public override string Key => FileHeader.FileName;
public override string LinkTarget => null;
/// <summary>
/// The entry last modified time in the archive, if recorded
/// </summary>

View File

@@ -1,7 +1,6 @@
#if !NO_CRYPTO
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using SharpCompress.Crypto;
@@ -23,12 +22,6 @@ namespace SharpCompress.Common.Rar
_salt = salt;
}
private byte[] ComputeHash(byte[] input)
{
var sha = SHA1.Create();
return sha.ComputeHash(input);
}
private void Initialize()
{
@@ -47,28 +40,27 @@ namespace SharpCompress.Common.Rar
rawPassword[i + rawLength] = _salt[i];
}
const int noOfRounds = (1 << 18);
IList<byte> bytes = new List<byte>();
const int iblock = 3;
byte[] digest;
byte[] data = new byte[(rawPassword.Length + iblock) * noOfRounds];
//TODO slow code below, find ways to optimize
for (int i = 0; i < noOfRounds; i++)
{
bytes.AddRange(rawPassword);
rawPassword.CopyTo(data, i * (rawPassword.Length + iblock));
bytes.AddRange(new[]
data[i * (rawPassword.Length + iblock) + rawPassword.Length + 0] = (byte)i;
data[i * (rawPassword.Length + iblock) + rawPassword.Length + 1] = (byte)(i >> 8);
data[i * (rawPassword.Length + iblock) + rawPassword.Length + 2] = (byte)(i >> CRYPTO_BLOCK_SIZE);
if (i % (noOfRounds / CRYPTO_BLOCK_SIZE) == 0)
{
(byte) i, (byte) (i >> 8), (byte) (i >> CRYPTO_BLOCK_SIZE)
});
if (i%(noOfRounds/CRYPTO_BLOCK_SIZE) == 0)
{
digest = ComputeHash(bytes.ToArray());
_aesInitializationVector[i/(noOfRounds/CRYPTO_BLOCK_SIZE)] = digest[19];
digest = SHA1.Create().ComputeHash(data, 0, (i + 1) * (rawPassword.Length + iblock));
_aesInitializationVector[i / (noOfRounds / CRYPTO_BLOCK_SIZE)] = digest[19];
}
}
digest = ComputeHash(bytes.ToArray());
digest = SHA1.Create().ComputeHash(data);
//slow code ends
byte[] aesKey = new byte[CRYPTO_BLOCK_SIZE];

View File

@@ -18,6 +18,8 @@ namespace SharpCompress.Common.SevenZip
public override string Key => FilePart.Header.Name;
public override string LinkTarget => null;
public override long CompressedSize => 0;
public override long Size => FilePart.Header.Size;

View File

@@ -15,6 +15,7 @@ namespace SharpCompress.Common.Tar.Headers
}
internal string Name { get; set; }
internal string LinkName { get; set; }
//internal int Mode { get; set; }
//internal int UserId { get; set; }
@@ -38,16 +39,17 @@ namespace SharpCompress.Common.Tar.Headers
WriteOctalBytes(0, buffer, 116, 8); // group ID
//ArchiveEncoding.UTF8.GetBytes("magic").CopyTo(buffer, 257);
if (Name.Length > 100)
var nameByteCount = ArchiveEncoding.GetEncoding().GetByteCount(Name);
if (nameByteCount > 100)
{
// Set mock filename and filetype to indicate the next block is the actual name of the file
WriteStringBytes("././@LongLink", buffer, 0, 100);
buffer[156] = (byte)EntryType.LongName;
WriteOctalBytes(Name.Length + 1, buffer, 124, 12);
WriteOctalBytes(nameByteCount + 1, buffer, 124, 12);
}
else
{
WriteStringBytes(Name, buffer, 0, 100);
WriteStringBytes(ArchiveEncoding.Encode(Name), buffer, 0, 100);
WriteOctalBytes(Size, buffer, 124, 12);
var time = (long)(LastModifiedTime.ToUniversalTime() - EPOCH).TotalSeconds;
WriteOctalBytes(time, buffer, 136, 12);
@@ -68,10 +70,17 @@ namespace SharpCompress.Common.Tar.Headers
output.Write(buffer, 0, buffer.Length);
if (Name.Length > 100)
if (nameByteCount > 100)
{
WriteLongFilenameHeader(output);
Name = Name.Substring(0, 100);
// update to short name lower than 100 - [max bytes of one character].
// subtracting bytes is needed because preventing infinite loop(example code is here).
//
// var bytes = Encoding.UTF8.GetBytes(new string(0x3042, 100));
// var truncated = Encoding.UTF8.GetBytes(Encoding.UTF8.GetString(bytes, 0, 100));
//
// and then infinite recursion is occured in WriteLongFilenameHeader because truncated.Length is 102.
Name = ArchiveEncoding.Decode(ArchiveEncoding.Encode(Name), 0, 100 - ArchiveEncoding.GetEncoding().GetMaxByteCount(1));
Write(output);
}
}
@@ -98,6 +107,12 @@ namespace SharpCompress.Common.Tar.Headers
return false;
}
// for symlinks, additionally read the linkname
if (ReadEntryType(buffer) == EntryType.SymLink)
{
LinkName = ArchiveEncoding.Decode(buffer, 157, 100).TrimNulls();
}
if (ReadEntryType(buffer) == EntryType.LongName)
{
Name = ReadLongName(reader, buffer);
@@ -177,11 +192,22 @@ namespace SharpCompress.Common.Tar.Headers
return buffer;
}
private static void WriteStringBytes(byte[] name, byte[] buffer, int offset, int length)
{
int i = Math.Min(length, name.Length);
Buffer.BlockCopy(name, 0, buffer, offset, i);
// if Span<byte>.Fill can be used, it is more efficient
for (; i < length; ++i)
{
buffer[offset + i] = 0;
}
}
private static void WriteStringBytes(string name, byte[] buffer, int offset, int length)
{
int i;
for (i = 0; i < length - 1 && i < name.Length; ++i)
for (i = 0; i < length && i < name.Length; ++i)
{
buffer[offset + i] = (byte)name[i];
}
@@ -272,4 +298,4 @@ namespace SharpCompress.Common.Tar.Headers
public string Magic { get; set; }
}
}
}

View File

@@ -23,6 +23,8 @@ namespace SharpCompress.Common.Tar
public override string Key => _filePart.Header.Name;
public override string LinkTarget => _filePart.Header.LinkName;
public override long CompressedSize => _filePart.Header.Size;
public override long Size => _filePart.Header.Size;

View File

@@ -1,16 +1,16 @@
using System;
using SharpCompress.IO;
using System;
using System.IO;
namespace SharpCompress.Common.Tar
{
internal class TarReadOnlySubStream : Stream
internal class TarReadOnlySubStream : NonDisposingStream
{
private bool _isDisposed;
private long _amountRead;
public TarReadOnlySubStream(Stream stream, long bytesToRead)
public TarReadOnlySubStream(Stream stream, long bytesToRead) : base(stream, throwOnDispose: false)
{
Stream = stream;
BytesLeftToRead = bytesToRead;
}
@@ -36,12 +36,11 @@ namespace SharpCompress.Common.Tar
var buffer = new byte[skipBytes];
Stream.ReadFully(buffer);
}
base.Dispose(disposing);
}
private long BytesLeftToRead { get; set; }
public Stream Stream { get; }
public override bool CanRead => true;
public override bool CanSeek => false;
@@ -72,6 +71,22 @@ namespace SharpCompress.Common.Tar
return read;
}
public override int ReadByte()
{
if (BytesLeftToRead <= 0)
{
return -1;
}
int value = Stream.ReadByte();
if (value != -1)
{
--BytesLeftToRead;
++_amountRead;
}
return value;
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException();

View File

@@ -1,4 +1,5 @@
using System.IO;
using System;
using System.IO;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -33,15 +34,18 @@ namespace SharpCompress.Common
/// </summary>
public virtual bool IsMultiVolume => true;
private bool _disposed;
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
_actualStream.Dispose();
}
}
public void Dispose()
{
if (!_disposed)
{
_actualStream.Dispose();
_disposed = true;
}
Dispose(true);
GC.SuppressFinalize(this);
}
}
}

View File

@@ -34,15 +34,23 @@ namespace SharpCompress.Common.Zip.Headers
byte[] extra = reader.ReadBytes(extraLength);
byte[] comment = reader.ReadBytes(commentLength);
// According to .ZIP File Format Specification
//
// For example: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
//
// Bit 11: Language encoding flag (EFS). If this bit is set,
// the filename and comment fields for this file
// MUST be encoded using UTF-8. (see APPENDIX D)
if (Flags.HasFlag(HeaderFlags.Efs))
{
Name = ArchiveEncoding.Decode(name);
Comment = ArchiveEncoding.Decode(comment);
Name = ArchiveEncoding.DecodeUTF8(name);
Comment = ArchiveEncoding.DecodeUTF8(comment);
}
else
{
Name = ArchiveEncoding.Decode437(name);
Comment = ArchiveEncoding.Decode437(comment);
Name = ArchiveEncoding.Decode(name);
Comment = ArchiveEncoding.Decode(comment);
}
LoadExtra(extra);

View File

@@ -26,14 +26,21 @@ namespace SharpCompress.Common.Zip.Headers
byte[] name = reader.ReadBytes(nameLength);
byte[] extra = reader.ReadBytes(extraLength);
// According to .ZIP File Format Specification
//
// For example: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
//
// Bit 11: Language encoding flag (EFS). If this bit is set,
// the filename and comment fields for this file
// MUST be encoded using UTF-8. (see APPENDIX D)
if (Flags.HasFlag(HeaderFlags.Efs))
{
Name = ArchiveEncoding.Decode(name);
Name = ArchiveEncoding.DecodeUTF8(name);
}
else
{
// Use IBM Code Page 437 (IBM PC character encoding set)
Name = ArchiveEncoding.Decode437(name);
Name = ArchiveEncoding.Decode(name);
}
LoadExtra(extra);

View File

@@ -87,6 +87,15 @@ namespace SharpCompress.Common.Zip.Headers
}
ushort length = DataConverter.LittleEndian.GetUInt16(extra, i + 2);
// 7zip has this same kind of check to ignore extras blocks that don't conform to the standard 2-byte ID, 2-byte length, N-byte value.
// CPP/7Zip/Zip/ZipIn.cpp: CInArchive::ReadExtra
if (length > extra.Length)
{
// bad extras block
return;
}
byte[] data = new byte[length];
Buffer.BlockCopy(extra, i + 4, data, 0, length);
Extra.Add(LocalEntryHeaderExtraFactory.Create(type, length, data));
@@ -99,4 +108,4 @@ namespace SharpCompress.Common.Zip.Headers
internal bool IsZip64 => CompressedSize == uint.MaxValue;
}
}
}

View File

@@ -1,5 +1,6 @@
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip
{
@@ -7,11 +8,13 @@ namespace SharpCompress.Common.Zip
{
private bool _isLocalHeaderLoaded;
private readonly SeekableZipHeaderFactory _headerFactory;
private readonly DirectoryEntryHeader _directoryEntryHeader;
internal SeekableZipFilePart(SeekableZipHeaderFactory headerFactory, DirectoryEntryHeader header, Stream stream)
: base(header, stream)
{
_headerFactory = headerFactory;
_directoryEntryHeader = header;
}
internal override Stream GetCompressedStream()
@@ -36,6 +39,15 @@ namespace SharpCompress.Common.Zip
protected override Stream CreateBaseStream()
{
BaseStream.Position = Header.DataStartPosition.Value;
if ((Header.CompressedSize == 0)
&& FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor)
&& (_directoryEntryHeader?.HasData == true)
&& (_directoryEntryHeader?.CompressedSize != 0))
{
return new ReadOnlySubStream(BaseStream, _directoryEntryHeader.CompressedSize);
}
return BaseStream;
}
}

View File

@@ -17,7 +17,7 @@ namespace SharpCompress.Common.Zip
{
}
internal IEnumerable<DirectoryEntryHeader> ReadSeekableHeader(Stream stream)
internal IEnumerable<ZipHeader> ReadSeekableHeader(Stream stream)
{
var reader = new BinaryReader(stream);
@@ -51,16 +51,22 @@ namespace SharpCompress.Common.Zip
{
stream.Position = position;
uint signature = reader.ReadUInt32();
var directoryEntryHeader = ReadHeader(signature, reader, _zip64) as DirectoryEntryHeader;
var nextHeader = ReadHeader(signature, reader, _zip64);
position = stream.Position;
if (directoryEntryHeader == null)
{
yield break;
}
//entry could be zero bytes so we need to know that.
directoryEntryHeader.HasData = directoryEntryHeader.CompressedSize != 0;
yield return directoryEntryHeader;
if (nextHeader == null)
yield break;
if (nextHeader is DirectoryEntryHeader entryHeader)
{
//entry could be zero bytes so we need to know that.
entryHeader.HasData = entryHeader.CompressedSize != 0;
yield return entryHeader;
}
else if (nextHeader is DirectoryEndHeader endHeader)
{
yield return endHeader;
}
}
}

View File

@@ -49,6 +49,7 @@ namespace SharpCompress.Common.Zip
_lastEntryHeader = null;
uint headerBytes = reader.ReadUInt32();
header = ReadHeader(headerBytes, reader);
if (header == null) { yield break; }
//entry could be zero bytes so we need to know that.
if (header.ZipHeaderType == ZipHeaderType.LocalEntry)

View File

@@ -60,6 +60,8 @@ namespace SharpCompress.Common.Zip
public override string Key => _filePart.Header.Name;
public override string LinkTarget => null;
public override long CompressedSize => _filePart.Header.CompressedSize;
public override long Size => _filePart.Header.UncompressedSize;

View File

@@ -91,7 +91,7 @@ namespace SharpCompress.Common.Zip
return entry;
}
default:
throw new NotSupportedException("Unknown header: " + headerBytes);
return null;
}
}

View File

@@ -67,6 +67,11 @@ namespace SharpCompress.Compressors.BZip2
return stream.Read(buffer, offset, count);
}
public override int ReadByte()
{
return stream.ReadByte();
}
public override long Seek(long offset, SeekOrigin origin)
{
return stream.Seek(offset, origin);
@@ -82,6 +87,11 @@ namespace SharpCompress.Compressors.BZip2
stream.Write(buffer, offset, count);
}
public override void WriteByte(byte value)
{
stream.WriteByte(value);
}
/// <summary>
/// Consumes two bytes to test if there is a BZip2 header
/// </summary>

View File

@@ -1077,6 +1077,10 @@ namespace SharpCompress.Compressors.BZip2
{
}
public override void WriteByte(byte value)
{
}
public override bool CanRead => true;
public override bool CanSeek => false;

View File

@@ -1929,6 +1929,11 @@ namespace SharpCompress.Compressors.BZip2
return 0;
}
public override int ReadByte()
{
return -1;
}
public override long Seek(long offset, SeekOrigin origin)
{
return 0;

View File

@@ -282,6 +282,15 @@ namespace SharpCompress.Compressors.Deflate
return _baseStream.Read(buffer, offset, count);
}
public override int ReadByte()
{
if (_disposed)
{
throw new ObjectDisposedException("DeflateStream");
}
return _baseStream.ReadByte();
}
/// <summary>
/// Calling this method always throws a <see cref="NotImplementedException"/>.
/// </summary>
@@ -340,6 +349,15 @@ namespace SharpCompress.Compressors.Deflate
_baseStream.Write(buffer, offset, count);
}
public override void WriteByte(byte value)
{
if (_disposed)
{
throw new ObjectDisposedException("DeflateStream");
}
_baseStream.WriteByte(value);
}
#endregion
public MemoryStream InputBuffer => new MemoryStream(_baseStream._z.InputBuffer, _baseStream._z.NextIn,

View File

@@ -270,6 +270,15 @@ namespace SharpCompress.Compressors.Deflate
return _baseStream.Read(buffer, offset, count);
}
public override int ReadByte()
{
if (_disposed)
{
throw new ObjectDisposedException("ZlibStream");
}
return _baseStream.ReadByte();
}
/// <summary>
/// Calling this method always throws a <see cref="NotImplementedException"/>.
/// </summary>
@@ -321,6 +330,15 @@ namespace SharpCompress.Compressors.Deflate
_baseStream.Write(buffer, offset, count);
}
public override void WriteByte(byte value)
{
if (_disposed)
{
throw new ObjectDisposedException("ZlibStream");
}
_baseStream.WriteByte(value);
}
#endregion System.IO.Stream methods
}
}

View File

@@ -193,6 +193,22 @@ namespace SharpCompress.Compressors.LZMA
return count;
}
public override int ReadByte()
{
if (_mFinished)
{
return -1;
}
if (!_mIter.MoveNext())
{
_mFinished = true;
return -1;
}
return _mIter.Current;
}
public IEnumerable<byte> Run()
{
const uint kBurstSize = (1u << 18);

View File

@@ -110,6 +110,8 @@ namespace SharpCompress.Compressors.LZMA
public override int Read(byte[] buffer, int offset, int count) => _stream.Read(buffer, offset, count);
public override int ReadByte() => _stream.ReadByte();
public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
public override void SetLength(long value) => throw new NotImplementedException();
@@ -120,6 +122,12 @@ namespace SharpCompress.Compressors.LZMA
_writeCount += count;
}
public override void WriteByte(byte value)
{
_stream.WriteByte(value);
++_writeCount;
}
#endregion
/// <summary>

View File

@@ -51,5 +51,10 @@ namespace SharpCompress.Compressors.Xz.Filters
{
return BaseStream.Read(buffer, offset, count);
}
public override int ReadByte()
{
return BaseStream.ReadByte();
}
}
}

View File

@@ -47,6 +47,12 @@ namespace SharpCompress.Crypto
hash = CalculateCrc(table, hash, buffer, offset, count);
}
public override void WriteByte(byte value)
{
stream.WriteByte(value);
hash = CalculateCrc(table, hash, value);
}
public override bool CanRead => stream.CanRead;
public override bool CanSeek => false;
public override bool CanWrite => stream.CanWrite;
@@ -98,9 +104,16 @@ namespace SharpCompress.Crypto
unchecked
{
for (int i = offset, end = offset + count; i < end; i++)
crc = (crc >> 8) ^ table[(crc ^ buffer[i]) & 0xFF];
{
crc = CalculateCrc(table, crc, buffer[i]);
}
}
return crc;
}
private static uint CalculateCrc(uint[] table, uint crc, byte b)
{
return (crc >> 8) ^ table[(crc ^ b) & 0xFF];
}
}
}

View File

@@ -1,74 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.IO
{
public class ReadOnlyAppendingStream : Stream
{
private readonly Queue<Stream> streams;
private Stream current;
public ReadOnlyAppendingStream(IEnumerable<Stream> streams)
{
this.streams = new Queue<Stream>(streams);
}
public override bool CanRead => true;
public override bool CanSeek => false;
public override bool CanWrite => false;
public override void Flush()
{
throw new NotImplementedException();
}
public override long Length => throw new NotImplementedException();
public override long Position { get => throw new NotImplementedException(); set => throw new NotImplementedException(); }
public override int Read(byte[] buffer, int offset, int count)
{
if (current == null && streams.Count == 0)
{
return -1;
}
if (current == null)
{
current = streams.Dequeue();
}
int totalRead = 0;
while (totalRead < count)
{
int read = current.Read(buffer, offset + totalRead, count - totalRead);
if (read <= 0)
{
if (streams.Count == 0)
{
return totalRead;
}
current = streams.Dequeue();
}
totalRead += read;
}
return totalRead;
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotImplementedException();
}
public override void SetLength(long value)
{
throw new NotImplementedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotImplementedException();
}
}
}

View File

@@ -3,33 +3,22 @@ using System.IO;
namespace SharpCompress.IO
{
internal class BufferedSubStream : Stream
internal class BufferedSubStream : NonDisposingStream
{
private long position;
private int cacheOffset;
private int cacheLength;
private readonly byte[] cache;
public BufferedSubStream(Stream stream, long origin, long bytesToRead)
public BufferedSubStream(Stream stream, long origin, long bytesToRead) : base(stream, throwOnDispose: false)
{
Stream = stream;
position = origin;
BytesLeftToRead = bytesToRead;
cache = new byte[32 << 10];
}
protected override void Dispose(bool disposing)
{
if (disposing)
{
//Stream.Dispose();
}
}
private long BytesLeftToRead { get; set; }
public Stream Stream { get; }
public override bool CanRead => true;
public override bool CanSeek => false;

View File

@@ -1,15 +1,12 @@
using System;
using System;
using System.IO;
namespace SharpCompress.IO
{
internal class CountingWritableSubStream : Stream
internal class CountingWritableSubStream : NonDisposingStream
{
private readonly Stream writableStream;
internal CountingWritableSubStream(Stream stream)
internal CountingWritableSubStream(Stream stream) : base(stream, throwOnDispose: false)
{
writableStream = stream;
}
public ulong Count { get; private set; }
@@ -22,7 +19,7 @@ namespace SharpCompress.IO
public override void Flush()
{
writableStream.Flush();
Stream.Flush();
}
public override long Length => throw new NotSupportedException();
@@ -46,8 +43,14 @@ namespace SharpCompress.IO
public override void Write(byte[] buffer, int offset, int count)
{
writableStream.Write(buffer, offset, count);
Stream.Write(buffer, offset, count);
Count += (uint)count;
}
public override void WriteByte(byte value)
{
Stream.WriteByte(value);
++Count;
}
}
}

View File

@@ -20,6 +20,7 @@ namespace SharpCompress.IO
{
Stream.Dispose();
}
base.Dispose(disposing);
}
public Stream Stream { get; }
@@ -47,6 +48,19 @@ namespace SharpCompress.IO
return read;
}
public override int ReadByte()
{
int value = Stream.ReadByte();
if (value == -1)
{
return -1;
}
++currentEntryTotalReadBytes;
listener.FireCompressedBytesRead(currentEntryTotalReadBytes, currentEntryTotalReadBytes);
return value;
}
public override long Seek(long offset, SeekOrigin origin)
{
return Stream.Seek(offset, origin);

View File

@@ -15,10 +15,9 @@ namespace SharpCompress.IO
protected override void Dispose(bool disposing)
{
GC.SuppressFinalize(this);
if (ThrowOnDispose)
{
throw new InvalidOperationException();
throw new InvalidOperationException($"Attempt to dispose of a {nameof(NonDisposingStream)} when {nameof(ThrowOnDispose)} is {ThrowOnDispose}");
}
}

View File

@@ -11,7 +11,7 @@ namespace SharpCompress.IO
}
public ReadOnlySubStream(Stream stream, long? origin, long bytesToRead)
: base(stream, false)
: base(stream, throwOnDispose: false)
{
if (origin != null)
{
@@ -51,6 +51,20 @@ namespace SharpCompress.IO
return read;
}
public override int ReadByte()
{
if (BytesLeftToRead <= 0)
{
return -1;
}
int value = Stream.ReadByte();
if (value != -1)
{
--BytesLeftToRead;
}
return value;
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException();

View File

@@ -42,36 +42,8 @@ namespace SharpCompress.Readers
public static void WriteEntryToDirectory(this IReader reader, string destinationDirectory,
ExtractionOptions options = null)
{
string destinationFileName = string.Empty;
string file = Path.GetFileName(reader.Entry.Key);
options = options ?? new ExtractionOptions()
{
Overwrite = true
};
if (options.ExtractFullPath)
{
string folder = Path.GetDirectoryName(reader.Entry.Key);
string destdir = Path.Combine(destinationDirectory, folder);
if (!Directory.Exists(destdir))
{
Directory.CreateDirectory(destdir);
}
destinationFileName = Path.Combine(destdir, file);
}
else
{
destinationFileName = Path.Combine(destinationDirectory, file);
}
if (!reader.Entry.IsDirectory)
{
reader.WriteEntryToFile(destinationFileName, options);
}
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
{
Directory.CreateDirectory(destinationFileName);
}
ExtractionMethods.WriteEntryToDirectory(reader.Entry, destinationDirectory, options,
reader.WriteEntryToFile);
}
/// <summary>
@@ -80,21 +52,14 @@ namespace SharpCompress.Readers
public static void WriteEntryToFile(this IReader reader, string destinationFileName,
ExtractionOptions options = null)
{
FileMode fm = FileMode.Create;
options = options ?? new ExtractionOptions()
{
Overwrite = true
};
if (!options.Overwrite)
{
fm = FileMode.CreateNew;
}
using (FileStream fs = File.Open(destinationFileName, fm))
{
reader.WriteEntryTo(fs);
}
reader.Entry.PreserveExtractionOptions(destinationFileName, options);
ExtractionMethods.WriteEntryToFile(reader.Entry, destinationFileName, options,
(x, fm) =>
{
using (FileStream fs = File.Open(destinationFileName, fm))
{
reader.WriteEntryTo(fs);
}
});
}
#endif
}

View File

@@ -2,9 +2,9 @@
<PropertyGroup>
<AssemblyTitle>SharpCompress - Pure C# Decompression/Compression</AssemblyTitle>
<NeutralLanguage>en-US</NeutralLanguage>
<VersionPrefix>0.21.0</VersionPrefix>
<AssemblyVersion>0.21.0</AssemblyVersion>
<FileVersion>0.21.0</FileVersion>
<VersionPrefix>0.24.0</VersionPrefix>
<AssemblyVersion>0.24.0</AssemblyVersion>
<FileVersion>0.24.0</FileVersion>
<Authors>Adam Hathcock</Authors>
<TargetFrameworks Condition="'$(LibraryFrameworks)'==''">net45;net35;netstandard1.0;netstandard1.3;netstandard2.0</TargetFrameworks>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
@@ -28,12 +28,12 @@
<DefineConstants>$(DefineConstants);NETCORE</DefineConstants>
</PropertyGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard1.3' ">
<PackageReference Include="System.Text.Encoding.CodePages" Version="4.4.0" />
<PackageReference Include="System.Text.Encoding.CodePages" Version="4.5.1" />
</ItemGroup>
<PropertyGroup Condition=" '$(TargetFramework)' == 'netstandard2.0' ">
<DefineConstants>$(DefineConstants);NETCORE</DefineConstants>
</PropertyGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard2.0' ">
<PackageReference Include="System.Text.Encoding.CodePages" Version="4.4.0" />
<PackageReference Include="System.Text.Encoding.CodePages" Version="4.5.1" />
</ItemGroup>
</Project>

View File

@@ -2,6 +2,8 @@
using System;
#endif
using System.IO;
using System.Linq;
using System.Linq.Expressions;
namespace SharpCompress.Writers
{
@@ -30,22 +32,33 @@ namespace SharpCompress.Writers
writer.Write(entryPath, new FileInfo(source));
}
public static void WriteAll(this IWriter writer, string directory, string searchPattern = "*",
public static void WriteAll(this IWriter writer, string directory, string searchPattern = "*", SearchOption option = SearchOption.TopDirectoryOnly)
{
writer.WriteAll(directory, searchPattern, null, option);
}
public static void WriteAll(this IWriter writer, string directory, string searchPattern = "*", Expression<Func<string, bool>> fileSearchFunc = null,
SearchOption option = SearchOption.TopDirectoryOnly)
{
if (!Directory.Exists(directory))
{
throw new ArgumentException("Directory does not exist: " + directory);
}
if (fileSearchFunc == null)
{
fileSearchFunc = n => true;
}
#if NET35
foreach (var file in Directory.GetDirectories(directory, searchPattern, option))
foreach (var file in Directory.GetDirectories(directory, searchPattern, option).Where(fileSearchFunc.Compile()))
#else
foreach (var file in Directory.EnumerateFiles(directory, searchPattern, option))
foreach (var file in Directory.EnumerateFiles(directory, searchPattern, option).Where(fileSearchFunc.Compile()))
#endif
{
writer.Write(file.Substring(directory.Length), file);
}
}
#endif
}
}

View File

@@ -1,4 +1,3 @@
using SharpCompress.Archives;
using SharpCompress.Common;
namespace SharpCompress.Writers.Tar
@@ -18,6 +17,7 @@ namespace SharpCompress.Writers.Tar
internal TarWriterOptions(WriterOptions options) : this(options.CompressionType, true)
{
ArchiveEncoding = options.ArchiveEncoding;
}
}
}

View File

@@ -36,7 +36,8 @@ namespace SharpCompress.Writers.Zip
byte[] encodedComment = archiveEncoding.Encode(Comment);
var zip64_stream = Compressed >= uint.MaxValue || Decompressed >= uint.MaxValue;
var zip64 = zip64_stream || HeaderOffset >= uint.MaxValue || Zip64HeaderOffset != 0;
var zip64 = zip64_stream || HeaderOffset >= uint.MaxValue;
var usedCompression = compression;
var compressedvalue = zip64 ? uint.MaxValue : (uint)Compressed;
var decompressedvalue = zip64 ? uint.MaxValue : (uint)Decompressed;
@@ -56,17 +57,21 @@ namespace SharpCompress.Writers.Zip
if (!zip64_stream)
flags |= HeaderFlags.UsePostDataDescriptor;
if (compression == ZipCompressionMethod.LZMA)
if (usedCompression == ZipCompressionMethod.LZMA)
{
flags |= HeaderFlags.Bit1; // eos marker
}
}
// Support for zero byte files
if (Decompressed == 0 && Compressed == 0)
usedCompression = ZipCompressionMethod.None;
//constant sig, then version made by, then version to extract
outputStream.Write(new byte[] { 80, 75, 1, 2, version, 0, version, 0 }, 0, 8);
outputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)flags), 0, 2);
outputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)compression), 0, 2); // zipping method
outputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)usedCompression), 0, 2); // zipping method
outputStream.Write(DataConverter.LittleEndian.GetBytes(ModificationTime.DateTimeToDosTime()), 0, 4);
// zipping date and time

View File

@@ -383,6 +383,14 @@ namespace SharpCompress.Writers.Zip
originalStream.Position = (long)(entry.HeaderOffset + 6);
originalStream.WriteByte(0);
if (counting.Count == 0 && entry.Decompressed == 0)
{
// set compression to STORED for zero byte files (no compression data)
originalStream.Position = (long)(entry.HeaderOffset + 8);
originalStream.WriteByte(0);
originalStream.WriteByte(0);
}
originalStream.Position = (long)(entry.HeaderOffset + 14);
writer.WriteFooter(entry.Crc, compressedvalue, decompressedvalue);

View File

@@ -10,7 +10,7 @@ using Xunit;
namespace SharpCompress.Test
{
public class ArchiveTests : TestBase
public class ArchiveTests : ReaderTests
{
protected void ArchiveStreamReadExtractAll(string testArchive, CompressionType compression)
{
@@ -23,14 +23,13 @@ namespace SharpCompress.Test
{
foreach (var path in testArchives)
{
ResetScratch();
using (var stream = new NonDisposingStream(File.OpenRead(path), true))
using (var archive = ArchiveFactory.Open(stream))
{
Assert.True(archive.IsSolid);
using (var reader = archive.ExtractAllEntries())
{
ReaderTests.UseReader(this, reader, compression);
UseReader(reader, compression);
}
VerifyFiles();
@@ -69,7 +68,6 @@ namespace SharpCompress.Test
{
foreach (var path in testArchives)
{
ResetScratch();
using (var stream = new NonDisposingStream(File.OpenRead(path), true))
using (var archive = ArchiveFactory.Open(stream, readerOptions))
{
@@ -106,118 +104,42 @@ namespace SharpCompress.Test
protected void ArchiveFileRead(string testArchive, ReaderOptions readerOptions = null)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
ArchiveFileRead(testArchive.AsEnumerable(), readerOptions);
}
protected void ArchiveFileRead(IEnumerable<string> testArchives, ReaderOptions readerOptions = null)
{
foreach (var path in testArchives)
using (var archive = ArchiveFactory.Open(testArchive, readerOptions))
{
ResetScratch();
using (var archive = ArchiveFactory.Open(path, readerOptions))
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
//archive.EntryExtractionBegin += archive_EntryExtractionBegin;
//archive.FilePartExtractionBegin += archive_FilePartExtractionBegin;
//archive.CompressedBytesRead += archive_CompressedBytesRead;
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
entry.WriteToDirectory(SCRATCH_FILES_PATH,
new ExtractionOptions()
{
ExtractFullPath = true,
Overwrite = true
});
}
entry.WriteToDirectory(SCRATCH_FILES_PATH,
new ExtractionOptions()
{
ExtractFullPath = true,
Overwrite = true
});
}
VerifyFiles();
}
VerifyFiles();
}
private void archive_CompressedBytesRead(object sender, CompressedBytesReadEventArgs e)
{
Console.WriteLine("Read Compressed File Part Bytes: {0} Percentage: {1}%",
e.CurrentFilePartCompressedBytesRead, CreatePercentage(e.CurrentFilePartCompressedBytesRead, partTotal));
string percentage = entryTotal.HasValue ? CreatePercentage(e.CompressedBytesRead,
entryTotal.Value).ToString() : "Unknown";
Console.WriteLine("Read Compressed File Entry Bytes: {0} Percentage: {1}%",
e.CompressedBytesRead, percentage);
}
private void archive_FilePartExtractionBegin(object sender, FilePartExtractionBeginEventArgs e)
{
partTotal = e.Size;
Console.WriteLine("Initializing File Part Extraction: " + e.Name);
}
private void archive_EntryExtractionBegin(object sender, ArchiveExtractionEventArgs<IArchiveEntry> e)
{
entryTotal = e.Item.Size;
Console.WriteLine("Initializing File Entry Extraction: " + e.Item.Key);
}
private long? entryTotal;
private long partTotal;
private long totalSize;
/// <summary>
/// Demonstrate the ExtractionOptions.PreserveFileTime and ExtractionOptions.PreserveAttributes extract options
/// </summary>
protected void ArchiveFileReadEx(string testArchive)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
ArchiveFileReadEx(testArchive.AsEnumerable());
}
/// <summary>
/// Demonstrate the TotalUncompressSize property, and the ExtractionOptions.PreserveFileTime and ExtractionOptions.PreserveAttributes extract options
/// </summary>
// NOTE(review): this method body appears to be a corrupted merge of two revisions
// (diff lines interleaved without +/- markers): `testArchive` in the first using
// is not defined in this scope, `archive` is opened twice, and the
// WriteToDirectory block appears both inside and outside the entry loop.
// TODO: reconstruct the intended single revision from version control.
protected void ArchiveFileReadEx(IEnumerable<string> testArchives)
{
foreach (var path in testArchives)
// NOTE(review): `testArchive` is undefined here -- presumably residue of the
// older revision; the newer revision opens `path` below.
using (var archive = ArchiveFactory.Open(testArchive))
{
ResetScratch();
using (var archive = ArchiveFactory.Open(path))
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
// Capture the archive's total uncompressed size for progress reporting.
totalSize = archive.TotalUncompressSize;
//archive.EntryExtractionBegin += Archive_EntryExtractionBeginEx;
//archive.EntryExtractionEnd += Archive_EntryExtractionEndEx;
//archive.CompressedBytesRead += Archive_CompressedBytesReadEx;
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
// Extract while preserving timestamps and file attributes.
entry.WriteToDirectory(SCRATCH_FILES_PATH,
new ExtractionOptions()
{
ExtractFullPath = true,
Overwrite = true,
PreserveAttributes = true,
PreserveFileTime = true
});
}
// NOTE(review): duplicated extraction call -- `entry` is out of scope at this
// nesting level; apparent merge artifact of the older revision.
entry.WriteToDirectory(SCRATCH_FILES_PATH,
new ExtractionOptions()
{
ExtractFullPath = true,
Overwrite = true,
PreserveAttributes = true,
PreserveFileTime = true
});
}
VerifyFilesEx();
}
}
private void Archive_EntryExtractionEndEx(object sender, ArchiveExtractionEventArgs<IArchiveEntry> e)
{
    // Accumulate the sizes of finished entries so overall progress can be derived.
    partTotal = partTotal + e.Item.Size;
}
private void Archive_CompressedBytesReadEx(object sender, CompressedBytesReadEventArgs e)
{
    // Per-entry progress; "-" when the entry size is unknown.
    string entryPercentage = "-";
    if (entryTotal.HasValue)
    {
        entryPercentage = CreatePercentage(e.CompressedBytesRead, entryTotal.Value).ToString();
    }

    // Overall progress: bytes of finished entries plus bytes read of the current one.
    string totalPercentage = CreatePercentage(partTotal + e.CompressedBytesRead, totalSize).ToString();
    Console.WriteLine(@"Read Compressed File Progress: {0}% Total Progress {1}%", entryPercentage, totalPercentage);
}
private void Archive_EntryExtractionBeginEx(object sender, ArchiveExtractionEventArgs<IArchiveEntry> e)
{
    // Capture the uncompressed size of the entry that extraction is starting on.
    this.entryTotal = e.Item.Size;
}
/// <summary>
/// Computes n as an integer percentage of d, truncated toward zero.
/// </summary>
/// <param name="n">Numerator (e.g. bytes processed so far).</param>
/// <param name="d">Denominator (e.g. total bytes).</param>
/// <returns>The truncated percentage, or 0 when <paramref name="d"/> is 0.</returns>
private int CreatePercentage(long n, long d)
{
    // Guard: n / 0 in double arithmetic yields Infinity, and casting Infinity
    // to int is an undefined-value conversion in unchecked C#.
    if (d == 0)
    {
        return 0;
    }
    // Removed an unreachable VerifyFilesEx() call that followed the return
    // statement in the original (apparent merge artifact).
    return (int)((double)n / d * 100);
}
}
}

View File

@@ -17,8 +17,7 @@ namespace SharpCompress.Test.GZip
[Fact]
public void GZip_Archive_Generic()
{
ResetScratch();
using (Stream stream = File.Open(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"), FileMode.Open))
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")))
using (var archive = ArchiveFactory.Open(stream))
{
var entry = archive.Entries.First();
@@ -31,8 +30,7 @@ namespace SharpCompress.Test.GZip
[Fact]
public void GZip_Archive()
{
ResetScratch();
using (Stream stream = File.Open(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"), FileMode.Open))
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")))
using (var archive = GZipArchive.Open(stream))
{
var entry = archive.Entries.First();
@@ -47,8 +45,7 @@ namespace SharpCompress.Test.GZip
public void GZip_Archive_NoAdd()
{
string jpg = Path.Combine(ORIGINAL_FILES_PATH, "jpg", "test.jpg");
ResetScratch();
using (Stream stream = File.Open(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"), FileMode.Open))
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")))
using (var archive = GZipArchive.Open(stream))
{
Assert.Throws<InvalidOperationException>(() => archive.AddEntry("jpg\\test.jpg", jpg));
@@ -60,9 +57,8 @@ namespace SharpCompress.Test.GZip
[Fact]
public void GZip_Archive_Multiple_Reads()
{
ResetScratch();
var inputStream = new MemoryStream();
using (var fileStream = File.Open(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"), FileMode.Open))
using (var fileStream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")))
{
fileStream.CopyTo(inputStream);
inputStream.Position = 0;

View File

@@ -17,7 +17,6 @@ namespace SharpCompress.Test.GZip
[Fact]
public void GZip_Writer_Generic()
{
ResetScratch();
using (Stream stream = File.Open(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar.gz"), FileMode.OpenOrCreate, FileAccess.Write))
using (var writer = WriterFactory.Open(stream, ArchiveType.GZip, CompressionType.GZip))
{
@@ -30,7 +29,6 @@ namespace SharpCompress.Test.GZip
[Fact]
public void GZip_Writer()
{
ResetScratch();
using (Stream stream = File.Open(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar.gz"), FileMode.OpenOrCreate, FileAccess.Write))
using (var writer = new GZipWriter(stream))
{
@@ -44,17 +42,12 @@ namespace SharpCompress.Test.GZip
public void GZip_Writer_Generic_Bad_Compression()
{
Assert.Throws<InvalidFormatException>(() =>
{
ResetScratch();
using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar.gz")))
using (var writer = WriterFactory.Open(stream, ArchiveType.GZip, CompressionType.BZip2))
{
writer.Write("Tar.tar", Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar"));
}
CompareArchivesByPath(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar.gz"),
Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"));
});
{
using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar.gz")))
using (var writer = WriterFactory.Open(stream, ArchiveType.GZip, CompressionType.BZip2))
{
}
});
}
}
}

View File

@@ -0,0 +1,57 @@
using System;
using System.IO;
namespace SharpCompress.Test.Mocks
{
// This is a simplified version of CryptoStream that always flushes the inner stream on Dispose to trigger an error in EntryStream
// CryptoStream doesn't always trigger the Flush, so this class is used instead
// See https://referencesource.microsoft.com/#mscorlib/system/security/cryptography/cryptostream.cs,141
public class FlushOnDisposeStream : Stream, IDisposable
{
    // The wrapped stream; flushed and then closed when this instance is disposed.
    private readonly Stream _inner;

    public FlushOnDisposeStream(Stream innerStream)
    {
        _inner = innerStream;
    }

    // Read-only pass-through: reading delegates to the wrapped stream;
    // seeking and writing are not supported by this mock.
    public override bool CanRead => _inner.CanRead;
    public override bool CanSeek => false;
    public override bool CanWrite => false;
    public override long Length => _inner.Length;

    public override long Position
    {
        get => _inner.Position;
        set => _inner.Position = value;
    }

    public override void Flush() => throw new NotImplementedException();

    public override int Read(byte[] buffer, int offset, int count) => _inner.Read(buffer, offset, count);

    public override long Seek(long offset, SeekOrigin origin) => throw new NotImplementedException();

    public override void SetLength(long value) => throw new NotImplementedException();

    public override void Write(byte[] buffer, int offset, int count) => throw new NotImplementedException();

    protected override void Dispose(bool disposing)
    {
        if (disposing)
        {
            // Deliberately flush before closing: this is the behavior under test.
            _inner.Flush();
            _inner.Close();
        }

        base.Dispose(disposing);
    }
}
}

View File

@@ -1,7 +1,7 @@
using System;
using System.IO;
namespace SharpCompress.Test
namespace SharpCompress.Test.Mocks
{
public class ForwardOnlyStream : Stream
{
@@ -16,9 +16,15 @@ namespace SharpCompress.Test
protected override void Dispose(bool disposing)
{
base.Dispose(disposing);
stream.Dispose();
IsDisposed = true;
if (!IsDisposed)
{
if (disposing)
{
stream.Dispose();
IsDisposed = true;
base.Dispose(disposing);
}
}
}
public override bool CanRead => true;
@@ -55,7 +61,8 @@ namespace SharpCompress.Test
}
public override void Write(byte[] buffer, int offset, int count)
{throw new NotSupportedException();
{
throw new NotSupportedException();
}
}
}

View File

@@ -0,0 +1,69 @@
using System.IO;
namespace SharpCompress.Test.Mocks
{
// Test double that wraps a stream, optionally restricting its capabilities,
// and records whether it has been disposed so tests can assert on it.
public class TestStream : Stream
{
    // The wrapped stream all operations delegate to.
    private readonly Stream stream;

    /// <summary>
    /// Wraps <paramref name="stream"/>, mirroring its read/write/seek capabilities.
    /// </summary>
    public TestStream(Stream stream) : this(stream, stream.CanRead, stream.CanWrite, stream.CanSeek)
    {
    }

    // True once Dispose has run on the disposing path.
    public bool IsDisposed { get; private set; }

    /// <summary>
    /// Wraps <paramref name="stream"/> with explicitly chosen capabilities,
    /// letting tests simulate e.g. a non-seekable or read-only source.
    /// </summary>
    public TestStream(Stream stream, bool read, bool write, bool seek)
    {
        this.stream = stream;
        CanRead = read;
        CanWrite = write;
        CanSeek = seek;
    }

    protected override void Dispose(bool disposing)
    {
        // Fix: only dispose the managed inner stream on the disposing path,
        // per the standard Dispose(bool) pattern. The original disposed it
        // unconditionally, even for a Dispose(false) call.
        if (disposing)
        {
            stream.Dispose();
            IsDisposed = true;
        }
        base.Dispose(disposing);
    }

    public override bool CanRead { get; }
    public override bool CanSeek { get; }
    public override bool CanWrite { get; }

    public override void Flush()
    {
        stream.Flush();
    }

    public override long Length => stream.Length;

    public override long Position
    {
        get => stream.Position;
        set => stream.Position = value;
    }

    public override int Read(byte[] buffer, int offset, int count)
    {
        return stream.Read(buffer, offset, count);
    }

    public override long Seek(long offset, SeekOrigin origin)
    {
        return stream.Seek(offset, origin);
    }

    public override void SetLength(long value)
    {
        stream.SetLength(value);
    }

    public override void Write(byte[] buffer, int offset, int count)
    {
        stream.Write(buffer, offset, count);
    }
}
}

View File

@@ -48,7 +48,6 @@ namespace SharpCompress.Test.Rar
private void ReadRarPassword(string testArchive, string password)
{
ResetScratch();
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, testArchive)))
using (var archive = RarArchive.Open(stream, new ReaderOptions()
{
@@ -80,7 +79,6 @@ namespace SharpCompress.Test.Rar
protected void ArchiveFileReadPassword(string archiveName, string password)
{
ResetScratch();
using (var archive = RarArchive.Open(Path.Combine(TEST_ARCHIVES_PATH, archiveName), new ReaderOptions()
{
Password = password,
@@ -131,7 +129,6 @@ namespace SharpCompress.Test.Rar
private void DoRar_test_invalid_exttime_ArchiveStreamRead(string filename)
{
ResetScratch();
using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename)))
{
using (var archive = ArchiveFactory.Open(stream))
@@ -151,7 +148,6 @@ namespace SharpCompress.Test.Rar
[Fact]
public void Rar_Jpg_ArchiveStreamRead()
{
ResetScratch();
using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Rar.jpeg.jpg")))
{
using (var archive = RarArchive.Open(stream, new ReaderOptions()
@@ -185,7 +181,6 @@ namespace SharpCompress.Test.Rar
private void DoRar_IsSolidArchiveCheck(string filename)
{
ResetScratch();
using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename)))
{
using (var archive = RarArchive.Open(stream))
@@ -253,7 +248,6 @@ namespace SharpCompress.Test.Rar
private void DoRar_Multi_ArchiveStreamRead(string[] archives)
{
ResetScratch();
using (var archive = RarArchive.Open(archives.Select(s => Path.Combine(TEST_ARCHIVES_PATH, s))
.Select(File.OpenRead)))
{
@@ -305,7 +299,6 @@ namespace SharpCompress.Test.Rar
private void DoRar_ArchiveFileRead_HasDirectories(string filename)
{
ResetScratch();
using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename)))
{
using (var archive = RarArchive.Open(stream))
@@ -319,7 +312,6 @@ namespace SharpCompress.Test.Rar
[Fact]
public void Rar_Jpg_ArchiveFileRead()
{
ResetScratch();
using (var archive = RarArchive.Open(Path.Combine(TEST_ARCHIVES_PATH, "Rar.jpeg.jpg"), new ReaderOptions()
{
LookForHeader = true

View File

@@ -15,7 +15,6 @@ namespace SharpCompress.Test.Rar
public RarHeaderFactoryTest()
{
ResetScratch();
rarHeaderFactory = new RarHeaderFactory(
StreamingMode.Seekable,
new ReaderOptions { LeaveStreamOpen = true });
@@ -47,7 +46,7 @@ namespace SharpCompress.Test.Rar
private void ReadEncryptedFlag(string testArchive, bool isEncrypted)
{
using (var stream = GetReaderStream(testArchive))
using (var stream = new FileStream(Path.Combine(TEST_ARCHIVES_PATH, testArchive), FileMode.Open, FileAccess.Read))
{
foreach (var header in rarHeaderFactory.ReadHeaders(stream))
{
@@ -59,10 +58,5 @@ namespace SharpCompress.Test.Rar
}
}
}
private FileStream GetReaderStream(string testArchive)
{
return new FileStream(Path.Combine(TEST_ARCHIVES_PATH, testArchive), FileMode.Open);
}
}
}

View File

@@ -33,7 +33,6 @@ namespace SharpCompress.Test.Rar
private void DoRar_Multi_Reader(string[] archives)
{
ResetScratch();
using (var reader = RarReader.Open(archives.Select(s => Path.Combine(TEST_ARCHIVES_PATH, s))
.Select(p => File.OpenRead(p))))
{
@@ -64,7 +63,6 @@ namespace SharpCompress.Test.Rar
{
Assert.Throws<InvalidFormatException>(() =>
{
ResetScratch();
using (var reader = RarReader.Open(archives.Select(s => Path.Combine(TEST_ARCHIVES_PATH, s))
.Select(p => File.OpenRead(p)),
new ReaderOptions()
@@ -110,7 +108,6 @@ namespace SharpCompress.Test.Rar
private void DoRar_Multi_Reader_Delete_Files(string[] archives)
{
ResetScratch();
foreach (var file in archives)
{
@@ -202,27 +199,7 @@ namespace SharpCompress.Test.Rar
private void ReadRar(string testArchive, string password)
{
ResetScratch();
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, testArchive)))
using (var reader = RarReader.Open(stream, new ReaderOptions()
{
Password = password
}))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(CompressionType.Rar, reader.Entry.CompressionType);
reader.WriteEntryToDirectory(SCRATCH_FILES_PATH, new ExtractionOptions()
{
ExtractFullPath = true,
Overwrite = true
});
}
}
}
VerifyFiles();
Read(testArchive, CompressionType.Rar, new ReaderOptions { Password = password });
}
[Fact]
@@ -237,9 +214,8 @@ namespace SharpCompress.Test.Rar
private void DoRar_Entry_Stream(string filename)
{
ResetScratch();
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename)))
using (var reader = RarReader.Open(stream))
using (var reader = ReaderFactory.Open(stream))
{
while (reader.MoveToNextEntry())
{
@@ -271,9 +247,8 @@ namespace SharpCompress.Test.Rar
[Fact]
public void Rar_Reader_Audio_program()
{
ResetScratch();
using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Rar.Audio_program.rar")))
using (var reader = RarReader.Open(stream, new ReaderOptions()
using (var reader = ReaderFactory.Open(stream, new ReaderOptions()
{
LookForHeader = true
}))
@@ -295,7 +270,6 @@ namespace SharpCompress.Test.Rar
[Fact]
public void Rar_Jpg_Reader()
{
ResetScratch();
using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Rar.jpeg.jpg")))
using (var reader = RarReader.Open(stream, new ReaderOptions()
{
@@ -339,9 +313,8 @@ namespace SharpCompress.Test.Rar
private void DoRar_Solid_Skip_Reader(string filename)
{
ResetScratch();
using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename)))
using (var reader = RarReader.Open(stream, new ReaderOptions()
using (var reader = ReaderFactory.Open(stream, new ReaderOptions()
{
LookForHeader = true
}))
@@ -373,9 +346,8 @@ namespace SharpCompress.Test.Rar
private void DoRar_Reader_Skip(string filename)
{
ResetScratch();
using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename)))
using (var reader = RarReader.Open(stream, new ReaderOptions()
using (var reader = ReaderFactory.Open(stream, new ReaderOptions()
{
LookForHeader = true
}))

View File

@@ -1,52 +1,66 @@
using System.Collections.Generic;
using System.IO;
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Test.Mocks;
using Xunit;
namespace SharpCompress.Test
{
public class ReaderTests : TestBase
public abstract class ReaderTests : TestBase
{
protected void Read(string testArchive, CompressionType expectedCompression)
protected void Read(string testArchive, CompressionType expectedCompression, ReaderOptions options = null)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
Read(testArchive.AsEnumerable(), expectedCompression);
options = options ?? new ReaderOptions();
options.LeaveStreamOpen = true;
ReadImpl(testArchive, expectedCompression, options);
options.LeaveStreamOpen = false;
ReadImpl(testArchive, expectedCompression, options);
VerifyFiles();
}
protected void Read(IEnumerable<string> testArchives, CompressionType expectedCompression)
private void ReadImpl(string testArchive, CompressionType expectedCompression, ReaderOptions options)
{
foreach (var path in testArchives)
using (var file = File.OpenRead(testArchive))
{
using (var stream = new NonDisposingStream(new ForwardOnlyStream(File.OpenRead(path)), true))
using (var reader = ReaderFactory.Open(stream, new ReaderOptions()
{
LeaveStreamOpen = true
}))
using (var protectedStream = new NonDisposingStream(new ForwardOnlyStream(file), throwOnDispose: true))
{
UseReader(this, reader, expectedCompression);
stream.ThrowOnDispose = false;
using (var testStream = new TestStream(protectedStream))
{
using (var reader = ReaderFactory.Open(testStream, options))
{
UseReader(reader, expectedCompression);
protectedStream.ThrowOnDispose = false;
Assert.False(testStream.IsDisposed, "{nameof(testStream)} prematurely closed");
}
// Boolean XOR -- If the stream should be left open (true), then the stream should not be disposed (false)
// and if the stream should be closed (false), then the stream should be disposed (true)
var message = $"{nameof(options.LeaveStreamOpen)} is set to '{options.LeaveStreamOpen}', so {nameof(testStream.IsDisposed)} should be set to '{!testStream.IsDisposed}', but is set to {testStream.IsDisposed}";
Assert.True(options.LeaveStreamOpen != testStream.IsDisposed, message);
}
}
}
}
public static void UseReader(TestBase test, IReader reader, CompressionType expectedCompression)
public void UseReader(IReader reader, CompressionType expectedCompression)
{
test.ResetScratch();
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(reader.Entry.CompressionType, expectedCompression);
reader.WriteEntryToDirectory(test.SCRATCH_FILES_PATH, new ExtractionOptions()
Assert.Equal(expectedCompression, reader.Entry.CompressionType);
reader.WriteEntryToDirectory(SCRATCH_FILES_PATH, new ExtractionOptions()
{
ExtractFullPath = true,
Overwrite = true
});
}
}
test.VerifyFiles();
}
}
}

View File

@@ -1,6 +1,6 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>netcoreapp2.0</TargetFrameworks>
<TargetFrameworks>netcoreapp2.2</TargetFrameworks>
<AssemblyName>SharpCompress.Test</AssemblyName>
<AssemblyOriginatorKeyFile>../../SharpCompress.snk</AssemblyOriginatorKeyFile>
<SignAssembly>true</SignAssembly>
@@ -12,9 +12,10 @@
<ProjectReference Include="..\..\src\SharpCompress\SharpCompress.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="15.7.0" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.3.1" />
<PackageReference Include="xunit" Version="2.3.1" />
<PackageReference Include="Xunit.SkippableFact" Version="1.3.3" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="16.2.0" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.4.1" />
<PackageReference Include="xunit" Version="2.4.1" />
<PackageReference Include="Xunit.SkippableFact" Version="1.3.12" />
<PackageReference Include="Mono.Posix.NETStandard" Version="1.0.0" />
</ItemGroup>
</Project>

View File

@@ -4,7 +4,7 @@ using Xunit;
namespace SharpCompress.Test.Streams
{
public class StreamTests
public class LzmaStreamTests
{
[Fact]
public void TestLzma2Decompress1Byte()

View File

@@ -2,7 +2,7 @@
using SharpCompress.IO;
using Xunit;
namespace SharpCompress.Test
namespace SharpCompress.Test.Streams
{
public class RewindableStreamTest
{

View File

@@ -5,6 +5,10 @@ using SharpCompress.Archives.Tar;
using SharpCompress.Common;
using SharpCompress.Writers;
using Xunit;
using System.Text;
using SharpCompress.Readers;
using SharpCompress.Writers.Tar;
using SharpCompress.Readers.Tar;
namespace SharpCompress.Test.Tar
{
@@ -27,6 +31,39 @@ namespace SharpCompress.Test.Tar
ArchiveFileRead("Tar.tar");
}
[Fact]
public void Tar_FileName_Exactly_100_Characters()
{
string archive = "Tar_FileName_Exactly_100_Characters.tar";
// create the 100 char filename
// (100 characters is presumably the exact size of the ustar header's name
// field, i.e. the boundary before long-name handling kicks in -- confirm)
string filename = "filename_with_exactly_100_characters_______________________________________________________________X";
// Step 1: create a tar file containing a file with the test name
using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH2_FILES_PATH, archive)))
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, CompressionType.None))
using (Stream inputStream = new MemoryStream())
{
// Write a small payload into the in-memory entry, then rewind so the tar
// writer reads it from the start.
StreamWriter sw = new StreamWriter(inputStream);
sw.Write("dummy filecontent");
sw.Flush();
inputStream.Position = 0;
writer.Write(filename, inputStream, null);
}
// Step 2: check if the written tar file can be read correctly
string unmodified = Path.Combine(SCRATCH2_FILES_PATH, archive);
using (var archive2 = TarArchive.Open(unmodified))
{
// Exactly one entry, with the full 100-character key and intact payload.
Assert.Equal(1, archive2.Entries.Count);
Assert.Contains(filename, archive2.Entries.Select(entry => entry.Key));
foreach (var entry in archive2.Entries)
Assert.Equal("dummy filecontent", new StreamReader(entry.OpenEntryStream()).ReadLine());
}
}
[Fact]
public void Tar_NonUstarArchiveWithLongNameDoesNotSkipEntriesAfterTheLongOne()
@@ -48,7 +85,6 @@ namespace SharpCompress.Test.Tar
{
string archive = "Tar_VeryLongFilepathReadback.tar";
ResetScratch();
// create a very long filename
string longFilename = "";
@@ -103,11 +139,17 @@ namespace SharpCompress.Test.Tar
string scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.tar");
string unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
ResetScratch();
// var aropt = new Ar
using (var archive = TarArchive.Create())
{
archive.AddAllFromDirectory(ORIGINAL_FILES_PATH);
archive.SaveTo(scratchPath, CompressionType.None);
var twopt = new TarWriterOptions(CompressionType.None, true);
twopt.ArchiveEncoding = new ArchiveEncoding()
{
Default = Encoding.GetEncoding(866)
};
archive.SaveTo(scratchPath, twopt);
}
CompareArchivesByPath(unmodified, scratchPath);
}
@@ -119,7 +161,6 @@ namespace SharpCompress.Test.Tar
string unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.mod.tar");
string modified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
ResetScratch();
using (var archive = TarArchive.Open(unmodified))
{
archive.AddEntry("jpg\\test.jpg", jpg);
@@ -135,7 +176,6 @@ namespace SharpCompress.Test.Tar
string modified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.mod.tar");
string unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
ResetScratch();
using (var archive = TarArchive.Open(unmodified))
{
var entry = archive.Entries.Single(x => x.Key.EndsWith("jpg"));
@@ -155,5 +195,51 @@ namespace SharpCompress.Test.Tar
Assert.True(archive.Type == ArchiveType.Tar);
}
}
[Fact]
public void Tar_Empty_Archive()
{
    // An archive containing no entries should still be detected as tar.
    var path = Path.Combine(TEST_ARCHIVES_PATH, "Tar.Empty.tar");
    using (Stream stream = File.OpenRead(path))
    {
        using (IArchive archive = ArchiveFactory.Open(stream))
        {
            Assert.True(archive.Type == ArchiveType.Tar);
        }
    }
}
[Theory]
[InlineData(10)]
[InlineData(128)]
public void Tar_Japanese_Name(int length)
{
using (var mstm = new MemoryStream())
{
// Use the same UTF-8 encoding for writing and reading so the multi-byte
// filename survives the round trip.
var enc = new ArchiveEncoding()
{
Default = Encoding.UTF8
};
var twopt = new TarWriterOptions(CompressionType.None, true);
twopt.ArchiveEncoding = enc;
// U+3042 is Hiragana 'A', repeated to the requested length (128 presumably
// exceeds the 100-byte ustar name field and exercises long-name handling --
// confirm against the writer implementation).
var fname = new string((char)0x3042, length);
using (var tw = new TarWriter(mstm, twopt))
using (var input = new MemoryStream(new byte[32]))
{
tw.Write(fname, input, null);
}
// Re-open the produced bytes and confirm every entry key matches exactly.
using (var inputMemory = new MemoryStream(mstm.ToArray()))
{
var tropt = new ReaderOptions()
{
ArchiveEncoding = enc
};
using (var tr = TarReader.Open(inputMemory, tropt))
{
while (tr.MoveToNextEntry())
{
Assert.Equal(fname, tr.Entry.Key);
}
}
}
}
}
}
}

View File

@@ -3,6 +3,7 @@ using System.IO;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Readers.Tar;
using SharpCompress.Test.Mocks;
using Xunit;
namespace SharpCompress.Test.Tar
@@ -26,7 +27,6 @@ namespace SharpCompress.Test.Tar
using (Stream stream = new ForwardOnlyStream(File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar"))))
using (IReader reader = ReaderFactory.Open(stream))
{
ResetScratch();
int x = 0;
while (reader.MoveToNextEntry())
{
@@ -74,7 +74,6 @@ namespace SharpCompress.Test.Tar
[Fact]
public void Tar_BZip2_Entry_Stream()
{
ResetScratch();
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2")))
using (var reader = TarReader.Open(stream))
{
@@ -161,5 +160,91 @@ namespace SharpCompress.Test.Tar
Assert.True(reader.ArchiveType == ArchiveType.Tar);
}
}
[Fact]
public void Tar_With_TarGz_With_Flushed_EntryStream()
{
// Regression test: reading a nested archive through a wrapper that flushes
// its inner stream on Dispose must not break the outer entry stream.
string archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.ContainsTarGz.tar");
using(Stream stream = File.OpenRead(archiveFullPath))
using(IReader reader = ReaderFactory.Open(stream))
{
Assert.True(reader.MoveToNextEntry());
Assert.Equal("inner.tar.gz", reader.Entry.Key);
using(var entryStream = reader.OpenEntryStream()) {
// FlushOnDisposeStream always flushes on Dispose, mimicking CryptoStream.
using(FlushOnDisposeStream flushingStream = new FlushOnDisposeStream(entryStream)) {
// Extract inner.tar.gz
using(var innerReader = ReaderFactory.Open(flushingStream)) {
Assert.True(innerReader.MoveToNextEntry());
Assert.Equal("test", innerReader.Entry.Key);
}
}
}
}
}
[Fact]
public void Tar_GZip_With_Symlink_Entries()
{
// Symlink creation/inspection below uses Mono.Unix, so those parts are
// skipped on Windows.
var isWindows = System.Runtime.InteropServices.RuntimeInformation.IsOSPlatform(
System.Runtime.InteropServices.OSPlatform.Windows);
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "TarWithSymlink.tar.gz")))
using (var reader = TarReader.Open(stream))
{
// NOTE(review): `names` is never used -- candidate for removal.
List<string> names = new List<string>();
while (reader.MoveToNextEntry())
{
if (reader.Entry.IsDirectory)
{
continue;
}
reader.WriteEntryToDirectory(SCRATCH_FILES_PATH,
new ExtractionOptions()
{
ExtractFullPath = true,
Overwrite = true,
// Custom symlink materialization, only performed on non-Windows hosts.
WriteSymbolicLink = (sourcePath, targetPath) =>
{
if (!isWindows)
{
var link = new Mono.Unix.UnixSymbolicLinkInfo(sourcePath);
if (System.IO.File.Exists(sourcePath))
{
link.Delete(); // equivalent to ln -s -f
}
link.CreateSymbolicLinkTo(targetPath);
}
}
});
// Verify the created link points where the tar entry said it should.
if (!isWindows)
{
if (reader.Entry.LinkTarget != null)
{
var path = System.IO.Path.Combine(SCRATCH_FILES_PATH, reader.Entry.Key);
var link = new Mono.Unix.UnixSymbolicLinkInfo(path);
if (link.HasContents)
{
// need to convert the link to an absolute path for comparison
var target = reader.Entry.LinkTarget;
var realTarget = System.IO.Path.GetFullPath(
System.IO.Path.Combine($"{System.IO.Path.GetDirectoryName(path)}",
target)
);
Assert.Equal(realTarget, link.GetContents().ToString());
}
else
{
Assert.True(false, "Symlink has no target");
}
}
}
}
}
}
}
}

View File

@@ -2,23 +2,21 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Text;
using SharpCompress.Readers;
using Xunit;
[assembly: CollectionBehavior(DisableTestParallelization = true)]
namespace SharpCompress.Test
{
public class TestBase
public class TestBase : IDisposable
{
private string SOLUTION_BASE_PATH;
protected string TEST_ARCHIVES_PATH;
protected string ORIGINAL_FILES_PATH;
protected string MISC_TEST_FILES_PATH;
private string SCRATCH_BASE_PATH;
public string SCRATCH_FILES_PATH;
protected string SCRATCH2_FILES_PATH;
public TestBase()
{
@@ -28,23 +26,18 @@ namespace SharpCompress.Test
TEST_ARCHIVES_PATH = Path.Combine(SOLUTION_BASE_PATH, "TestArchives", "Archives");
ORIGINAL_FILES_PATH = Path.Combine(SOLUTION_BASE_PATH, "TestArchives", "Original");
MISC_TEST_FILES_PATH = Path.Combine(SOLUTION_BASE_PATH, "TestArchives", "MiscTest");
SCRATCH_FILES_PATH = Path.Combine(SOLUTION_BASE_PATH, "TestArchives", "Scratch");
SCRATCH2_FILES_PATH = Path.Combine(SOLUTION_BASE_PATH, "TestArchives", "Scratch2");
}
public void ResetScratch()
{
if (Directory.Exists(SCRATCH_FILES_PATH))
{
Directory.Delete(SCRATCH_FILES_PATH, true);
}
Directory.CreateDirectory(SCRATCH_FILES_PATH);
if (Directory.Exists(SCRATCH2_FILES_PATH))
{
Directory.Delete(SCRATCH2_FILES_PATH, true);
}
Directory.CreateDirectory(SCRATCH2_FILES_PATH);
SCRATCH_BASE_PATH = Path.Combine(SOLUTION_BASE_PATH, "TestArchives", Guid.NewGuid().ToString());
SCRATCH_FILES_PATH = Path.Combine(SCRATCH_BASE_PATH, "Scratch");
SCRATCH2_FILES_PATH = Path.Combine(SCRATCH_BASE_PATH, "Scratch2");
Directory.CreateDirectory(SCRATCH_FILES_PATH);
Directory.CreateDirectory(SCRATCH2_FILES_PATH);
}
/// <summary>
/// Per-test cleanup: removes the scratch directory tree created by ResetScratch.
/// </summary>
public void Dispose()
{
    // Guard: Directory.Delete throws if the path is missing (or null when
    // ResetScratch was never called for this test), so only delete when present.
    if (SCRATCH_BASE_PATH != null && Directory.Exists(SCRATCH_BASE_PATH))
    {
        Directory.Delete(SCRATCH_BASE_PATH, true);
    }
}
public void VerifyFiles()
@@ -168,14 +161,6 @@ namespace SharpCompress.Test
return;
}
if (IsFileLocked(new FileInfo(file1)))
{
throw new InvalidOperationException($"{file1} is not disposed");
}
if (IsFileLocked(new FileInfo(file2)))
{
throw new InvalidOperationException($"{file2} is not disposed");
}
using (var file1Stream = File.OpenRead(file1))
using (var file2Stream = File.OpenRead(file2))
{
@@ -203,13 +188,15 @@ namespace SharpCompress.Test
Assert.Equal(fi1.Attributes, fi2.Attributes);
}
protected void CompareArchivesByPath(string file1, string file2)
{
protected void CompareArchivesByPath(string file1, string file2) {
ReaderOptions readerOptions = new ReaderOptions { LeaveStreamOpen = false };
readerOptions.ArchiveEncoding.Default = Encoding.GetEncoding(866);
//don't compare the order. OS X reads files from the file system in a different order therefore makes the archive ordering different
var archive1Entries = new List<string>();
var archive2Entries = new List<string>();
using (var archive1 = ReaderFactory.Open(File.OpenRead(file1)))
using (var archive2 = ReaderFactory.Open(File.OpenRead(file2)))
using (var archive1 = ReaderFactory.Open(File.OpenRead(file1), readerOptions))
using (var archive2 = ReaderFactory.Open(File.OpenRead(file2), readerOptions))
{
while (archive1.MoveToNextEntry())
{
@@ -227,29 +214,5 @@ namespace SharpCompress.Test
}
}
protected bool IsFileLocked(FileInfo file)
{
FileStream stream = null;
try
{
stream = file.Open(FileMode.Open, FileAccess.Read, FileShare.None);
}
catch (IOException)
{
//the file is unavailable because it is:
//still being written to
//or being processed by another thread
//or does not exist (has already been processed)
return true;
}
finally
{
stream?.Close();
}
//file is not locked
return false;
}
}
}

View File

@@ -1,70 +0,0 @@
using System.IO;
namespace SharpCompress.Test
{
// Test double that wraps a stream and records whether it has been disposed.
// (This copy lives in SharpCompress.Test; a version with the same shape also
// appears under SharpCompress.Test.Mocks elsewhere in this changeset.)
public class TestStream : Stream
{
// The wrapped stream all operations delegate to.
private readonly Stream stream;
// Convenience overload: report all capabilities as available.
public TestStream(Stream stream)
: this(stream, true, true, true)
{
}
// True once Dispose has run; lets tests assert on disposal behavior.
public bool IsDisposed { get; private set; }
public TestStream(Stream stream, bool read, bool write, bool seek)
{
this.stream = stream;
CanRead = read;
CanWrite = write;
CanSeek = seek;
}
protected override void Dispose(bool disposing)
{
// NOTE(review): disposes the wrapped stream even when disposing is false,
// which deviates from the standard Dispose(bool) pattern -- confirm no
// caller relies on this before changing it.
base.Dispose(disposing);
stream.Dispose();
IsDisposed = true;
}
public override bool CanRead { get; }
public override bool CanSeek { get; }
public override bool CanWrite { get; }
public override void Flush()
{
stream.Flush();
}
public override long Length => stream.Length;
public override long Position
{
get => stream.Position;
set => stream.Position = value;
}
public override int Read(byte[] buffer, int offset, int count)
{
return stream.Read(buffer, offset, count);
}
public override long Seek(long offset, SeekOrigin origin)
{
return stream.Seek(offset, origin);
}
public override void SetLength(long value)
{
stream.SetLength(value);
}
public override void Write(byte[] buffer, int offset, int count)
{
stream.Write(buffer, offset, count);
}
}
}

View File

@@ -1,5 +1,6 @@
using System;
using System.IO;
using System.Text;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -18,13 +19,15 @@ namespace SharpCompress.Test
protected void Write(CompressionType compressionType, string archive, string archiveToVerifyAgainst)
{
ResetScratch();
using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH2_FILES_PATH, archive)))
{
using (var writer = WriterFactory.Open(stream, type, new WriterOptions(compressionType)
{
LeaveStreamOpen = true
}))
using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH2_FILES_PATH, archive))) {
WriterOptions writerOptions = new WriterOptions(compressionType)
{
LeaveStreamOpen = true,
};
writerOptions.ArchiveEncoding.Default = Encoding.GetEncoding(866);
using (var writer = WriterFactory.Open(stream, type, writerOptions))
{
writer.WriteAll(ORIGINAL_FILES_PATH, "*", SearchOption.AllDirectories);
}
@@ -32,13 +35,19 @@ namespace SharpCompress.Test
CompareArchivesByPath(Path.Combine(SCRATCH2_FILES_PATH, archive),
Path.Combine(TEST_ARCHIVES_PATH, archiveToVerifyAgainst));
using (Stream stream = File.OpenRead(Path.Combine(SCRATCH2_FILES_PATH, archive)))
using (var reader = ReaderFactory.Open(new NonDisposingStream(stream)))
using (Stream stream = File.OpenRead(Path.Combine(SCRATCH2_FILES_PATH, archive)))
{
reader.WriteAllToDirectory(SCRATCH_FILES_PATH, new ExtractionOptions()
ReaderOptions readerOptions = new ReaderOptions();
readerOptions.ArchiveEncoding.Default = Encoding.GetEncoding(866);
using (var reader = ReaderFactory.Open(new NonDisposingStream(stream), readerOptions))
{
ExtractFullPath = true
});
reader.WriteAllToDirectory(SCRATCH_FILES_PATH, new ExtractionOptions()
{
ExtractFullPath = true
});
}
}
VerifyFiles();
}

View File

@@ -5,6 +5,7 @@ using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Readers.Zip;
using SharpCompress.Test.Mocks;
using SharpCompress.Writers;
using SharpCompress.Writers.Zip;
using Xunit;
@@ -100,7 +101,6 @@ namespace SharpCompress.Test.Zip
public void RunSingleTest(long files, long filesize, bool set_zip64, bool forward_only, long write_chunk_size = 1024 * 1024, string filename = "zip64-test.zip")
{
ResetScratch();
filename = Path.Combine(SCRATCH2_FILES_PATH, filename);
if (File.Exists(filename))
@@ -134,7 +134,7 @@ namespace SharpCompress.Test.Zip
var eo = new ZipWriterEntryOptions() { DeflateCompressionLevel = Compressors.Deflate.CompressionLevel.None };
using (var zip = File.OpenWrite(filename))
using(var st = forward_only ? (Stream)new NonSeekableStream(zip) : zip)
using(var st = forward_only ? (Stream)new ForwardOnlyStream(zip) : zip)
using (var zipWriter = (ZipWriter)WriterFactory.Open(st, ArchiveType.Zip, opts))
{
@@ -187,32 +187,5 @@ namespace SharpCompress.Test.Zip
);
}
}
/// <summary>
/// Helper to create non-seekable streams from filestream
/// </summary>
private class NonSeekableStream : Stream
{
private readonly Stream stream;
public NonSeekableStream(Stream s) { stream = s; }
public override bool CanRead => stream.CanRead;
public override bool CanSeek => false;
public override bool CanWrite => stream.CanWrite;
public override long Length => throw new NotImplementedException();
public override long Position { get => throw new NotImplementedException(); set => throw new NotImplementedException(); }
public override void Flush() { stream.Flush(); }
public override int Read(byte[] buffer, int offset, int count)
{ return stream.Read(buffer, offset, count); }
public override long Seek(long offset, SeekOrigin origin)
{ throw new NotImplementedException(); }
public override void SetLength(long value)
{ throw new NotImplementedException(); }
public override void Write(byte[] buffer, int offset, int count)
{ stream.Write(buffer, offset, count); }
}
}
}

View File

@@ -7,6 +7,7 @@ using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Writers;
using SharpCompress.Writers.Zip;
using Xunit;
namespace SharpCompress.Test.Zip
@@ -157,12 +158,15 @@ namespace SharpCompress.Test.Zip
string unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.noEmptyDirs.zip");
string modified = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.mod.zip");
ResetScratch();
using (var archive = ZipArchive.Open(unmodified))
{
var entry = archive.Entries.Single(x => x.Key.EndsWith("jpg"));
archive.RemoveEntry(entry);
archive.SaveTo(scratchPath, CompressionType.Deflate);
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.Deflate);
writerOptions.ArchiveEncoding.Default = Encoding.GetEncoding(866);
archive.SaveTo(scratchPath, writerOptions);
}
CompareArchivesByPath(modified, scratchPath);
}
@@ -175,11 +179,14 @@ namespace SharpCompress.Test.Zip
string unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.mod.zip");
string modified = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.mod2.zip");
ResetScratch();
using (var archive = ZipArchive.Open(unmodified))
{
archive.AddEntry("jpg\\test.jpg", jpg);
archive.SaveTo(scratchPath, CompressionType.Deflate);
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.Deflate);
writerOptions.ArchiveEncoding.Default = Encoding.GetEncoding(866);
archive.SaveTo(scratchPath, writerOptions);
}
CompareArchivesByPath(modified, scratchPath);
}
@@ -190,7 +197,6 @@ namespace SharpCompress.Test.Zip
string scratchPath1 = Path.Combine(SCRATCH_FILES_PATH, "a.zip");
string scratchPath2 = Path.Combine(SCRATCH_FILES_PATH, "b.zip");
ResetScratch();
using (var arc = ZipArchive.Create())
{
string str = "test.txt";
@@ -206,7 +212,6 @@ namespace SharpCompress.Test.Zip
[Fact]
public void Zip_Removal_Poly()
{
ResetScratch();
string scratchPath = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.noEmptyDirs.zip");
@@ -254,7 +259,6 @@ namespace SharpCompress.Test.Zip
[Fact]
public void Zip_Create_New()
{
ResetScratch();
foreach (var file in Directory.EnumerateFiles(ORIGINAL_FILES_PATH, "*.*", SearchOption.AllDirectories))
{
var newFileName = file.Substring(ORIGINAL_FILES_PATH.Length);
@@ -276,7 +280,11 @@ namespace SharpCompress.Test.Zip
using (var archive = ZipArchive.Create())
{
archive.AddAllFromDirectory(SCRATCH_FILES_PATH);
archive.SaveTo(scratchPath, CompressionType.Deflate);
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.Deflate);
writerOptions.ArchiveEncoding.Default = Encoding.GetEncoding(866);
archive.SaveTo(scratchPath, writerOptions);
}
CompareArchivesByPath(unmodified, scratchPath);
Directory.Delete(SCRATCH_FILES_PATH, true);
@@ -285,7 +293,6 @@ namespace SharpCompress.Test.Zip
[Fact]
public void Zip_Create_New_Add_Remove()
{
ResetScratch();
foreach (var file in Directory.EnumerateFiles(ORIGINAL_FILES_PATH, "*.*", SearchOption.AllDirectories))
{
var newFileName = file.Substring(ORIGINAL_FILES_PATH.Length);
@@ -315,7 +322,6 @@ namespace SharpCompress.Test.Zip
[Fact]
public void Zip_Deflate_WinzipAES_Read()
{
ResetScratch();
using (var reader = ZipArchive.Open(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.WinzipAES.zip"), new ReaderOptions()
{
Password = "test"
@@ -336,7 +342,6 @@ namespace SharpCompress.Test.Zip
[Fact]
public void Zip_Deflate_WinzipAES_MultiOpenEntryStream()
{
ResetScratch();
using (var reader = ZipArchive.Open(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.WinzipAES2.zip"), new ReaderOptions()
{
Password = "test"
@@ -352,10 +357,25 @@ namespace SharpCompress.Test.Zip
}
}
[Fact]
public void Zip_Read_Volume_Comment()
{
using (var reader = ZipArchive.Open(Path.Combine(TEST_ARCHIVES_PATH, "Zip.zip64.zip"), new ReaderOptions()
{
Password = "test"
}))
{
var isComplete = reader.IsComplete;
Assert.Equal(1, reader.Volumes.Count);
string expectedComment = "Encoding:utf-8 || Compression:Deflate levelDefault || Encrypt:None || ZIP64:Always\r\nCreated at 2017-Jan-23 14:10:43 || DotNetZip Tool v1.9.1.8\r\nTest zip64 archive";
Assert.Equal(expectedComment, reader.Volumes.First().Comment);
}
}
[Fact]
public void Zip_BZip2_Pkware_Read()
{
ResetScratch();
using (var reader = ZipArchive.Open(Path.Combine(TEST_ARCHIVES_PATH, "Zip.bzip2.pkware.zip"), new ReaderOptions()
{
Password = "test"
@@ -378,7 +398,6 @@ namespace SharpCompress.Test.Zip
{
string unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.noEmptyDirs.zip");
ResetScratch();
ZipArchive a = ZipArchive.Open(unmodified);
int count = 0;
foreach (var e in a.Entries)
@@ -493,5 +512,48 @@ namespace SharpCompress.Test.Zip
}
}
}
}
[Fact]
public void Zip_BadLocalExtra_Read()
{
string zipPath = Path.Combine(TEST_ARCHIVES_PATH, "Zip.badlocalextra.zip");
using (ZipArchive za = ZipArchive.Open(zipPath))
{
var ex = Record.Exception(() =>
{
var firstEntry = za.Entries.First(x => x.Key == "first.txt");
var buffer = new byte[4096];
using (var memoryStream = new MemoryStream())
using (var firstStream = firstEntry.OpenEntryStream())
{
firstStream.CopyTo(memoryStream);
Assert.Equal(199, memoryStream.Length);
}
});
Assert.Null(ex);
}
}
[Fact]
public void Zip_NoCompression_DataDescriptors_Read()
{
string zipPath = Path.Combine(TEST_ARCHIVES_PATH, "Zip.none.datadescriptors.zip");
using (ZipArchive za = ZipArchive.Open(zipPath))
{
var firstEntry = za.Entries.First(x => x.Key == "first.txt");
var buffer = new byte[4096];
using (var memoryStream = new MemoryStream())
using (var firstStream = firstEntry.OpenEntryStream())
{
firstStream.CopyTo(memoryStream);
Assert.Equal(199, memoryStream.Length);
}
}
}
}
}

View File

@@ -4,6 +4,7 @@ using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Zip;
using SharpCompress.Test.Mocks;
using SharpCompress.Writers;
using Xunit;
@@ -19,7 +20,6 @@ namespace SharpCompress.Test.Zip
[Fact]
public void Issue_269_Double_Skip()
{
ResetScratch();
var path = Path.Combine(TEST_ARCHIVES_PATH, "PrePostHeaders.zip");
using (Stream stream = new ForwardOnlyStream(File.OpenRead(path)))
using (IReader reader = ReaderFactory.Open(stream))
@@ -77,7 +77,6 @@ namespace SharpCompress.Test.Zip
using (Stream stream = new ForwardOnlyStream(File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip"))))
using (IReader reader = ReaderFactory.Open(stream))
{
ResetScratch();
int x = 0;
while (reader.MoveToNextEntry())
{
@@ -144,7 +143,6 @@ namespace SharpCompress.Test.Zip
[Fact]
public void Zip_BZip2_PkwareEncryption_Read()
{
ResetScratch();
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.bzip2.pkware.zip")))
using (var reader = ZipReader.Open(stream, new ReaderOptions()
{
@@ -170,7 +168,6 @@ namespace SharpCompress.Test.Zip
[Fact]
public void Zip_Reader_Disposal_Test()
{
ResetScratch();
using (TestStream stream = new TestStream(File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip"))))
{
using (var reader = ReaderFactory.Open(stream))
@@ -195,7 +192,6 @@ namespace SharpCompress.Test.Zip
[Fact]
public void Zip_Reader_Disposal_Test2()
{
ResetScratch();
using (TestStream stream = new TestStream(File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip"))))
{
var reader = ReaderFactory.Open(stream);
@@ -220,7 +216,6 @@ namespace SharpCompress.Test.Zip
{
Assert.Throws<NotSupportedException>(() =>
{
ResetScratch();
using (
Stream stream =
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH,
@@ -251,7 +246,6 @@ namespace SharpCompress.Test.Zip
[Fact]
public void Zip_Deflate_WinzipAES_Read()
{
ResetScratch();
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.WinzipAES.zip")))
using (var reader = ZipReader.Open(stream, new ReaderOptions()
{
@@ -275,41 +269,48 @@ namespace SharpCompress.Test.Zip
VerifyFiles();
}
private class NonSeekableMemoryStream : MemoryStream
{
public override bool CanSeek => false;
}
[Fact]
public void TestSharpCompressWithEmptyStream()
{
ResetScratch();
MemoryStream stream = new NonSeekableMemoryStream();
using (IWriter zipWriter = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
var expected = new Tuple<string, byte[]>[]
{
zipWriter.Write("foo.txt", new MemoryStream(new byte[0]));
zipWriter.Write("foo2.txt", new MemoryStream(new byte[10]));
}
new Tuple<string, byte[]>("foo.txt", new byte[0]),
new Tuple<string, byte[]>("foo2.txt", new byte[10])
};
stream = new MemoryStream(stream.ToArray());
File.WriteAllBytes(Path.Combine(SCRATCH_FILES_PATH, "foo.zip"), stream.ToArray());
using (IReader zipReader = ZipReader.Open(new NonDisposingStream(stream, true)))
using (var memory = new MemoryStream())
{
while (zipReader.MoveToNextEntry())
Stream stream = new TestStream(memory, read: true, write: true, seek: false);
using (IWriter zipWriter = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
{
using (EntryStream entry = zipReader.OpenEntryStream())
zipWriter.Write(expected[0].Item1, new MemoryStream(expected[0].Item2));
zipWriter.Write(expected[1].Item1, new MemoryStream(expected[1].Item2));
}
stream = new MemoryStream(memory.ToArray());
File.WriteAllBytes(Path.Combine(SCRATCH_FILES_PATH, "foo.zip"), memory.ToArray());
using (IReader zipReader = ZipReader.Open(new NonDisposingStream(stream, true)))
{
var i = 0;
while (zipReader.MoveToNextEntry())
{
MemoryStream tempStream = new MemoryStream();
const int bufSize = 0x1000;
byte[] buf = new byte[bufSize];
int bytesRead = 0;
while ((bytesRead = entry.Read(buf, 0, bufSize)) > 0)
using (EntryStream entry = zipReader.OpenEntryStream())
{
tempStream.Write(buf, 0, bytesRead);
MemoryStream tempStream = new MemoryStream();
const int bufSize = 0x1000;
byte[] buf = new byte[bufSize];
int bytesRead = 0;
while ((bytesRead = entry.Read(buf, 0, bufSize)) > 0)
{
tempStream.Write(buf, 0, bytesRead);
}
Assert.Equal(expected[i].Item1, zipReader.Entry.Key);
Assert.Equal(expected[i].Item2, tempStream.ToArray());
}
i++;
}
}
}

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.