Compare commits

...

87 Commits

Author SHA1 Message Date
Adam Hathcock
3009e6dcfd Mark for 0.32.2 2022-07-29 10:45:56 +01:00
Adam Hathcock
70343b17bc add more tests for uncompressed streaming zips 2022-07-29 09:47:35 +01:00
Adam Hathcock
3f6027ec2c Merge pull request #686 from Erior/477
Mitigation of problems
2022-07-29 09:41:24 +01:00
Lars Vahlenberg
5706732c55 Naive implementation of searching of DataDescriptor, not compatible with big archives (>32bit), but handles test cases. 2022-07-28 23:03:06 +02:00
Lars Vahlenberg
ad633a9dd0 missing test file from error report 2022-07-28 21:20:42 +02:00
Lars Vahlenberg
7c56df1237 Mitigation of problems 2022-07-28 20:36:28 +02:00
Adam Hathcock
c1110f2897 Merge pull request #683 from OwnageIsMagic/patch-1
WriteAll: use delegate instead of Expression
2022-07-27 10:13:50 +01:00
Adam Hathcock
647642578b Merge branch 'master' into patch-1 2022-07-27 09:49:13 +01:00
OwnageIsMagic
5ca4efac31 WriteAll: revert 109a7c1 2022-07-26 21:36:00 +03:00
Adam Hathcock
deddf12b70 Merge pull request #684 from daverant/nuget-license
Include license in nuget package
2022-07-26 16:21:41 +01:00
OwnageIsMagic
109a7c12ea WriteAll: update delegate type 2022-07-19 04:03:26 +03:00
David Rant
f955031e27 Hide license in IDE 2022-07-18 17:16:22 +01:00
David Rant
6a69c6cd02 Reference bundled package license file 2022-07-18 17:11:06 +01:00
David Rant
c1d4ac45ab Include license when packing 2022-07-18 17:10:36 +01:00
OwnageIsMagic
2946a35b0e WriteAll: use delegate instead of Expression 2022-07-18 04:36:31 +03:00
Adam Hathcock
c73a8cb18f Merge pull request #682 from adamhathcock/RarFileVolIdx_RarArcVer_GzCrc 2022-07-16 11:29:48 +01:00
Nanook
574a093038 Minor tweak that got missed in the last tidy. 2022-07-15 21:25:39 +01:00
Nanook
4eb1fe0b80 RarArchive has Min/MaxVersion. RarEntry has Volumne Indexes. GZ CRC fix. 2022-07-15 21:15:10 +01:00
Adam Hathcock
4c46cd725b Merge pull request #679 from louis-michelbergeron/master
Fix LZMADecoder Code function
2022-06-28 08:27:13 +01:00
Adam Hathcock
fdbd0e1fba Merge branch 'master' into master 2022-06-28 08:21:49 +01:00
louis-michel
5801168ce0 Merge branch 'master' of https://github.com/louis-michelbergeron/sharpcompress 2022-06-27 19:13:20 -04:00
louis-michel
d4c7551087 Fix LZMA Code function 2022-06-27 19:13:10 -04:00
Adam Hathcock
c9daf0c9f5 Merge pull request #675 from Erior/feature/#636
ReadOnlySubStream overrides and adds logic #636
2022-06-22 11:17:18 +01:00
Adam Hathcock
8cb566b031 Merge branch 'master' into feature/#636 2022-06-22 09:05:57 +01:00
Lars Vahlenberg
089b16326e ReadOnlySubStream overrides and adds logic to Read byte[], needs to have same logic for Span<byte> for consistency. 2022-06-21 19:30:07 +02:00
Adam Hathcock
c0e43cc0e5 Mark for 0.32.1 2022-06-20 10:32:47 +01:00
Adam Hathcock
514c3539e6 Merge pull request #672 from MartinDemberger/Task_477
Corrected skip-marker on skip of uncompressed ZIP file with missing size informations.
2022-06-20 10:31:31 +01:00
Adam Hathcock
62c94a178c Merge branch 'master' into Task_477 2022-06-20 10:26:45 +01:00
Adam Hathcock
9fee38b18d Merge pull request #674 from MartinDemberger/DeduplicateNonDisposing
Suppress nested NonDisposingStream
2022-06-20 10:25:25 +01:00
Adam Hathcock
cd3114d39e Merge branch 'master' into DeduplicateNonDisposing 2022-06-20 10:20:02 +01:00
Adam Hathcock
12b4e15812 Merge pull request #673 from Erior/feature/Malformed-zip-file-generated
Feature/malformed zip file generated
2022-06-20 10:19:41 +01:00
Martin Demberger
35336a0827 Suppress nested NonDisposingStream 2022-06-19 22:05:52 +02:00
Martin Demberger
ece7cbfec3 Set skip-marker when stream is skipped 2022-06-18 14:35:14 +02:00
Lars Vahlenberg
a00075ee0d Wrong flags set, we do not expose this in the interface 2022-06-17 15:07:07 +02:00
Lars Vahlenberg
b6c4e28b4d Generated test case, however, don't see any problems 2022-06-16 23:32:46 +02:00
Martin Demberger
8b55cce39a Better handling of uncompressed zip files. 2022-06-15 16:28:14 +02:00
Adam Hathcock
6e99446ce5 Mark for 0.32 2022-06-13 15:28:54 +01:00
Adam Hathcock
20a09b4866 Drop net5 2022-06-13 15:24:53 +01:00
Adam Hathcock
7f7db5eabd Merge pull request #669 from louis-michelbergeron/master
XZ decoding BCJ filters support
2022-06-13 08:37:28 +01:00
louis-michelbergeron
0651d064fc Update README.md 2022-06-10 15:32:41 -04:00
louis-michelbergeron
73ca7759d3 Update README.md
Contribution line.
2022-06-10 15:32:08 -04:00
louis-michel
0f112d0685 BCJ executable filter (only for decoding), used by XZ. 2022-06-10 13:29:42 -04:00
Adam Hathcock
fa5c91ecf6 Merge pull request #663 from Nanook/master
Align behavour of 7Zip exception with encrypted filenames arc with rar when no password provided
2022-05-04 08:21:39 +01:00
Nanook
3b2fd1b9fa Merge branch 'adamhathcock:master' into master 2022-05-04 01:36:58 +01:00
Craig
e424094fdf 7z encrypted filename exception with no password matches rar behaviour. 2022-05-04 01:35:58 +01:00
Adam Hathcock
bad9ab2c9d Merge pull request #662 from Nanook/master
Properly integrated zip multivolume and general split support.
2022-05-03 08:23:33 +01:00
Craig
61c01ce9b0 Properly integrated zip multivolume and split support. 2022-04-30 19:35:40 +01:00
Adam Hathcock
3de5df9f38 Merge pull request #661 from Nanook/master
Added multipart Zip support (z01...). Added IEntry.IsSolid
2022-04-29 13:43:37 +01:00
Craig
910aa1c22e Corrected the Crc exception as it was within a #DEBUG define 2022-04-27 14:12:00 +01:00
Craig
71c8f3129f RarStream Position fix, it was returning the file size. 7Zip CrcCheckStream always failed. Added a Solid Rar entry CRC test. 2022-04-27 13:16:05 +01:00
Craig
224614312f Added multipart Zip support (z01...). Added IEntry.IsSolid and implemented Rar and 7Zi[ support. 2022-04-25 01:16:53 +01:00
Adam Hathcock
f717133947 Merge pull request #660 from adamhathcock/dependabot/github_actions/actions/upload-artifact-3
Bump actions/upload-artifact from 2 to 3
2022-04-21 11:16:52 +01:00
dependabot[bot]
fcbfcfed03 Bump actions/upload-artifact from 2 to 3
Bumps [actions/upload-artifact](https://github.com/actions/upload-artifact) from 2 to 3.
- [Release notes](https://github.com/actions/upload-artifact/releases)
- [Commits](https://github.com/actions/upload-artifact/compare/v2...v3)

---
updated-dependencies:
- dependency-name: actions/upload-artifact
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-04-21 08:52:47 +00:00
Adam Hathcock
e6601c91ca Merge pull request #659 from adamhathcock/updates
Dependency updates and start of enforcing some C# standards
2022-04-21 09:52:26 +01:00
Adam Hathcock
0379903689 Fix tests 2022-04-21 08:59:35 +01:00
Adam Hathcock
6950eebf9f Dependency updates and start of enforcing some C# standards 2022-04-21 08:54:59 +01:00
Adam Hathcock
c15f1327c9 Merge pull request #658 from Nanook/master
Added Split archive support with unit tests. …
2022-04-21 08:26:06 +01:00
Craig
ec1999f73d Added Split archive support with unit tests. Added ArchiveFactory.IsArchive() and minor refactor. 2022-04-21 00:12:26 +01:00
Adam Hathcock
3d0a18b05d Merge pull request #655 from Ryhon0/master
Tar file mode, user and group
2022-04-11 16:43:40 +01:00
Ryhon
156a02c5a8 Tar file mode, user and group 2022-04-11 14:33:55 +02:00
Adam Hathcock
eba1a443e3 Merge pull request #652 from ds5678/net6_with_trimming
Add a net 6 target and make trimmable
2022-04-11 10:14:18 +01:00
ds5678
eb44cdc903 Update dotnetcore.yml 2022-04-08 02:21:07 -04:00
Jeremy Pritts
196df63de2 fix build project 2022-03-31 14:52:36 -04:00
Jeremy Pritts
ed3c11f44c update workflow and csproj files 2022-03-31 04:04:56 -04:00
Jeremy Pritts
7f6c877fdc add a net 6 target and make trimmable 2022-03-30 21:24:51 -04:00
Adam Hathcock
eee8309db8 Mark for 0.31 2022-03-30 12:03:03 +01:00
Adam Hathcock
155cfab792 Merge pull request #651 from loop-evgeny/evgeny-upgrade-adler32
Update Adler32 from ImageSharp v2.1.0
2022-03-30 12:01:32 +01:00
Evgeny Morozov
e1c36afdec Update Adler32 from ImageSharp v2.1.0
Adler32.cs is taken from 09b2cdb83a with minimal change to make it build as part of SharpCompress. Fixes https://github.com/adamhathcock/sharpcompress/issues/650 and https://github.com/adamhathcock/sharpcompress/issues/645.
2022-03-30 12:17:14 +02:00
Adam Hathcock
6b0d6a41ca Merge pull request #638 from Nanook/rar2MultiWithTest
Rar2 fix with new unit tests that fail on previous build.
2022-02-16 08:33:12 +00:00
Craig
dab157bb71 Rar2 fix with new unit tests that fail on previous build. 2022-02-15 16:24:22 +00:00
Adam Hathcock
8d17d09455 Merge pull request #624 from adamhathcock/issue-617
Add test and probable fix for Issue 617
2021-11-22 09:20:15 +00:00
Adam Hathcock
05208ccd9b Add test and probable fix for Issue 617 2021-11-22 08:40:40 +00:00
Adam Hathcock
a1e7c0068d Merge pull request #622 from adamhathcock/net461-tests
Net461 tests
2021-10-02 15:32:20 +01:00
Adam Hathcock
e6bec19946 Mark for 0.30 2021-10-02 15:29:22 +01:00
Adam Hathcock
ec2be2869f Fix whitespace from dotnet format 2021-10-02 15:29:03 +01:00
Adam Hathcock
ce5432ed73 Fix tests for multi-targetting 2021-10-02 15:25:43 +01:00
Adam Hathcock
b6e0ad89ce Remove duplicated artifact step 2021-10-02 15:21:05 +01:00
Adam Hathcock
2745bfa19b Minor SDK update 2021-10-02 15:19:51 +01:00
Adam Hathcock
3cdc4b38a6 Test 461 on github actions 2021-10-02 15:19:13 +01:00
Adam Hathcock
fc1ca808d7 Merge pull request #621 from inthemedium/master
Add net461 target to clean up issues with system.* nuget dependencies
2021-10-02 15:08:18 +01:00
Jeff Tyson
6983e66037 Fix preprocessor condition 2021-10-01 16:34:00 +00:00
Jeff Tyson
01f7336d09 Based on docs, the target should bet net461 2021-09-29 22:04:47 +00:00
Jeff Tyson
1561bba538 Add net462 target to clean up issues with system.* nuget dependencies 2021-09-29 21:55:11 +00:00
Adam Hathcock
3ecf8a5e0c Merge pull request #616 from amosonn/patch-1
Fix for chunked read for ZLibBaseStream
2021-09-27 09:14:58 +01:00
Adam Hathcock
e2095fc416 Merge branch 'master' into patch-1 2021-09-27 09:08:58 +01:00
Amos Onn
8398d40106 Fix #615 2021-09-14 22:02:18 +02:00
Amos Onn
134fa8892f Test for bug #615 2021-09-14 21:55:05 +02:00
163 changed files with 4137 additions and 1023 deletions

543
.editorconfig Normal file
View File

@@ -0,0 +1,543 @@
# Version: 2.0.1 (Using https://semver.org/)
# Updated: 2020-12-11
# See https://github.com/RehanSaeed/EditorConfig/releases for release notes.
# See https://github.com/RehanSaeed/EditorConfig for updates to this file.
# See http://EditorConfig.org for more information about .editorconfig files.
##########################################
# Common Settings
##########################################
# This file is the top-most EditorConfig file
root = true
# All Files
[*]
charset = utf-8
indent_style = space
indent_size = 4
insert_final_newline = true
trim_trailing_whitespace = true
##########################################
# File Extension Settings
##########################################
# Visual Studio Solution Files
[*.sln]
indent_style = tab
# Visual Studio XML Project Files
[*.{csproj,vbproj,vcxproj.filters,proj,projitems,shproj}]
indent_size = 2
# XML Configuration Files
[*.{xml,config,props,targets,nuspec,resx,ruleset,vsixmanifest,vsct}]
indent_size = 2
# JSON Files
[*.{json,json5,webmanifest}]
indent_size = 2
# YAML Files
[*.{yml,yaml}]
indent_size = 2
# Markdown Files
[*.md]
trim_trailing_whitespace = false
# Web Files
[*.{htm,html,js,jsm,ts,tsx,css,sass,scss,less,svg,vue}]
indent_size = 2
# Batch Files
[*.{cmd,bat}]
end_of_line = crlf
# Bash Files
[*.sh]
end_of_line = lf
# Makefiles
[Makefile]
indent_style = tab
##########################################
# Default .NET Code Style Severities
# https://docs.microsoft.com/dotnet/fundamentals/code-analysis/configuration-options#scope
##########################################
[*.{cs,csx,cake,vb,vbx}]
# Default Severity for all .NET Code Style rules below
dotnet_analyzer_diagnostic.severity = warning
##########################################
# File Header (Uncomment to support file headers)
# https://docs.microsoft.com/visualstudio/ide/reference/add-file-header
##########################################
# [*.{cs,csx,cake,vb,vbx}]
# file_header_template = <copyright file="{fileName}" company="PROJECT-AUTHOR">\n© PROJECT-AUTHOR\n</copyright>
# SA1636: File header copyright text should match
# Justification: .editorconfig supports file headers. If this is changed to a value other than "none", a stylecop.json file will need to be added to the project.
# dotnet_diagnostic.SA1636.severity = none
##########################################
# .NET Language Conventions
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions
##########################################
# .NET Code Style Settings
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#net-code-style-settings
[*.{cs,csx,cake,vb,vbx}]
# "this." and "Me." qualifiers
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#this-and-me
#dotnet_style_qualification_for_field = true:warning
#dotnet_style_qualification_for_property = true:warning
#dotnet_style_qualification_for_method = true:warning
#dotnet_style_qualification_for_event = true:warning
# Language keywords instead of framework type names for type references
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#language-keywords
dotnet_style_predefined_type_for_locals_parameters_members = true:warning
dotnet_style_predefined_type_for_member_access = true:warning
# Modifier preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#normalize-modifiers
dotnet_style_require_accessibility_modifiers = always:warning
csharp_preferred_modifier_order = public,private,protected,internal,static,extern,new,virtual,abstract,sealed,override,readonly,unsafe,volatile,async:warning
visual_basic_preferred_modifier_order = Partial,Default,Private,Protected,Public,Friend,NotOverridable,Overridable,MustOverride,Overloads,Overrides,MustInherit,NotInheritable,Static,Shared,Shadows,ReadOnly,WriteOnly,Dim,Const,WithEvents,Widening,Narrowing,Custom,Async:warning
dotnet_style_readonly_field = true:warning
# Parentheses preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#parentheses-preferences
dotnet_style_parentheses_in_arithmetic_binary_operators = always_for_clarity:warning
dotnet_style_parentheses_in_relational_binary_operators = always_for_clarity:warning
dotnet_style_parentheses_in_other_binary_operators = always_for_clarity:warning
dotnet_style_parentheses_in_other_operators = always_for_clarity:suggestion
# Expression-level preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#expression-level-preferences
dotnet_style_object_initializer = true:warning
dotnet_style_collection_initializer = true:warning
dotnet_style_explicit_tuple_names = true:warning
dotnet_style_prefer_inferred_tuple_names = true:warning
dotnet_style_prefer_inferred_anonymous_type_member_names = true:warning
dotnet_style_prefer_auto_properties = true:warning
dotnet_style_prefer_is_null_check_over_reference_equality_method = true:warning
dotnet_style_prefer_conditional_expression_over_assignment = false:suggestion
dotnet_diagnostic.IDE0045.severity = suggestion
dotnet_style_prefer_conditional_expression_over_return = false:suggestion
dotnet_diagnostic.IDE0046.severity = suggestion
dotnet_style_prefer_compound_assignment = true:warning
# Null-checking preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#null-checking-preferences
dotnet_style_coalesce_expression = true:warning
dotnet_style_null_propagation = true:warning
# Parameter preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#parameter-preferences
dotnet_code_quality_unused_parameters = all:warning
# More style options (Undocumented)
# https://github.com/MicrosoftDocs/visualstudio-docs/issues/3641
dotnet_style_operator_placement_when_wrapping = end_of_line
# https://github.com/dotnet/roslyn/pull/40070
dotnet_style_prefer_simplified_interpolation = true:warning
# C# Code Style Settings
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#c-code-style-settings
[*.{cs,csx,cake}]
# Implicit and explicit types
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#implicit-and-explicit-types
csharp_style_var_for_built_in_types = true:warning
csharp_style_var_when_type_is_apparent = true:warning
csharp_style_var_elsewhere = true:warning
# Expression-bodied members
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#expression-bodied-members
csharp_style_expression_bodied_methods = true:warning
csharp_style_expression_bodied_constructors = true:warning
csharp_style_expression_bodied_operators = true:warning
csharp_style_expression_bodied_properties = true:warning
csharp_style_expression_bodied_indexers = true:warning
csharp_style_expression_bodied_accessors = true:warning
csharp_style_expression_bodied_lambdas = true:warning
csharp_style_expression_bodied_local_functions = true:warning
# Pattern matching
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#pattern-matching
csharp_style_pattern_matching_over_is_with_cast_check = true:warning
csharp_style_pattern_matching_over_as_with_null_check = true:warning
# Inlined variable declarations
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#inlined-variable-declarations
csharp_style_inlined_variable_declaration = true:warning
# Expression-level preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#expression-level-preferences
csharp_prefer_simple_default_expression = true:warning
# "Null" checking preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#c-null-checking-preferences
csharp_style_throw_expression = true:warning
csharp_style_conditional_delegate_call = true:warning
# Code block preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#code-block-preferences
csharp_prefer_braces = true:warning
# Unused value preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#unused-value-preferences
csharp_style_unused_value_expression_statement_preference = discard_variable:suggestion
dotnet_diagnostic.IDE0058.severity = suggestion
csharp_style_unused_value_assignment_preference = discard_variable:suggestion
dotnet_diagnostic.IDE0059.severity = suggestion
# Index and range preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#index-and-range-preferences
csharp_style_prefer_index_operator = true:warning
csharp_style_prefer_range_operator = true:warning
# Miscellaneous preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#miscellaneous-preferences
csharp_style_deconstructed_variable_declaration = true:warning
csharp_style_pattern_local_over_anonymous_function = true:warning
csharp_using_directive_placement = outside_namespace:warning
csharp_prefer_static_local_function = true:warning
csharp_prefer_simple_using_statement = true:suggestion
dotnet_diagnostic.IDE0063.severity = suggestion
csharp_style_namespace_declarations = file_scoped
##########################################
# .NET Formatting Conventions
# https://docs.microsoft.com/visualstudio/ide/editorconfig-code-style-settings-reference#formatting-conventions
##########################################
# Organize usings
# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#organize-using-directives
dotnet_sort_system_directives_first = true
# Newline options
# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#new-line-options
csharp_new_line_before_open_brace = all
csharp_new_line_before_else = true
csharp_new_line_before_catch = true
csharp_new_line_before_finally = true
csharp_new_line_before_members_in_object_initializers = true
csharp_new_line_before_members_in_anonymous_types = true
csharp_new_line_between_query_expression_clauses = true
# Indentation options
# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#indentation-options
csharp_indent_case_contents = true
csharp_indent_switch_labels = true
csharp_indent_labels = no_change
csharp_indent_block_contents = true
csharp_indent_braces = false
csharp_indent_case_contents_when_block = false
# Spacing options
# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#spacing-options
csharp_space_after_cast = false
csharp_space_after_keywords_in_control_flow_statements = true
csharp_space_between_parentheses = false
csharp_space_before_colon_in_inheritance_clause = true
csharp_space_after_colon_in_inheritance_clause = true
csharp_space_around_binary_operators = before_and_after
csharp_space_between_method_declaration_parameter_list_parentheses = false
csharp_space_between_method_declaration_empty_parameter_list_parentheses = false
csharp_space_between_method_declaration_name_and_open_parenthesis = false
csharp_space_between_method_call_parameter_list_parentheses = false
csharp_space_between_method_call_empty_parameter_list_parentheses = false
csharp_space_between_method_call_name_and_opening_parenthesis = false
csharp_space_after_comma = true
csharp_space_before_comma = false
csharp_space_after_dot = false
csharp_space_before_dot = false
csharp_space_after_semicolon_in_for_statement = true
csharp_space_before_semicolon_in_for_statement = false
csharp_space_around_declaration_statements = false
csharp_space_before_open_square_brackets = false
csharp_space_between_empty_square_brackets = false
csharp_space_between_square_brackets = false
# Wrapping options
# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#wrap-options
csharp_preserve_single_line_statements = false
csharp_preserve_single_line_blocks = true
##########################################
# .NET Naming Conventions
# https://docs.microsoft.com/visualstudio/ide/editorconfig-naming-conventions
##########################################
[*.{cs,csx,cake,vb,vbx}]
dotnet_diagnostic.CA1000.severity = suggestion
dotnet_diagnostic.CA1001.severity = error
dotnet_diagnostic.CA1018.severity = error
dotnet_diagnostic.CA1051.severity = suggestion
dotnet_diagnostic.CA1068.severity = error
dotnet_diagnostic.CA1069.severity = error
dotnet_diagnostic.CA1304.severity = error
dotnet_diagnostic.CA1305.severity = suggestion
dotnet_diagnostic.CA1309.severity = suggestion
dotnet_diagnostic.CA1310.severity = error
dotnet_diagnostic.CA1707.severity = suggestion
dotnet_diagnostic.CA1708.severity = suggestion
dotnet_diagnostic.CA1711.severity = suggestion
dotnet_diagnostic.CA1716.severity = suggestion
dotnet_diagnostic.CA1720.severity = suggestion
dotnet_diagnostic.CA1725.severity = suggestion
dotnet_diagnostic.CA1805.severity = suggestion
dotnet_diagnostic.CA1816.severity = suggestion
dotnet_diagnostic.CA1822.severity = suggestion
dotnet_diagnostic.CA1825.severity = error
dotnet_diagnostic.CA1826.severity = silent
dotnet_diagnostic.CA1827.severity = error
dotnet_diagnostic.CA1829.severity = suggestion
dotnet_diagnostic.CA1834.severity = error
dotnet_diagnostic.CA1845.severity = suggestion
dotnet_diagnostic.CA1848.severity = suggestion
dotnet_diagnostic.CA2016.severity = suggestion
dotnet_diagnostic.CA2201.severity = error
dotnet_diagnostic.CA2206.severity = error
dotnet_diagnostic.CA2208.severity = error
dotnet_diagnostic.CA2211.severity = error
dotnet_diagnostic.CA2249.severity = error
dotnet_diagnostic.CA2251.severity = error
dotnet_diagnostic.CA2252.severity = none
dotnet_diagnostic.CA2254.severity = suggestion
dotnet_diagnostic.CS0169.severity = error
dotnet_diagnostic.CS0219.severity = error
dotnet_diagnostic.CS1998.severity = error
dotnet_diagnostic.CS8602.severity = error
dotnet_diagnostic.CS8604.severity = error
dotnet_diagnostic.CS8618.severity = error
dotnet_diagnostic.CS0618.severity = error
dotnet_diagnostic.CS1998.severity = error
dotnet_diagnostic.CS4014.severity = error
dotnet_diagnostic.CS8600.severity = error
dotnet_diagnostic.CS8603.severity = error
dotnet_diagnostic.CS8625.severity = error
dotnet_diagnostic.BL0005.severity = suggestion
dotnet_diagnostic.MVC1000.severity = suggestion
dotnet_diagnostic.IDE0055.severity = suggestion # Fix formatting
dotnet_diagnostic.IDE0023.severity = suggestion # use expression body for operators
dotnet_diagnostic.IDE0025.severity = suggestion # use expression body for properties
dotnet_diagnostic.IDE1006.severity = suggestion # Naming rule violation: These words cannot contain lower case characters
dotnet_diagnostic.IDE0072.severity = suggestion # Populate switch - forces population of all cases even when default specified
dotnet_diagnostic.IDE0027.severity = suggestion # Use expression body for accessors
dotnet_diagnostic.IDE0032.severity = suggestion # Use auto property
dotnet_diagnostic.IDE0007.severity = error # Use var
dotnet_diagnostic.IDE0160.severity = suggestion # Use block scoped
dotnet_diagnostic.IDE0011.severity = error # Use braces on if statements
dotnet_diagnostic.IDE0057.severity = suggestion # substring can be simplified
dotnet_diagnostic.IDE0004.severity = error # redundant cast
dotnet_diagnostic.IDE0010.severity = silent # populate switch
dotnet_diagnostic.IDE0021.severity = silent # expression body for constructors
dotnet_diagnostic.IDE0022.severity = silent # expression body for methods
dotnet_diagnostic.IDE0024.severity = silent # expression body for operators
dotnet_diagnostic.IDE0028.severity = silent
dotnet_diagnostic.IDE0033.severity = error # prefer tuple name
dotnet_diagnostic.IDE0040.severity = error # modifiers required
dotnet_diagnostic.IDE0041.severity = error # simplify null
dotnet_diagnostic.IDE0042.severity = error # deconstruct variable
dotnet_diagnostic.IDE0044.severity = error # make field only when possible
dotnet_diagnostic.IDE0047.severity = suggestion # parameter name
dotnet_diagnostic.IDE0051.severity = error # unused field
dotnet_diagnostic.IDE0052.severity = error # unused member
dotnet_diagnostic.IDE0060.severity = suggestion # unused parameters
dotnet_diagnostic.IDE0066.severity = suggestion # switch expression
dotnet_diagnostic.IDE0078.severity = suggestion # use pattern matching
dotnet_diagnostic.IDE0090.severity = suggestion # new can be simplified
dotnet_diagnostic.IDE0130.severity = suggestion # namespace folder structure
dotnet_diagnostic.IDE0160.severity = silent # Block-scoped namespaces are NOT required
dotnet_diagnostic.IDE0161.severity = suggestion # Please use file namespaces
##########################################
# Styles
##########################################
# camel_case_style - Define the camelCase style
dotnet_naming_style.camel_case_style.capitalization = camel_case
# pascal_case_style - Define the PascalCase style
dotnet_naming_style.pascal_case_style.capitalization = pascal_case
# constant_case - Define the CONSTANT_CASE style
dotnet_naming_style.constant_case.capitalization = all_upper
dotnet_naming_style.constant_case.word_separator = _
# first_upper_style - The first character must start with an upper-case character
dotnet_naming_style.first_upper_style.capitalization = first_word_upper
# prefix_interface_with_i_style - Interfaces must be PascalCase and the first character of an interface must be an 'I'
dotnet_naming_style.prefix_interface_with_i_style.capitalization = pascal_case
dotnet_naming_style.prefix_interface_with_i_style.required_prefix = I
# prefix_type_parameters_with_t_style - Generic Type Parameters must be PascalCase and the first character must be a 'T'
dotnet_naming_style.prefix_type_parameters_with_t_style.capitalization = pascal_case
dotnet_naming_style.prefix_type_parameters_with_t_style.required_prefix = T
# disallowed_style - Anything that has this style applied is marked as disallowed
dotnet_naming_style.disallowed_style.capitalization = pascal_case
dotnet_naming_style.disallowed_style.required_prefix = ____RULE_VIOLATION____
dotnet_naming_style.disallowed_style.required_suffix = ____RULE_VIOLATION____
# internal_error_style - This style should never occur... if it does, it indicates a bug in file or in the parser using the file
dotnet_naming_style.internal_error_style.capitalization = pascal_case
dotnet_naming_style.internal_error_style.required_prefix = ____INTERNAL_ERROR____
dotnet_naming_style.internal_error_style.required_suffix = ____INTERNAL_ERROR____
# underscore_camel_case_style - Private fields must be camelCase and prefixed with an underscore
dotnet_naming_style.underscore_camel_case_style.capitalization = camel_case
dotnet_naming_style.underscore_camel_case_style.required_prefix = _
##########################################
# .NET Design Guideline Field Naming Rules
# Naming rules for fields follow the .NET Framework design guidelines
# https://docs.microsoft.com/dotnet/standard/design-guidelines/index
##########################################
# All public/protected/protected_internal constant fields must be constant_case
# https://docs.microsoft.com/dotnet/standard/design-guidelines/field
dotnet_naming_symbols.public_protected_constant_fields_group.applicable_accessibilities = public, protected, protected_internal
dotnet_naming_symbols.public_protected_constant_fields_group.required_modifiers = const
dotnet_naming_symbols.public_protected_constant_fields_group.applicable_kinds = field
dotnet_naming_rule.public_protected_constant_fields_must_be_pascal_case_rule.symbols = public_protected_constant_fields_group
dotnet_naming_rule.public_protected_constant_fields_must_be_pascal_case_rule.style = constant_case
dotnet_naming_rule.public_protected_constant_fields_must_be_pascal_case_rule.severity = warning
# All public/protected/protected_internal static readonly fields must be constant_case
# https://docs.microsoft.com/dotnet/standard/design-guidelines/field
dotnet_naming_symbols.public_protected_static_readonly_fields_group.applicable_accessibilities = public, protected, protected_internal
dotnet_naming_symbols.public_protected_static_readonly_fields_group.required_modifiers = static, readonly
dotnet_naming_symbols.public_protected_static_readonly_fields_group.applicable_kinds = field
dotnet_naming_rule.public_protected_static_readonly_fields_must_be_pascal_case_rule.symbols = public_protected_static_readonly_fields_group
dotnet_naming_rule.public_protected_static_readonly_fields_must_be_pascal_case_rule.style = constant_case
dotnet_naming_rule.public_protected_static_readonly_fields_must_be_pascal_case_rule.severity = warning
# No other public/protected/protected_internal fields are allowed
# https://docs.microsoft.com/dotnet/standard/design-guidelines/field
dotnet_naming_symbols.other_public_protected_fields_group.applicable_accessibilities = public, protected, protected_internal
dotnet_naming_symbols.other_public_protected_fields_group.applicable_kinds = field
dotnet_naming_rule.other_public_protected_fields_disallowed_rule.symbols = other_public_protected_fields_group
dotnet_naming_rule.other_public_protected_fields_disallowed_rule.style = disallowed_style
dotnet_naming_rule.other_public_protected_fields_disallowed_rule.severity = error
##########################################
# StyleCop Field Naming Rules
# Naming rules for fields follow the StyleCop analyzers
# This does not override any rules using disallowed_style above
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers
##########################################
# All constant fields must be constant_case
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1303.md
dotnet_naming_symbols.stylecop_constant_fields_group.applicable_accessibilities = public, internal, protected_internal, protected, private_protected, private
dotnet_naming_symbols.stylecop_constant_fields_group.required_modifiers = const
dotnet_naming_symbols.stylecop_constant_fields_group.applicable_kinds = field
dotnet_naming_rule.stylecop_constant_fields_must_be_constant_case_rule.symbols = stylecop_constant_fields_group
dotnet_naming_rule.stylecop_constant_fields_must_be_constant_case_rule.style = constant_case
dotnet_naming_rule.stylecop_constant_fields_must_be_constant_case_rule.severity = warning
# All static readonly fields must be constant_case
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1311.md
dotnet_naming_symbols.stylecop_static_readonly_fields_group.applicable_accessibilities = public, internal, protected_internal, protected, private_protected, private
dotnet_naming_symbols.stylecop_static_readonly_fields_group.required_modifiers = static, readonly
dotnet_naming_symbols.stylecop_static_readonly_fields_group.applicable_kinds = field
dotnet_naming_rule.stylecop_static_readonly_fields_must_be_constant_case_rule.symbols = stylecop_static_readonly_fields_group
dotnet_naming_rule.stylecop_static_readonly_fields_must_be_constant_case_rule.style = constant_case
dotnet_naming_rule.stylecop_static_readonly_fields_must_be_constant_case_rule.severity = warning
# No non-private instance fields are allowed
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1401.md
dotnet_naming_symbols.stylecop_fields_must_be_private_group.applicable_accessibilities = public, internal, protected_internal, protected, private_protected
dotnet_naming_symbols.stylecop_fields_must_be_private_group.applicable_kinds = field
dotnet_naming_rule.stylecop_instance_fields_must_be_private_rule.symbols = stylecop_fields_must_be_private_group
dotnet_naming_rule.stylecop_instance_fields_must_be_private_rule.style = disallowed_style
dotnet_naming_rule.stylecop_instance_fields_must_be_private_rule.severity = error
# Private fields must be camelCase
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1306.md
dotnet_naming_symbols.stylecop_private_fields_group.applicable_accessibilities = private
dotnet_naming_symbols.stylecop_private_fields_group.applicable_kinds = field
dotnet_naming_rule.stylecop_private_fields_must_be_camel_case_rule.symbols = stylecop_private_fields_group
dotnet_naming_rule.stylecop_private_fields_must_be_camel_case_rule.style = underscore_camel_case_style
dotnet_naming_rule.stylecop_private_fields_must_be_camel_case_rule.severity = warning
# Local variables must be camelCase
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1312.md
dotnet_naming_symbols.stylecop_local_fields_group.applicable_accessibilities = local
dotnet_naming_symbols.stylecop_local_fields_group.applicable_kinds = local
dotnet_naming_rule.stylecop_local_fields_must_be_camel_case_rule.symbols = stylecop_local_fields_group
dotnet_naming_rule.stylecop_local_fields_must_be_camel_case_rule.style = camel_case_style
dotnet_naming_rule.stylecop_local_fields_must_be_camel_case_rule.severity = warning
# This rule should never fire. However, it's included for at least two purposes:
# First, it helps to understand, reason about, and root-cause certain types of issues, such as bugs in .editorconfig parsers.
# Second, it helps to raise immediate awareness if a new field type is added (as occurred recently in C#).
dotnet_naming_symbols.sanity_check_uncovered_field_case_group.applicable_accessibilities = *
dotnet_naming_symbols.sanity_check_uncovered_field_case_group.applicable_kinds = field
dotnet_naming_rule.sanity_check_uncovered_field_case_rule.symbols = sanity_check_uncovered_field_case_group
dotnet_naming_rule.sanity_check_uncovered_field_case_rule.style = internal_error_style
dotnet_naming_rule.sanity_check_uncovered_field_case_rule.severity = error
##########################################
# Other Naming Rules
##########################################
# All of the following must be PascalCase:
# - Namespaces
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-namespaces
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1300.md
# - Classes and Enumerations
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-classes-structs-and-interfaces
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1300.md
# - Delegates
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-classes-structs-and-interfaces#names-of-common-types
# - Constructors, Properties, Events, Methods
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-type-members
dotnet_naming_symbols.element_group.applicable_kinds = namespace, class, enum, struct, delegate, event, method, property
dotnet_naming_rule.element_rule.symbols = element_group
dotnet_naming_rule.element_rule.style = pascal_case_style
dotnet_naming_rule.element_rule.severity = warning
# Interfaces use PascalCase and are prefixed with uppercase 'I'
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-classes-structs-and-interfaces
dotnet_naming_symbols.interface_group.applicable_kinds = interface
dotnet_naming_rule.interface_rule.symbols = interface_group
dotnet_naming_rule.interface_rule.style = prefix_interface_with_i_style
dotnet_naming_rule.interface_rule.severity = warning
# Generics Type Parameters use PascalCase and are prefixed with uppercase 'T'
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-classes-structs-and-interfaces
dotnet_naming_symbols.type_parameter_group.applicable_kinds = type_parameter
dotnet_naming_rule.type_parameter_rule.symbols = type_parameter_group
dotnet_naming_rule.type_parameter_rule.style = prefix_type_parameters_with_t_style
dotnet_naming_rule.type_parameter_rule.severity = warning
# Function parameters use camelCase
# https://docs.microsoft.com/dotnet/standard/design-guidelines/naming-parameters
dotnet_naming_symbols.parameters_group.applicable_kinds = parameter
dotnet_naming_rule.parameters_rule.symbols = parameters_group
dotnet_naming_rule.parameters_rule.style = camel_case_style
dotnet_naming_rule.parameters_rule.severity = warning
##########################################
# License
##########################################
# The following applies as to the .editorconfig file ONLY, and is
# included below for reference, per the requirements of the license
# corresponding to this .editorconfig file.
# See: https://github.com/RehanSaeed/EditorConfig
#
# MIT License
#
# Copyright (c) 2017-2019 Muhammad Rehan Saeed
# Copyright (c) 2019 Henry Gabryjelski
#
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the
# Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject
# to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
##########################################

6
.github/dependabot.yml vendored Normal file
View File

@@ -0,0 +1,6 @@
version: 2
updates:
- package-ecosystem: "github-actions" # search for actions - there are other options available
directory: "/" # search in .github/workflows under root `/`
schedule:
interval: "weekly" # check for action update every week

View File

@@ -9,16 +9,12 @@ jobs:
os: [windows-latest, ubuntu-latest]
steps:
- uses: actions/checkout@v1
- uses: actions/setup-dotnet@v1
- uses: actions/checkout@v3
- uses: actions/setup-dotnet@v2
with:
dotnet-version: 5.0.400
dotnet-version: 6.0.x
- run: dotnet run -p build/build.csproj
- uses: actions/upload-artifact@v2
- uses: actions/upload-artifact@v3
with:
name: ${{ matrix.os }}-sharpcompress.nupkg
path: artifacts/*
- uses: actions/upload-artifact@v2
with:
name: ${{ matrix.os }}-sharpcompress.snupkg
path: artifacts/*

1
.gitignore vendored
View File

@@ -18,3 +18,4 @@ tools
.DS_Store
*.snupkg
/tests/TestArchives/6d23a38c-f064-4ef1-ad89-b942396f53b9/Scratch

View File

@@ -182,6 +182,8 @@ I'm always looking for help or ideas. Please submit code or email with ideas. Un
XZ implementation based on: https://github.com/sambott/XZ.NET by @sambott
XZ BCJ filters support contributed by Louis-Michel Bergeron, on behalf of aDolus Technology Inc. - 2022
7Zip implementation based on: https://code.google.com/p/managed-lzma/
LICENSE

View File

@@ -6,78 +6,78 @@ using GlobExpressions;
using static Bullseye.Targets;
using static SimpleExec.Command;
class Program
{
private const string Clean = "clean";
private const string Format = "format";
private const string Build = "build";
private const string Test = "test";
private const string Publish = "publish";
const string Clean = "clean";
const string Format = "format";
const string Build = "build";
const string Test = "test";
const string Publish = "publish";
static void Main(string[] args)
{
Target(Clean,
ForEach("**/bin", "**/obj"),
dir =>
{
IEnumerable<string> GetDirectories(string d)
{
return Glob.Directories(".", d);
}
Target(Clean,
ForEach("**/bin", "**/obj"),
dir =>
{
IEnumerable<string> GetDirectories(string d)
{
return Glob.Directories(".", d);
}
void RemoveDirectory(string d)
{
if (Directory.Exists(d))
{
Console.WriteLine(d);
Directory.Delete(d, true);
}
}
foreach (var d in GetDirectories(dir))
{
RemoveDirectory(d);
}
});
Target(Format, () =>
{
Run("dotnet", "tool restore");
Run("dotnet", "format --check");
});
Target(Build, DependsOn(Format),
framework =>
void RemoveDirectory(string d)
{
if (Directory.Exists(d))
{
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows) && framework == "net46")
{
return;
}
Run("dotnet", "build src/SharpCompress/SharpCompress.csproj -c Release");
});
Console.WriteLine(d);
Directory.Delete(d, true);
}
}
Target(Test, DependsOn(Build), ForEach("net5.0"),
framework =>
{
IEnumerable<string> GetFiles(string d)
{
return Glob.Files(".", d);
}
foreach (var d in GetDirectories(dir))
{
RemoveDirectory(d);
}
});
foreach (var file in GetFiles("**/*.Test.csproj"))
{
Run("dotnet", $"test {file} -c Release -f {framework}");
}
});
Target(Format,
() =>
{
Run("dotnet", "tool restore");
Run("dotnet", "format --check");
});
Target(Publish, DependsOn(Test),
() =>
{
Run("dotnet", "pack src/SharpCompress/SharpCompress.csproj -c Release -o artifacts/");
});
Target(Build,
DependsOn(Format),
framework =>
{
Run("dotnet", "build src/SharpCompress/SharpCompress.csproj -c Release");
});
Target("default", DependsOn(Publish), () => Console.WriteLine("Done!"));
Target(Test,
DependsOn(Build),
ForEach("net6.0", "net461"),
framework =>
{
IEnumerable<string> GetFiles(string d)
{
return Glob.Files(".", d);
}
RunTargetsAndExit(args);
}
}
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows) && framework == "net461")
{
return;
}
foreach (var file in GetFiles("**/*.Test.csproj"))
{
Run("dotnet", $"test {file} -c Release -f {framework}");
}
});
Target(Publish,
DependsOn(Test),
() =>
{
Run("dotnet", "pack src/SharpCompress/SharpCompress.csproj -c Release -o artifacts/");
});
Target("default", DependsOn(Publish), () => Console.WriteLine("Done!"));
await RunTargetsAndExitAsync(args);

View File

@@ -2,13 +2,13 @@
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net5.0</TargetFramework>
<TargetFramework>net6.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Bullseye" Version="3.8.0" />
<PackageReference Include="Glob" Version="1.1.8" />
<PackageReference Include="SimpleExec" Version="7.0.0" />
<PackageReference Include="Bullseye" Version="4.0.0" />
<PackageReference Include="Glob" Version="1.1.9" />
<PackageReference Include="SimpleExec" Version="10.0.0" />
</ItemGroup>
</Project>

View File

@@ -1,6 +1,6 @@
{
"sdk": {
"version": "5.0.300",
"version": "6.0.200",
"rollForward": "latestFeature"
}
}

View File

@@ -0,0 +1,420 @@
// Copyright (c) Six Labors.
// Licensed under the Apache License, Version 2.0.
#if !NETSTANDARD2_0 && !NETSTANDARD2_1 && !NETFRAMEWORK
#define SUPPORTS_RUNTIME_INTRINSICS
#define SUPPORTS_HOTPATH
#endif
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
#if SUPPORTS_RUNTIME_INTRINSICS
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
#endif
#pragma warning disable IDE0007 // Use implicit type
namespace SharpCompress.Algorithms
{
/// <summary>
/// Calculates the 32 bit Adler checksum of a given buffer according to
/// RFC 1950 (ZLIB Compressed Data Format Specification version 3.3).
/// </summary>
internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/blob/main/src/ImageSharp/Compression/Zlib/Adler32.cs
{
/// <summary>
/// Global inlining options. Helps temporarily disable inlining for better profiler output.
/// </summary>
private static class InliningOptions // From https://github.com/SixLabors/ImageSharp/blob/main/src/ImageSharp/Common/Helpers/InliningOptions.cs
{
/// <summary>
/// <see cref="MethodImplOptions.AggressiveInlining"/> regardless of the build conditions.
/// </summary>
public const MethodImplOptions AlwaysInline = MethodImplOptions.AggressiveInlining;
#if PROFILING
public const MethodImplOptions HotPath = MethodImplOptions.NoInlining;
public const MethodImplOptions ShortMethod = MethodImplOptions.NoInlining;
#else
#if SUPPORTS_HOTPATH
public const MethodImplOptions HotPath = MethodImplOptions.AggressiveOptimization;
#else
public const MethodImplOptions HotPath = MethodImplOptions.AggressiveInlining;
#endif
public const MethodImplOptions ShortMethod = MethodImplOptions.AggressiveInlining;
#endif
public const MethodImplOptions ColdPath = MethodImplOptions.NoInlining;
}
#if SUPPORTS_RUNTIME_INTRINSICS
/// <summary>
/// Provides optimized static methods for trigonometric, logarithmic,
/// and other common mathematical functions.
/// </summary>
private static class Numerics // From https://github.com/SixLabors/ImageSharp/blob/main/src/ImageSharp/Common/Helpers/Numerics.cs
{
/// <summary>
/// Reduces elements of the vector into one sum.
/// </summary>
/// <param name="accumulator">The accumulator to reduce.</param>
/// <returns>The sum of all elements.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int ReduceSum(Vector256<int> accumulator)
{
// Add upper lane to lower lane.
Vector128<int> vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper());
// Add odd to even.
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_11_01_01));
// Add high to low.
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_10_11_10));
return Sse2.ConvertToInt32(vsum);
}
/// <summary>
/// Reduces even elements of the vector into one sum.
/// </summary>
/// <param name="accumulator">The accumulator to reduce.</param>
/// <returns>The sum of even elements.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int EvenReduceSum(Vector256<int> accumulator)
{
Vector128<int> vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper()); // add upper lane to lower lane
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_10_11_10)); // add high to low
// Vector128<int>.ToScalar() isn't optimized pre-net5.0 https://github.com/dotnet/runtime/pull/37882
return Sse2.ConvertToInt32(vsum);
}
}
#endif
/// <summary>
/// The default initial seed value of an Adler32 checksum calculation.
/// </summary>
public const uint SeedValue = 1U;
// Largest prime smaller than 65536
private const uint BASE = 65521;
// NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1
private const uint NMAX = 5552;
#if SUPPORTS_RUNTIME_INTRINSICS
private const int MinBufferSize = 64;
private const int BlockSize = 1 << 5;
// The C# compiler emits this as a compile-time constant embedded in the PE file.
private static ReadOnlySpan<byte> Tap1Tap2 => new byte[]
{
32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, // tap1
16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1 // tap2
};
#endif
/// <summary>
/// Calculates the Adler32 checksum with the bytes taken from the span.
/// </summary>
/// <param name="buffer">The readonly span of bytes.</param>
/// <returns>The <see cref="uint"/>.</returns>
[MethodImpl(InliningOptions.ShortMethod)]
public static uint Calculate(ReadOnlySpan<byte> buffer)
=> Calculate(SeedValue, buffer);
/// <summary>
/// Calculates the Adler32 checksum with the bytes taken from the span and seed.
/// </summary>
/// <param name="adler">The input Adler32 value.</param>
/// <param name="buffer">The readonly span of bytes.</param>
/// <returns>The <see cref="uint"/>.</returns>
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
public static uint Calculate(uint adler, ReadOnlySpan<byte> buffer)
{
if (buffer.IsEmpty)
{
return adler;
}
#if SUPPORTS_RUNTIME_INTRINSICS
if (Avx2.IsSupported && buffer.Length >= MinBufferSize)
{
return CalculateAvx2(adler, buffer);
}
if (Ssse3.IsSupported && buffer.Length >= MinBufferSize)
{
return CalculateSse(adler, buffer);
}
return CalculateScalar(adler, buffer);
#else
return CalculateScalar(adler, buffer);
#endif
}
// Based on https://github.com/chromium/chromium/blob/master/third_party/zlib/adler32_simd.c
#if SUPPORTS_RUNTIME_INTRINSICS
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
private static unsafe uint CalculateSse(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
// Process the data in blocks.
uint length = (uint)buffer.Length;
uint blocks = length / BlockSize;
length -= blocks * BlockSize;
fixed (byte* bufferPtr = &MemoryMarshal.GetReference(buffer))
{
fixed (byte* tapPtr = &MemoryMarshal.GetReference(Tap1Tap2))
{
byte* localBufferPtr = bufferPtr;
// _mm_setr_epi8 on x86
Vector128<sbyte> tap1 = Sse2.LoadVector128((sbyte*)tapPtr);
Vector128<sbyte> tap2 = Sse2.LoadVector128((sbyte*)(tapPtr + 0x10));
Vector128<byte> zero = Vector128<byte>.Zero;
var ones = Vector128.Create((short)1);
while (blocks > 0)
{
uint n = NMAX / BlockSize; /* The NMAX constraint. */
if (n > blocks)
{
n = blocks;
}
blocks -= n;
// Process n blocks of data. At most NMAX data bytes can be
// processed before s2 must be reduced modulo BASE.
Vector128<uint> v_ps = Vector128.CreateScalar(s1 * n);
Vector128<uint> v_s2 = Vector128.CreateScalar(s2);
Vector128<uint> v_s1 = Vector128<uint>.Zero;
do
{
// Load 32 input bytes.
Vector128<byte> bytes1 = Sse3.LoadDquVector128(localBufferPtr);
Vector128<byte> bytes2 = Sse3.LoadDquVector128(localBufferPtr + 0x10);
// Add previous block byte sum to v_ps.
v_ps = Sse2.Add(v_ps, v_s1);
// Horizontally add the bytes for s1, multiply-adds the
// bytes by [ 32, 31, 30, ... ] for s2.
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes1, zero).AsUInt32());
Vector128<short> mad1 = Ssse3.MultiplyAddAdjacent(bytes1, tap1);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad1, ones).AsUInt32());
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes2, zero).AsUInt32());
Vector128<short> mad2 = Ssse3.MultiplyAddAdjacent(bytes2, tap2);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad2, ones).AsUInt32());
localBufferPtr += BlockSize;
}
while (--n > 0);
v_s2 = Sse2.Add(v_s2, Sse2.ShiftLeftLogical(v_ps, 5));
// Sum epi32 ints v_s1(s2) and accumulate in s1(s2).
const byte S2301 = 0b1011_0001; // A B C D -> B A D C
const byte S1032 = 0b0100_1110; // A B C D -> C D A B
v_s1 = Sse2.Add(v_s1, Sse2.Shuffle(v_s1, S1032));
s1 += v_s1.ToScalar();
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S2301));
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S1032));
s2 = v_s2.ToScalar();
// Reduce.
s1 %= BASE;
s2 %= BASE;
}
if (length > 0)
{
HandleLeftOver(localBufferPtr, length, ref s1, ref s2);
}
return s1 | (s2 << 16);
}
}
}
// Based on: https://github.com/zlib-ng/zlib-ng/blob/develop/arch/x86/adler32_avx2.c
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
public static unsafe uint CalculateAvx2(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
uint length = (uint)buffer.Length;
fixed (byte* bufferPtr = &MemoryMarshal.GetReference(buffer))
{
byte* localBufferPtr = bufferPtr;
Vector256<byte> zero = Vector256<byte>.Zero;
var dot3v = Vector256.Create((short)1);
var dot2v = Vector256.Create(32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1);
// Process n blocks of data. At most NMAX data bytes can be
// processed before s2 must be reduced modulo BASE.
var vs1 = Vector256.CreateScalar(s1);
var vs2 = Vector256.CreateScalar(s2);
while (length >= 32)
{
int k = length < NMAX ? (int)length : (int)NMAX;
k -= k % 32;
length -= (uint)k;
Vector256<uint> vs10 = vs1;
Vector256<uint> vs3 = Vector256<uint>.Zero;
while (k >= 32)
{
// Load 32 input bytes.
Vector256<byte> block = Avx.LoadVector256(localBufferPtr);
// Sum of abs diff, resulting in 2 x int32's
Vector256<ushort> vs1sad = Avx2.SumAbsoluteDifferences(block, zero);
vs1 = Avx2.Add(vs1, vs1sad.AsUInt32());
vs3 = Avx2.Add(vs3, vs10);
// sum 32 uint8s to 16 shorts.
Vector256<short> vshortsum2 = Avx2.MultiplyAddAdjacent(block, dot2v);
// sum 16 shorts to 8 uint32s.
Vector256<int> vsum2 = Avx2.MultiplyAddAdjacent(vshortsum2, dot3v);
vs2 = Avx2.Add(vsum2.AsUInt32(), vs2);
vs10 = vs1;
localBufferPtr += BlockSize;
k -= 32;
}
// Defer the multiplication with 32 to outside of the loop.
vs3 = Avx2.ShiftLeftLogical(vs3, 5);
vs2 = Avx2.Add(vs2, vs3);
s1 = (uint)Numerics.EvenReduceSum(vs1.AsInt32());
s2 = (uint)Numerics.ReduceSum(vs2.AsInt32());
s1 %= BASE;
s2 %= BASE;
vs1 = Vector256.CreateScalar(s1);
vs2 = Vector256.CreateScalar(s2);
}
if (length > 0)
{
HandleLeftOver(localBufferPtr, length, ref s1, ref s2);
}
return s1 | (s2 << 16);
}
}
private static unsafe void HandleLeftOver(byte* localBufferPtr, uint length, ref uint s1, ref uint s2)
{
if (length >= 16)
{
s2 += s1 += localBufferPtr[0];
s2 += s1 += localBufferPtr[1];
s2 += s1 += localBufferPtr[2];
s2 += s1 += localBufferPtr[3];
s2 += s1 += localBufferPtr[4];
s2 += s1 += localBufferPtr[5];
s2 += s1 += localBufferPtr[6];
s2 += s1 += localBufferPtr[7];
s2 += s1 += localBufferPtr[8];
s2 += s1 += localBufferPtr[9];
s2 += s1 += localBufferPtr[10];
s2 += s1 += localBufferPtr[11];
s2 += s1 += localBufferPtr[12];
s2 += s1 += localBufferPtr[13];
s2 += s1 += localBufferPtr[14];
s2 += s1 += localBufferPtr[15];
localBufferPtr += 16;
length -= 16;
}
while (length-- > 0)
{
s2 += s1 += *localBufferPtr++;
}
if (s1 >= BASE)
{
s1 -= BASE;
}
s2 %= BASE;
}
#endif
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
private static unsafe uint CalculateScalar(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
uint k;
fixed (byte* bufferPtr = buffer)
{
var localBufferPtr = bufferPtr;
uint length = (uint)buffer.Length;
while (length > 0)
{
k = length < NMAX ? length : NMAX;
length -= k;
while (k >= 16)
{
s2 += s1 += localBufferPtr[0];
s2 += s1 += localBufferPtr[1];
s2 += s1 += localBufferPtr[2];
s2 += s1 += localBufferPtr[3];
s2 += s1 += localBufferPtr[4];
s2 += s1 += localBufferPtr[5];
s2 += s1 += localBufferPtr[6];
s2 += s1 += localBufferPtr[7];
s2 += s1 += localBufferPtr[8];
s2 += s1 += localBufferPtr[9];
s2 += s1 += localBufferPtr[10];
s2 += s1 += localBufferPtr[11];
s2 += s1 += localBufferPtr[12];
s2 += s1 += localBufferPtr[13];
s2 += s1 += localBufferPtr[14];
s2 += s1 += localBufferPtr[15];
localBufferPtr += 16;
k -= 16;
}
while (k-- > 0)
{
s2 += s1 += *localBufferPtr++;
}
s1 %= BASE;
s2 %= BASE;
}
return (s2 << 16) | s1;
}
}
}
}

View File

@@ -1,285 +0,0 @@
// Copyright (c) Six Labors and contributors.
// Licensed under the GNU Affero General Public License, Version 3.
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
#if !NETSTANDARD2_0 && !NETSTANDARD2_1
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
#endif
namespace SharpCompress.Algorithms
{
/// <summary>
/// Calculates the 32 bit Adler checksum of a given buffer according to
/// RFC 1950. ZLIB Compressed Data Format Specification version 3.3)
/// </summary>
internal static class Adler32
{
/// <summary>
/// The default initial seed value of a Adler32 checksum calculation.
/// </summary>
public const uint SeedValue = 1U;
#if !NETSTANDARD2_0 && !NETSTANDARD2_1
private const int MinBufferSize = 64;
#endif
// Largest prime smaller than 65536
private const uint BASE = 65521;
// NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1
private const uint NMAX = 5552;
/// <summary>
/// Calculates the Adler32 checksum with the bytes taken from the span.
/// </summary>
/// <param name="buffer">The readonly span of bytes.</param>
/// <returns>The <see cref="uint"/>.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static uint Calculate(ReadOnlySpan<byte> buffer)
{
return Calculate(SeedValue, buffer);
}
/// <summary>
/// Calculates the Adler32 checksum with the bytes taken from the span and seed.
/// </summary>
/// <param name="adler">The input Adler32 value.</param>
/// <param name="buffer">The readonly span of bytes.</param>
/// <returns>The <see cref="uint"/>.</returns>
public static uint Calculate(uint adler, ReadOnlySpan<byte> buffer)
{
if (buffer.IsEmpty)
{
return SeedValue;
}
#if !NETSTANDARD2_0 && !NETSTANDARD2_1
if (Sse3.IsSupported && buffer.Length >= MinBufferSize)
{
return CalculateSse(adler, buffer);
}
return CalculateScalar(adler, buffer);
#else
return CalculateScalar(adler, buffer);
#endif
}
// Based on https://github.com/chromium/chromium/blob/master/third_party/zlib/adler32_simd.c
#if !NETSTANDARD2_0 && !NETSTANDARD2_1
private static unsafe uint CalculateSse(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
// Process the data in blocks.
const int BLOCK_SIZE = 1 << 5;
uint length = (uint)buffer.Length;
uint blocks = length / BLOCK_SIZE;
length -= blocks * BLOCK_SIZE;
int index = 0;
fixed (byte* bufferPtr = &buffer[0])
{
index += (int)blocks * BLOCK_SIZE;
var localBufferPtr = bufferPtr;
// _mm_setr_epi8 on x86
var tap1 = Vector128.Create(32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17);
var tap2 = Vector128.Create(16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1);
Vector128<byte> zero = Vector128<byte>.Zero;
var ones = Vector128.Create((short)1);
while (blocks > 0)
{
uint n = NMAX / BLOCK_SIZE; /* The NMAX constraint. */
if (n > blocks)
{
n = blocks;
}
blocks -= n;
// Process n blocks of data. At most NMAX data bytes can be
// processed before s2 must be reduced modulo BASE.
Vector128<int> v_ps = Vector128.CreateScalar(s1 * n).AsInt32();
Vector128<int> v_s2 = Vector128.CreateScalar(s2).AsInt32();
Vector128<int> v_s1 = Vector128<int>.Zero;
do
{
// Load 32 input bytes.
Vector128<byte> bytes1 = Sse3.LoadDquVector128(localBufferPtr);
Vector128<byte> bytes2 = Sse3.LoadDquVector128(localBufferPtr + 16);
// Add previous block byte sum to v_ps.
v_ps = Sse2.Add(v_ps, v_s1);
// Horizontally add the bytes for s1, multiply-adds the
// bytes by [ 32, 31, 30, ... ] for s2.
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes1, zero).AsInt32());
Vector128<short> mad1 = Ssse3.MultiplyAddAdjacent(bytes1, tap1);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad1, ones));
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes2, zero).AsInt32());
Vector128<short> mad2 = Ssse3.MultiplyAddAdjacent(bytes2, tap2);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad2, ones));
localBufferPtr += BLOCK_SIZE;
}
while (--n > 0);
v_s2 = Sse2.Add(v_s2, Sse2.ShiftLeftLogical(v_ps, 5));
// Sum epi32 ints v_s1(s2) and accumulate in s1(s2).
const byte S2301 = 0b1011_0001; // A B C D -> B A D C
const byte S1032 = 0b0100_1110; // A B C D -> C D A B
v_s1 = Sse2.Add(v_s1, Sse2.Shuffle(v_s1, S2301));
v_s1 = Sse2.Add(v_s1, Sse2.Shuffle(v_s1, S1032));
s1 += (uint)v_s1.ToScalar();
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S2301));
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S1032));
s2 = (uint)v_s2.ToScalar();
// Reduce.
s1 %= BASE;
s2 %= BASE;
}
}
ref byte bufferRef = ref MemoryMarshal.GetReference(buffer);
if (length > 0)
{
if (length >= 16)
{
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
length -= 16;
}
while (length-- > 0)
{
s2 += s1 += Unsafe.Add(ref bufferRef, index++);
}
if (s1 >= BASE)
{
s1 -= BASE;
}
s2 %= BASE;
}
return s1 | (s2 << 16);
}
#endif
private static uint CalculateScalar(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
uint k;
ref byte bufferRef = ref MemoryMarshal.GetReference<byte>(buffer);
uint length = (uint)buffer.Length;
int index = 0;
while (length > 0)
{
k = length < NMAX ? length : NMAX;
length -= k;
while (k >= 16)
{
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
k -= 16;
}
if (k != 0)
{
do
{
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
}
while (--k != 0);
}
s1 %= BASE;
s2 %= BASE;
}
return (s2 << 16) | s1;
}
}
}

View File

@@ -1,8 +1,9 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives
@@ -23,28 +24,14 @@ namespace SharpCompress.Archives
protected ReaderOptions ReaderOptions { get; }
private bool disposed;
protected SourceStream SrcStream;
internal AbstractArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerOptions)
internal AbstractArchive(ArchiveType type, SourceStream srcStream)
{
Type = type;
if (!fileInfo.Exists)
{
throw new ArgumentException("File does not exist: " + fileInfo.FullName);
}
ReaderOptions = readerOptions;
readerOptions.LeaveStreamOpen = false;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(fileInfo));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
}
protected abstract IEnumerable<TVolume> LoadVolumes(FileInfo file);
internal AbstractArchive(ArchiveType type, IEnumerable<Stream> streams, ReaderOptions readerOptions)
{
Type = type;
ReaderOptions = readerOptions;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(streams.Select(CheckStreams)));
ReaderOptions = srcStream.ReaderOptions;
SrcStream = srcStream;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(SrcStream));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
}
@@ -98,7 +85,7 @@ namespace SharpCompress.Archives
/// </summary>
public virtual long TotalUncompressSize => Entries.Aggregate(0L, (total, cf) => total + cf.Size);
protected abstract IEnumerable<TVolume> LoadVolumes(IEnumerable<Stream> streams);
protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream srcStream);
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);
IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();
@@ -111,6 +98,8 @@ namespace SharpCompress.Archives
{
lazyVolumes.ForEach(v => v.Dispose());
lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
if (SrcStream != null)
SrcStream.Dispose();
disposed = true;
}
}

View File

@@ -1,8 +1,9 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Writers;
@@ -40,13 +41,8 @@ namespace SharpCompress.Archives
{
}
internal AbstractWritableArchive(ArchiveType type, Stream stream, ReaderOptions readerFactoryOptions)
: base(type, stream.AsEnumerable(), readerFactoryOptions)
{
}
internal AbstractWritableArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerFactoryOptions)
: base(type, fileInfo, readerFactoryOptions)
internal AbstractWritableArchive(ArchiveType type, SourceStream srcStream)
: base(type, srcStream)
{
}

View File

@@ -1,11 +1,14 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Rar;
using SharpCompress.Archives.SevenZip;
using SharpCompress.Archives.Tar;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives
@@ -26,34 +29,25 @@ namespace SharpCompress.Archives
throw new ArgumentException("Stream should be readable and seekable");
}
readerOptions ??= new ReaderOptions();
if (ZipArchive.IsZipFile(stream, null))
ArchiveType? type;
IsArchive(stream, out type); //test and reset stream position
if (type != null)
{
stream.Seek(0, SeekOrigin.Begin);
return ZipArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (SevenZipArchive.IsSevenZipFile(stream))
{
stream.Seek(0, SeekOrigin.Begin);
return SevenZipArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (GZipArchive.IsGZipFile(stream))
{
stream.Seek(0, SeekOrigin.Begin);
return GZipArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (RarArchive.IsRarFile(stream, readerOptions))
{
stream.Seek(0, SeekOrigin.Begin);
return RarArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (TarArchive.IsTarFile(stream))
{
stream.Seek(0, SeekOrigin.Begin);
return TarArchive.Open(stream, readerOptions);
switch (type.Value)
{
case ArchiveType.Zip:
return ZipArchive.Open(stream, readerOptions);
case ArchiveType.SevenZip:
return SevenZipArchive.Open(stream, readerOptions);
case ArchiveType.GZip:
return GZipArchive.Open(stream, readerOptions);
case ArchiveType.Rar:
return RarArchive.Open(stream, readerOptions);
case ArchiveType.Tar:
return TarArchive.Open(stream, readerOptions);
}
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip, LZip");
}
@@ -90,30 +84,108 @@ namespace SharpCompress.Archives
fileInfo.CheckNotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
using var stream = fileInfo.OpenRead();
if (ZipArchive.IsZipFile(stream, null))
ArchiveType? type;
using (Stream stream = fileInfo.OpenRead())
{
return ZipArchive.Open(fileInfo, options);
IsArchive(stream, out type); //test and reset stream position
if (type != null)
{
switch (type.Value)
{
case ArchiveType.Zip:
return ZipArchive.Open(fileInfo, options);
case ArchiveType.SevenZip:
return SevenZipArchive.Open(fileInfo, options);
case ArchiveType.GZip:
return GZipArchive.Open(fileInfo, options);
case ArchiveType.Rar:
return RarArchive.Open(fileInfo, options);
case ArchiveType.Tar:
return TarArchive.Open(fileInfo, options);
}
}
}
stream.Seek(0, SeekOrigin.Begin);
if (SevenZipArchive.IsSevenZipFile(stream))
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="options"></param>
public static IArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? options = null)
{
fileInfos.CheckNotNull(nameof(fileInfos));
FileInfo[] files = fileInfos.ToArray();
if (files.Length == 0)
throw new InvalidOperationException("No files to open");
FileInfo fileInfo = files[0];
if (files.Length == 1)
return Open(fileInfo, options);
fileInfo.CheckNotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
ArchiveType? type;
using (Stream stream = fileInfo.OpenRead())
IsArchive(stream, out type); //test and reset stream position
if (type != null)
{
return SevenZipArchive.Open(fileInfo, options);
switch (type.Value)
{
case ArchiveType.Zip:
return ZipArchive.Open(files, options);
case ArchiveType.SevenZip:
return SevenZipArchive.Open(files, options);
case ArchiveType.GZip:
return GZipArchive.Open(files, options);
case ArchiveType.Rar:
return RarArchive.Open(files, options);
case ArchiveType.Tar:
return TarArchive.Open(files, options);
}
}
stream.Seek(0, SeekOrigin.Begin);
if (GZipArchive.IsGZipFile(stream))
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
public static IArchive Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
{
streams.CheckNotNull(nameof(streams));
if (streams.Count() == 0)
throw new InvalidOperationException("No streams");
if (streams.Count() == 1)
return Open(streams.First(), options);
options ??= new ReaderOptions();
ArchiveType? type;
using (Stream stream = streams.First())
IsArchive(stream, out type); //test and reset stream position
if (type != null)
{
return GZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (RarArchive.IsRarFile(stream, options))
{
return RarArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (TarArchive.IsTarFile(stream))
{
return TarArchive.Open(fileInfo, options);
switch (type.Value)
{
case ArchiveType.Zip:
return ZipArchive.Open(streams, options);
case ArchiveType.SevenZip:
return SevenZipArchive.Open(streams, options);
case ArchiveType.GZip:
return GZipArchive.Open(streams, options);
case ArchiveType.Rar:
return RarArchive.Open(streams, options);
case ArchiveType.Tar:
return TarArchive.Open(streams, options);
}
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
}
@@ -130,5 +202,95 @@ namespace SharpCompress.Archives
entry.WriteToDirectory(destinationDirectory, options);
}
}
public static bool IsArchive(string filePath, out ArchiveType? type)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
using (Stream s = File.OpenRead(filePath))
return IsArchive(s, out type);
}
private static bool IsArchive(Stream stream, out ArchiveType? type)
{
type = null;
stream.CheckNotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException("Stream should be readable and seekable");
}
if (ZipArchive.IsZipFile(stream, null))
type = ArchiveType.Zip;
stream.Seek(0, SeekOrigin.Begin);
if (type == null)
{
if (SevenZipArchive.IsSevenZipFile(stream))
type = ArchiveType.SevenZip;
stream.Seek(0, SeekOrigin.Begin);
}
if (type == null)
{
if (GZipArchive.IsGZipFile(stream))
type = ArchiveType.GZip;
stream.Seek(0, SeekOrigin.Begin);
}
if (type == null)
{
if (RarArchive.IsRarFile(stream))
type = ArchiveType.Rar;
stream.Seek(0, SeekOrigin.Begin);
}
if (type == null)
{
if (TarArchive.IsTarFile(stream))
type = ArchiveType.Tar;
stream.Seek(0, SeekOrigin.Begin);
}
if (type == null) //test multipartzip as it could find zips in other non compressed archive types?
{
if (ZipArchive.IsZipMulti(stream)) //test the zip (last) file of a multipart zip
type = ArchiveType.Zip;
stream.Seek(0, SeekOrigin.Begin);
}
return type != null;
}
/// <summary>
/// From a passed in archive (zip, rar, 7z, 001), return all parts.
/// </summary>
/// <param name="part1"></param>
/// <returns></returns>
public static IEnumerable<string> GetFileParts(string part1)
{
part1.CheckNotNullOrEmpty(nameof(part1));
return GetFileParts(new FileInfo(part1)).Select(a => a.FullName);
}
/// <summary>
/// From a passed in archive (zip, rar, 7z, 001), return all parts.
/// </summary>
/// <param name="part1"></param>
/// <returns></returns>
public static IEnumerable<FileInfo> GetFileParts(FileInfo part1)
{
part1.CheckNotNull(nameof(part1));
yield return part1;
int i = 1;
FileInfo? part = RarArchiveVolumeFactory.GetFilePart(i++, part1);
if (part != null)
{
yield return part;
while ((part = RarArchiveVolumeFactory.GetFilePart(i++, part1)) != null) //tests split too
yield return part;
}
else
{
i = 1;
while ((part = ZipArchiveVolumeFactory.GetFilePart(i++, part1)) != null) //tests split too
yield return part;
}
}
}
}

View File

@@ -0,0 +1,29 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Readers;
using System.Linq;
using System.Text;
using SharpCompress.Common.Rar.Headers;
using System.Text.RegularExpressions;
namespace SharpCompress.Archives
{
internal abstract class ArchiveVolumeFactory
{
internal static FileInfo? GetFilePart(int index, FileInfo part1) //base the name on the first part
{
FileInfo? item = null;
//split 001, 002 ...
Match m = Regex.Match(part1.Name, @"^(.*\.)([0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(Path.Combine(part1.DirectoryName!, String.Concat(m.Groups[1].Value, (index + 1).ToString().PadLeft(m.Groups[2].Value.Length, '0'))));
if (item != null && item.Exists)
return item;
return null;
}
}
}

View File

@@ -1,9 +1,10 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
using SharpCompress.Writers;
@@ -32,7 +33,31 @@ namespace SharpCompress.Archives.GZip
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new GZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
return new GZipArchive(new SourceStream(fileInfo, i => ArchiveVolumeFactory.GetFilePart(i, fileInfo), readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? readerOptions = null)
{
fileInfos.CheckNotNull(nameof(fileInfos));
FileInfo[] files = fileInfos.ToArray();
return new GZipArchive(new SourceStream(files[0], i => i < files.Length ? files[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.CheckNotNull(nameof(streams));
Stream[] strms = streams.ToArray();
return new GZipArchive(new SourceStream(strms[0], i => i < strms.Length ? strms[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
@@ -43,7 +68,7 @@ namespace SharpCompress.Archives.GZip
public static GZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
return new GZipArchive(stream, readerOptions ?? new ReaderOptions());
return new GZipArchive(new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions()));
}
public static GZipArchive Create()
@@ -52,20 +77,21 @@ namespace SharpCompress.Archives.GZip
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal GZipArchive(FileInfo fileInfo, ReaderOptions options)
: base(ArchiveType.GZip, fileInfo, options)
internal GZipArchive(SourceStream srcStream)
: base(ArchiveType.Tar, srcStream)
{
}
protected override IEnumerable<GZipVolume> LoadVolumes(FileInfo file)
protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream srcStream)
{
return new GZipVolume(file, ReaderOptions).AsEnumerable();
srcStream.LoadAllParts();
int idx = 0;
return srcStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, idx++));
}
public static bool IsGZipFile(string filePath)
{
return IsGZipFile(new FileInfo(filePath));
@@ -114,16 +140,6 @@ namespace SharpCompress.Archives.GZip
return true;
}
/// <summary>
/// Takes multiple seekable Streams for a multi-part archive
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
internal GZipArchive(Stream stream, ReaderOptions options)
: base(ArchiveType.GZip, stream, options)
{
}
internal GZipArchive()
: base(ArchiveType.GZip)
{
@@ -160,11 +176,6 @@ namespace SharpCompress.Archives.GZip
}
}
protected override IEnumerable<GZipVolume> LoadVolumes(IEnumerable<Stream> streams)
{
return new GZipVolume(streams.First(), ReaderOptions).AsEnumerable();
}
protected override IEnumerable<GZipArchiveEntry> LoadEntries(IEnumerable<GZipVolume> volumes)
{
Stream stream = volumes.Single().Stream;

View File

@@ -1,4 +1,4 @@
#nullable disable
#nullable disable
using System;
using System.Collections.Generic;
@@ -54,7 +54,7 @@ namespace SharpCompress.Archives.GZip
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return new NonDisposingStream(stream);
return NonDisposingStream.Create(stream);
}
internal override void Close()
@@ -65,4 +65,4 @@ namespace SharpCompress.Archives.GZip
}
}
}
}
}

View File

@@ -1,4 +1,4 @@
using System.Collections.Generic;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common.Rar;
@@ -13,8 +13,8 @@ namespace SharpCompress.Archives.Rar
/// </summary>
internal class FileInfoRarArchiveVolume : RarVolume
{
internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options)
: base(StreamingMode.Seekable, fileInfo.OpenRead(), FixOptions(options))
internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options, int index = 0)
: base(StreamingMode.Seekable, fileInfo.OpenRead(), FixOptions(options), index)
{
FileInfo = fileInfo;
FileParts = GetVolumeFileParts().ToArray().ToReadOnly();

View File

@@ -1,4 +1,4 @@
using System.IO;
using System.IO;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Archives.Rar
@@ -6,7 +6,7 @@ namespace SharpCompress.Archives.Rar
internal sealed class FileInfoRarFilePart : SeekableFilePart
{
internal FileInfoRarFilePart(FileInfoRarArchiveVolume volume, string? password, MarkHeader mh, FileHeader fh, FileInfo fi)
: base(mh, fh, volume.Stream, password)
: base(mh, fh, volume.Index, volume.Stream, password)
{
FileInfo = fi;
}

View File

@@ -5,39 +5,26 @@ using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressors.Rar;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Rar;
namespace SharpCompress.Archives.Rar
{
public class
public class
RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
{
internal Lazy<IRarUnpack> UnpackV2017 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV2017.Unpack());
internal Lazy<IRarUnpack> UnpackV1 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV1.Unpack());
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
internal RarArchive(FileInfo fileInfo, ReaderOptions options)
: base(ArchiveType.Rar, fileInfo, options)
{
}
protected override IEnumerable<RarVolume> LoadVolumes(FileInfo file)
{
return RarArchiveVolumeFactory.GetParts(file, ReaderOptions);
}
/// <summary>
/// Takes multiple seekable Streams for a multi-part archive
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="streams"></param>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal RarArchive(IEnumerable<Stream> streams, ReaderOptions options)
: base(ArchiveType.Rar, streams, options)
internal RarArchive(SourceStream srcStream)
: base(ArchiveType.Rar, srcStream)
{
}
@@ -46,9 +33,21 @@ namespace SharpCompress.Archives.Rar
return RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions);
}
protected override IEnumerable<RarVolume> LoadVolumes(IEnumerable<Stream> streams)
protected override IEnumerable<RarVolume> LoadVolumes(SourceStream srcStream)
{
return RarArchiveVolumeFactory.GetParts(streams, ReaderOptions);
base.SrcStream.LoadAllParts(); //request all streams
Stream[] streams = base.SrcStream.Streams.ToArray();
int idx = 0;
if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions)) //test part 2 - true = multipart not split
{
base.SrcStream.IsVolumes = true;
streams[1].Position = 0;
base.SrcStream.Position = 0;
return srcStream.Streams.Select(a => new StreamRarArchiveVolume(a, ReaderOptions, idx++));
}
else //split mode or single file
return new StreamRarArchiveVolume(base.SrcStream, ReaderOptions, idx++).AsEnumerable();
}
protected override IReader CreateReaderForSolidExtraction()
@@ -60,6 +59,9 @@ namespace SharpCompress.Archives.Rar
public override bool IsSolid => Volumes.First().IsSolidArchive;
public virtual int MinVersion => Volumes.First().MinVersion;
public virtual int MaxVersion => Volumes.First().MaxVersion;
#region Creation
/// <summary>
/// Constructor with a FileInfo object to an existing file.
@@ -69,7 +71,8 @@ namespace SharpCompress.Archives.Rar
public static RarArchive Open(string filePath, ReaderOptions? options = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
return new RarArchive(new FileInfo(filePath), options ?? new ReaderOptions());
FileInfo fileInfo = new FileInfo(filePath);
return new RarArchive(new SourceStream(fileInfo, i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo), options ?? new ReaderOptions()));
}
/// <summary>
@@ -80,7 +83,7 @@ namespace SharpCompress.Archives.Rar
public static RarArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new RarArchive(fileInfo, options ?? new ReaderOptions());
return new RarArchive(new SourceStream(fileInfo, i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo), options ?? new ReaderOptions()));
}
/// <summary>
@@ -91,20 +94,34 @@ namespace SharpCompress.Archives.Rar
public static RarArchive Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
return Open(stream.AsEnumerable(), options ?? new ReaderOptions());
return new RarArchive(new SourceStream(stream, i => null, options ?? new ReaderOptions()));
}
/// <summary>
/// Takes multiple seekable Streams for a multi-part archive
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static RarArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? readerOptions = null)
{
fileInfos.CheckNotNull(nameof(fileInfos));
FileInfo[] files = fileInfos.ToArray();
return new RarArchive(new SourceStream(files[0], i => i < files.Length ? files[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
/// <param name="readerOptions"></param>
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.CheckNotNull(nameof(streams));
return new RarArchive(streams, options ?? new ReaderOptions());
Stream[] strms = streams.ToArray();
return new RarArchive(new SourceStream(strms[0], i => i < strms.Length ? strms[i] : null, readerOptions ?? new ReaderOptions()));
}
public static bool IsRarFile(string filePath)
{
return IsRarFile(new FileInfo(filePath));

View File

@@ -21,6 +21,7 @@ namespace SharpCompress.Archives.Rar
this.parts = parts.ToList();
this.archive = archive;
this.readerOptions = readerOptions;
this.IsSolid = this.FileHeader.IsSolid;
}
public override CompressionType CompressionType => CompressionType.Rar;
@@ -85,4 +86,4 @@ namespace SharpCompress.Archives.Rar
}
}
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Rar;
@@ -6,135 +6,35 @@ using SharpCompress.Readers;
using System.Linq;
using System.Text;
using SharpCompress.Common.Rar.Headers;
using System.Text.RegularExpressions;
namespace SharpCompress.Archives.Rar
{
internal static class RarArchiveVolumeFactory
{
internal static IEnumerable<RarVolume> GetParts(IEnumerable<Stream> streams, ReaderOptions options)
internal static FileInfo? GetFilePart(int index, FileInfo part1) //base the name on the first part
{
foreach (Stream s in streams)
{
if (!s.CanRead || !s.CanSeek)
{
throw new ArgumentException("Stream is not readable and seekable");
}
StreamRarArchiveVolume part = new StreamRarArchiveVolume(s, options);
yield return part;
}
}
FileInfo? item = null;
internal static IEnumerable<RarVolume> GetParts(FileInfo fileInfo, ReaderOptions options)
{
FileInfoRarArchiveVolume part = new FileInfoRarArchiveVolume(fileInfo, options);
yield return part;
ArchiveHeader ah = part.ArchiveHeader;
if (!ah.IsVolume)
{
yield break; //if file isn't volume then there is no reason to look
}
fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart)!;
//we use fileinfo because rar is dumb and looks at file names rather than archive info for another volume
while (fileInfo != null && fileInfo.Exists)
{
part = new FileInfoRarArchiveVolume(fileInfo, options);
fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart)!;
yield return part;
}
}
private static FileInfo? GetNextFileInfo(ArchiveHeader ah, FileInfoRarFilePart? currentFilePart)
{
if (currentFilePart is null)
{
return null;
}
bool oldNumbering = ah.OldNumberingFormat
|| currentFilePart.MarkHeader.OldNumberingFormat;
if (oldNumbering)
{
return FindNextFileWithOldNumbering(currentFilePart.FileInfo);
}
//new style rar - ..part1 | /part01 | part001 ....
Match m = Regex.Match(part1.Name, @"^(.*\.part)([0-9]+)(\.rar)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(Path.Combine(part1.DirectoryName!, String.Concat(m.Groups[1].Value, (index + 1).ToString().PadLeft(m.Groups[2].Value.Length, '0'), m.Groups[3].Value)));
else
{
return FindNextFileWithNewNumbering(currentFilePart.FileInfo);
//old style - ...rar, .r00, .r01 ...
m = Regex.Match(part1.Name, @"^(.*\.r)(ar|[0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(Path.Combine(part1.DirectoryName!, String.Concat(m.Groups[1].Value, index == 0 ? "ar" : (index - 1).ToString().PadLeft(m.Groups[2].Value.Length, '0'))));
else //split .001, .002 ....
return ArchiveVolumeFactory.GetFilePart(index, part1);
}
if (item != null && item.Exists)
return item;
return null; //no more items
}
private static FileInfo FindNextFileWithOldNumbering(FileInfo currentFileInfo)
{
// .rar, .r00, .r01, ...
string extension = currentFileInfo.Extension;
var buffer = new StringBuilder(currentFileInfo.FullName.Length);
buffer.Append(currentFileInfo.FullName.Substring(0,
currentFileInfo.FullName.Length - extension.Length));
if (string.Compare(extension, ".rar", StringComparison.OrdinalIgnoreCase) == 0)
{
buffer.Append(".r00");
}
else
{
if (int.TryParse(extension.Substring(2, 2), out int num))
{
num++;
buffer.Append(".r");
if (num < 10)
{
buffer.Append('0');
}
buffer.Append(num);
}
else
{
ThrowInvalidFileName(currentFileInfo);
}
}
return new FileInfo(buffer.ToString());
}
private static FileInfo FindNextFileWithNewNumbering(FileInfo currentFileInfo)
{
// part1.rar, part2.rar, ...
string extension = currentFileInfo.Extension;
if (string.Compare(extension, ".rar", StringComparison.OrdinalIgnoreCase) != 0)
{
throw new ArgumentException("Invalid extension, expected 'rar': " + currentFileInfo.FullName);
}
int startIndex = currentFileInfo.FullName.LastIndexOf(".part");
if (startIndex < 0)
{
ThrowInvalidFileName(currentFileInfo);
}
StringBuilder buffer = new StringBuilder(currentFileInfo.FullName.Length);
buffer.Append(currentFileInfo.FullName, 0, startIndex);
string numString = currentFileInfo.FullName.Substring(startIndex + 5,
currentFileInfo.FullName.IndexOf('.', startIndex + 5) -
startIndex - 5);
buffer.Append(".part");
if (int.TryParse(numString, out int num))
{
num++;
for (int i = 0; i < numString.Length - num.ToString().Length; i++)
{
buffer.Append('0');
}
buffer.Append(num);
}
else
{
ThrowInvalidFileName(currentFileInfo);
}
buffer.Append(".rar");
return new FileInfo(buffer.ToString());
}
private static void ThrowInvalidFileName(FileInfo fileInfo)
{
throw new ArgumentException("Filename invalid or next archive could not be found:"
+ fileInfo.FullName);
}
}
}
}

View File

@@ -1,4 +1,4 @@
using System.IO;
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
@@ -9,8 +9,8 @@ namespace SharpCompress.Archives.Rar
private readonly Stream stream;
private readonly string? password;
internal SeekableFilePart(MarkHeader mh, FileHeader fh, Stream stream, string? password)
: base(mh, fh)
internal SeekableFilePart(MarkHeader mh, FileHeader fh, int index, Stream stream, string? password)
: base(mh, fh, index)
{
this.stream = stream;
this.password = password;
@@ -28,4 +28,4 @@ namespace SharpCompress.Archives.Rar
internal override string FilePartName => "Unknown Stream - File Entry: " + FileHeader.FileName;
}
}
}

View File

@@ -1,4 +1,4 @@
using System.Collections.Generic;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
@@ -9,8 +9,8 @@ namespace SharpCompress.Archives.Rar
{
internal class StreamRarArchiveVolume : RarVolume
{
internal StreamRarArchiveVolume(Stream stream, ReaderOptions options)
: base(StreamingMode.Seekable, stream, options)
internal StreamRarArchiveVolume(Stream stream, ReaderOptions options, int index = 0)
: base(StreamingMode.Seekable, stream, options, index)
{
}
@@ -21,7 +21,7 @@ namespace SharpCompress.Archives.Rar
internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader)
{
return new SeekableFilePart(markHeader, fileHeader, Stream, ReaderOptions.Password);
return new SeekableFilePart(markHeader, fileHeader, this.Index, Stream, ReaderOptions.Password);
}
}
}
}

View File

@@ -1,4 +1,4 @@
#nullable disable
#nullable disable
using System;
using System.Collections.Generic;
@@ -34,8 +34,33 @@ namespace SharpCompress.Archives.SevenZip
public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
{
fileInfo.CheckNotNull("fileInfo");
return new SevenZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
return new SevenZipArchive(new SourceStream(fileInfo, i => ArchiveVolumeFactory.GetFilePart(i, fileInfo), readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions readerOptions = null)
{
fileInfos.CheckNotNull(nameof(fileInfos));
FileInfo[] files = fileInfos.ToArray();
return new SevenZipArchive(new SourceStream(files[0], i => i < files.Length ? files[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(IEnumerable<Stream> streams, ReaderOptions readerOptions = null)
{
streams.CheckNotNull(nameof(streams));
Stream[] strms = streams.ToArray();
return new SevenZipArchive(new SourceStream(strms[0], i => i < strms.Length ? strms[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
@@ -44,17 +69,24 @@ namespace SharpCompress.Archives.SevenZip
public static SevenZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
{
stream.CheckNotNull("stream");
return new SevenZipArchive(stream, readerOptions ?? new ReaderOptions());
return new SevenZipArchive(new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions()));
}
internal SevenZipArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.SevenZip, fileInfo, readerOptions)
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal SevenZipArchive(SourceStream srcStream)
: base(ArchiveType.SevenZip, srcStream)
{
}
protected override IEnumerable<SevenZipVolume> LoadVolumes(FileInfo file)
protected override IEnumerable<SevenZipVolume> LoadVolumes(SourceStream srcStream)
{
return new SevenZipVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
base.SrcStream.LoadAllParts(); //request all streams
int idx = 0;
return new SevenZipVolume(srcStream, ReaderOptions, idx++).AsEnumerable(); //simple single volume or split, multivolume not supported
}
public static bool IsSevenZipFile(string filePath)
@@ -74,38 +106,32 @@ namespace SharpCompress.Archives.SevenZip
}
}
internal SevenZipArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.SevenZip, stream.AsEnumerable(), readerOptions)
{
}
internal SevenZipArchive()
: base(ArchiveType.SevenZip)
{
}
protected override IEnumerable<SevenZipVolume> LoadVolumes(IEnumerable<Stream> streams)
{
foreach (Stream s in streams)
{
if (!s.CanRead || !s.CanSeek)
{
throw new ArgumentException("Stream is not readable and seekable");
}
SevenZipVolume volume = new SevenZipVolume(s, ReaderOptions);
yield return volume;
}
}
protected override IEnumerable<SevenZipArchiveEntry> LoadEntries(IEnumerable<SevenZipVolume> volumes)
{
var stream = volumes.Single().Stream;
LoadFactory(stream);
var entries = new SevenZipArchiveEntry[database._files.Count];
for (int i = 0; i < database._files.Count; i++)
{
var file = database._files[i];
yield return new SevenZipArchiveEntry(this, new SevenZipFilePart(stream, database, i, file, ReaderOptions.ArchiveEncoding));
entries[i] = new SevenZipArchiveEntry(this, new SevenZipFilePart(stream, database, i, file, ReaderOptions.ArchiveEncoding));
}
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
{
var isSolid = false;
foreach (var entry in group)
{
entry.IsSolid = isSolid;
isSolid = true; //mark others in this group as solid - same as rar behaviour.
}
}
return entries;
}
private void LoadFactory(Stream stream)

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -34,7 +34,31 @@ namespace SharpCompress.Archives.Tar
public static TarArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new TarArchive(fileInfo, readerOptions ?? new ReaderOptions());
return new TarArchive(new SourceStream(fileInfo, i => ArchiveVolumeFactory.GetFilePart(i, fileInfo), readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? readerOptions = null)
{
fileInfos.CheckNotNull(nameof(fileInfos));
FileInfo[] files = fileInfos.ToArray();
return new TarArchive(new SourceStream(files[0], i => i < files.Length ? files[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.CheckNotNull(nameof(streams));
Stream[] strms = streams.ToArray();
return new TarArchive(new SourceStream(strms[0], i => i < strms.Length ? strms[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
@@ -45,7 +69,7 @@ namespace SharpCompress.Archives.Tar
public static TarArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
return new TarArchive(stream, readerOptions ?? new ReaderOptions());
return new TarArchive(new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions()));
}
public static bool IsTarFile(string filePath)
@@ -80,28 +104,20 @@ namespace SharpCompress.Archives.Tar
return false;
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
internal TarArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.Tar, fileInfo, readerOptions)
protected override IEnumerable<TarVolume> LoadVolumes(SourceStream srcStream)
{
}
protected override IEnumerable<TarVolume> LoadVolumes(FileInfo file)
{
return new TarVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
base.SrcStream.LoadAllParts(); //request all streams
int idx = 0;
return new TarVolume(srcStream, ReaderOptions, idx++).AsEnumerable(); //simple single volume or split, multivolume not supported
}
/// <summary>
/// Takes multiple seekable Streams for a multi-part archive
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
internal TarArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.Tar, stream, readerOptions)
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal TarArchive(SourceStream srcStream)
: base(ArchiveType.Tar, srcStream)
{
}
@@ -110,11 +126,6 @@ namespace SharpCompress.Archives.Tar
{
}
protected override IEnumerable<TarVolume> LoadVolumes(IEnumerable<Stream> streams)
{
return new TarVolume(streams.First(), ReaderOptions).AsEnumerable();
}
protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
{
Stream stream = volumes.Single().Stream;

View File

@@ -1,4 +1,4 @@
#nullable disable
#nullable disable
using System;
using System.Collections.Generic;
@@ -53,7 +53,7 @@ namespace SharpCompress.Archives.Tar
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return new NonDisposingStream(stream);
return NonDisposingStream.Create(stream);
}
internal override void Close()
@@ -64,4 +64,4 @@ namespace SharpCompress.Archives.Tar
}
}
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -6,6 +6,7 @@ using SharpCompress.Common;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Deflate;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Zip;
using SharpCompress.Writers;
@@ -25,6 +26,17 @@ namespace SharpCompress.Archives.Zip
/// </summary>
public CompressionLevel DeflateCompressionLevel { get; set; }
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal ZipArchive(SourceStream srcStream)
: base(ArchiveType.Zip, srcStream)
{
headerFactory = new SeekableZipHeaderFactory(srcStream.ReaderOptions.Password, srcStream.ReaderOptions.ArchiveEncoding);
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
@@ -44,7 +56,31 @@ namespace SharpCompress.Archives.Zip
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new ZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
return new ZipArchive(new SourceStream(fileInfo, i => ZipArchiveVolumeFactory.GetFilePart(i, fileInfo), readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? readerOptions = null)
{
fileInfos.CheckNotNull(nameof(fileInfos));
FileInfo[] files = fileInfos.ToArray();
return new ZipArchive(new SourceStream(files[0], i => i < files.Length ? files[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.CheckNotNull(nameof(streams));
Stream[] strms = streams.ToArray();
return new ZipArchive(new SourceStream(strms[0], i => i < strms.Length ? strms[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
@@ -55,7 +91,7 @@ namespace SharpCompress.Archives.Zip
public static ZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
return new ZipArchive(stream, readerOptions ?? new ReaderOptions());
return new ZipArchive(new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions()));
}
public static bool IsZipFile(string filePath, string? password = null)
@@ -97,20 +133,62 @@ namespace SharpCompress.Archives.Zip
}
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
internal ZipArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.Zip, fileInfo, readerOptions)
public static bool IsZipMulti(Stream stream, string? password = null)
{
headerFactory = new SeekableZipHeaderFactory(readerOptions.Password, readerOptions.ArchiveEncoding);
StreamingZipHeaderFactory headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding());
try
{
ZipHeader? header = headerFactory.ReadStreamHeader(stream).FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
if (stream.CanSeek) //could be multipart. Test for central directory - might not be z64 safe
{
SeekableZipHeaderFactory z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
var x = z.ReadSeekableHeader(stream).FirstOrDefault();
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
}
else
return false;
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
protected override IEnumerable<ZipVolume> LoadVolumes(FileInfo file)
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream srcStream)
{
return new ZipVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
base.SrcStream.LoadAllParts(); //request all streams
base.SrcStream.Position = 0;
List<Stream> streams = base.SrcStream.Streams.ToList();
int idx = 0;
if (streams.Count > 1) //test part 2 - true = multipart not split
{
streams[1].Position += 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
bool isZip = IsZipFile(streams[1], ReaderOptions.Password);
streams[1].Position -= 4;
if (isZip)
{
base.SrcStream.IsVolumes = true;
var tmp = streams[0]; //arcs as zip, z01 ... swap the zip the end
streams.RemoveAt(0);
streams.Add(tmp);
//streams[0].Position = 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
return streams.Select(a => new ZipVolume(a, ReaderOptions, idx++));
}
}
//split mode or single file
return new ZipVolume(base.SrcStream, ReaderOptions, idx++).AsEnumerable();
}
internal ZipArchive()
@@ -118,46 +196,35 @@ namespace SharpCompress.Archives.Zip
{
}
/// <summary>
/// Takes multiple seekable Streams for a multi-part archive
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
internal ZipArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.Zip, stream, readerOptions)
{
headerFactory = new SeekableZipHeaderFactory(readerOptions.Password, readerOptions.ArchiveEncoding);
}
protected override IEnumerable<ZipVolume> LoadVolumes(IEnumerable<Stream> streams)
{
return new ZipVolume(streams.First(), ReaderOptions).AsEnumerable();
}
protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
{
var volume = volumes.Single();
Stream stream = volume.Stream;
foreach (ZipHeader h in headerFactory.ReadSeekableHeader(stream))
var vols = volumes.ToArray();
foreach (ZipHeader h in headerFactory.ReadSeekableHeader(vols.Last().Stream))
{
if (h != null)
{
switch (h.ZipHeaderType)
{
case ZipHeaderType.DirectoryEntry:
{
DirectoryEntryHeader deh = (DirectoryEntryHeader)h;
Stream s;
if (deh.RelativeOffsetOfEntryHeader + deh.CompressedSize > vols[deh.DiskNumberStart].Stream.Length)
{
yield return new ZipArchiveEntry(this,
new SeekableZipFilePart(headerFactory,
(DirectoryEntryHeader)h,
stream));
var v = vols.Skip(deh.DiskNumberStart).ToArray();
s = new SourceStream(v[0].Stream, i => i < v.Length ? v[i].Stream : null, new ReaderOptions() { LeaveStreamOpen = true });
}
break;
else
s = vols[deh.DiskNumberStart].Stream;
yield return new ZipArchiveEntry(this, new SeekableZipFilePart(headerFactory, deh, s));
}
break;
case ZipHeaderType.DirectoryEnd:
{
byte[] bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
volume.Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
yield break;
}
{
byte[] bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
volumes.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
yield break;
}
}
}
}

View File

@@ -0,0 +1,34 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Readers;
using System.Linq;
using System.Text;
using SharpCompress.Common.Rar.Headers;
using System.Text.RegularExpressions;
namespace SharpCompress.Archives.Zip
{
internal static class ZipArchiveVolumeFactory
{
internal static FileInfo? GetFilePart(int index, FileInfo part1) //base the name on the first part
{
FileInfo? item = null;
//load files with zip/zipx first. Swapped to end once loaded in ZipArchive
//new style .zip, z01.. | .zipx, zx01 - if the numbers go beyond 99 then they use 100 ...1000 etc
Match m = Regex.Match(part1.Name, @"^(.*\.)(zipx?|zx?[0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(Path.Combine(part1.DirectoryName!, String.Concat(m.Groups[1].Value, Regex.Replace(m.Groups[2].Value, @"[^xz]", ""), index.ToString().PadLeft(2, '0'))));
else //split - 001, 002 ...
return ArchiveVolumeFactory.GetFilePart(index, part1);
if (item != null && item.Exists)
return item;
return null; //no more items
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
@@ -53,7 +53,7 @@ namespace SharpCompress.Archives.Zip
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return new NonDisposingStream(stream);
return NonDisposingStream.Create(stream);
}
internal override void Close()
@@ -65,4 +65,4 @@ namespace SharpCompress.Archives.Zip
}
}
}
}
}

View File

@@ -36,7 +36,7 @@ namespace SharpCompress.Common
Password = password;
}
#if !NET461
#if !NETFRAMEWORK
static ArchiveEncoding()
{
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);

View File

@@ -1,5 +1,6 @@
using System;
using System;
using System.Collections.Generic;
using System.Linq;
namespace SharpCompress.Common
{
@@ -70,12 +71,14 @@ namespace SharpCompress.Common
/// </summary>
public abstract bool IsSplitAfter { get; }
public int VolumeIndexFirst => this.Parts?.FirstOrDefault()?.Index ?? 0;
public int VolumeIndexLast => this.Parts?.LastOrDefault()?.Index ?? 0;
/// <inheritdoc/>
public override string ToString() => Key;
internal abstract IEnumerable<FilePart> Parts { get; }
internal bool IsSolid { get; set; }
public bool IsSolid { get; set; }
internal virtual void Close()
{

View File

@@ -15,7 +15,7 @@ namespace SharpCompress.Common
{
string destinationFileName;
string fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
//check for trailing slash.
if (fullDestinationDirectoryPath[fullDestinationDirectoryPath.Length - 1] != Path.DirectorySeparatorChar)
{

View File

@@ -1,4 +1,4 @@
using System.IO;
using System.IO;
namespace SharpCompress.Common
{
@@ -12,6 +12,7 @@ namespace SharpCompress.Common
internal ArchiveEncoding ArchiveEncoding { get; }
internal abstract string FilePartName { get; }
public int Index { get; set; }
internal abstract Stream GetCompressedStream();
internal abstract Stream? GetRawStream();

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
@@ -31,8 +31,8 @@ namespace SharpCompress.Common.GZip
internal long EntryStartPosition { get; }
internal DateTime? DateModified { get; private set; }
internal int? Crc { get; private set; }
internal int? UncompressedSize { get; private set; }
internal uint? Crc { get; private set; }
internal uint? UncompressedSize { get; private set; }
internal override string FilePartName => _name!;
@@ -52,8 +52,8 @@ namespace SharpCompress.Common.GZip
Span<byte> trailer = stackalloc byte[8];
int n = _stream.Read(trailer);
Crc = BinaryPrimitives.ReadInt32LittleEndian(trailer);
UncompressedSize = BinaryPrimitives.ReadInt32LittleEndian(trailer.Slice(4));
Crc = BinaryPrimitives.ReadUInt32LittleEndian(trailer);
UncompressedSize = BinaryPrimitives.ReadUInt32LittleEndian(trailer.Slice(4));
}
private void ReadAndValidateGzipHeader()

View File

@@ -1,12 +1,12 @@
using System.IO;
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common.GZip
{
public class GZipVolume : Volume
{
public GZipVolume(Stream stream, ReaderOptions options)
: base(stream, options)
public GZipVolume(Stream stream, ReaderOptions options, int index = 0)
: base(stream, options, index)
{
}
@@ -20,4 +20,4 @@ namespace SharpCompress.Common.GZip
public override bool IsMultiVolume => true;
}
}
}

View File

@@ -1,4 +1,5 @@
using System;
using System;
using System.Collections.Generic;
namespace SharpCompress.Common
{
@@ -14,9 +15,12 @@ namespace SharpCompress.Common
bool IsDirectory { get; }
bool IsEncrypted { get; }
bool IsSplitAfter { get; }
bool IsSolid { get; }
int VolumeIndexFirst { get; }
int VolumeIndexLast { get; }
DateTime? LastAccessedTime { get; }
DateTime? LastModifiedTime { get; }
long Size { get; }
int? Attrib { get; }
}
}
}

View File

@@ -1,8 +1,11 @@
using System;
using System;
namespace SharpCompress.Common
{
public interface IVolume : IDisposable
{
int Index { get; }
string FileName { get; }
}
}
}

View File

@@ -10,7 +10,7 @@ namespace SharpCompress.Common.Rar
/// <summary>
/// As the V2017 port isn't complete, add this check to use the legacy Rar code.
/// </summary>
internal bool IsRarV3 => FileHeader.CompressionAlgorithm == 29 || FileHeader.CompressionAlgorithm == 36;
internal bool IsRarV3 => FileHeader.CompressionAlgorithm == 20 || FileHeader.CompressionAlgorithm == 26 || FileHeader.CompressionAlgorithm == 29 || FileHeader.CompressionAlgorithm == 36; //Nanook - Added 20+26 as Test arc from WinRar2.8 (algo 20) was failing with 2017 code
/// <summary>
/// The File's 32 bit CRC Hash

View File

@@ -1,4 +1,4 @@
using System.IO;
using System.IO;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Common.Rar
@@ -8,11 +8,12 @@ namespace SharpCompress.Common.Rar
/// </summary>
internal abstract class RarFilePart : FilePart
{
internal RarFilePart(MarkHeader mh, FileHeader fh)
internal RarFilePart(MarkHeader mh, FileHeader fh, int index)
: base(fh.ArchiveEncoding)
{
MarkHeader = mh;
FileHeader = fh;
Index = index;
}
internal MarkHeader MarkHeader { get; }
@@ -24,4 +25,4 @@ namespace SharpCompress.Common.Rar
return null;
}
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -14,9 +14,10 @@ namespace SharpCompress.Common.Rar
public abstract class RarVolume : Volume
{
private readonly RarHeaderFactory _headerFactory;
internal int _maxCompressionAlgorithm;
internal RarVolume(StreamingMode mode, Stream stream, ReaderOptions options)
: base(stream, options)
internal RarVolume(StreamingMode mode, Stream stream, ReaderOptions options, int index = 0)
: base(stream, options, index)
{
_headerFactory = new RarHeaderFactory(mode, options);
}
@@ -51,6 +52,8 @@ namespace SharpCompress.Common.Rar
case HeaderType.File:
{
var fh = (FileHeader)header;
if (_maxCompressionAlgorithm < fh.CompressionAlgorithm)
_maxCompressionAlgorithm = fh.CompressionAlgorithm;
yield return CreateFilePart(lastMarkHeader!, fh);
}
break;
@@ -110,5 +113,37 @@ namespace SharpCompress.Common.Rar
return ArchiveHeader.IsSolid;
}
}
public int MinVersion
{
get
{
EnsureArchiveHeaderLoaded();
if (_maxCompressionAlgorithm >= 50)
return 5; //5-6
else if (_maxCompressionAlgorithm >= 29)
return 3; //3-4
else if (_maxCompressionAlgorithm >= 20)
return 2; //2
else
return 1;
}
}
public int MaxVersion
{
get
{
EnsureArchiveHeaderLoaded();
if (_maxCompressionAlgorithm >= 50)
return 6; //5-6
else if (_maxCompressionAlgorithm >= 29)
return 4; //3-4
else if (_maxCompressionAlgorithm >= 20)
return 2; //2
else
return 1;
}
}
}
}
}

View File

@@ -1248,7 +1248,7 @@ namespace SharpCompress.Common.SevenZip
if (nextHeaderOffset > _streamEnding - db._startPositionAfterHeader)
{
throw new IndexOutOfRangeException();
throw new InvalidOperationException("nextHeaderOffset is invalid");
}
_stream.Seek(nextHeaderOffset, SeekOrigin.Current);

View File

@@ -30,7 +30,7 @@ namespace SharpCompress.Common.SevenZip
}
}
throw new Exception();
throw new InvalidOperationException();
}
public int GetNumOutStreams()
@@ -185,4 +185,4 @@ namespace SharpCompress.Common.SevenZip
return true;
}
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.IO;
using System.Linq;
using SharpCompress.IO;
@@ -26,7 +26,6 @@ namespace SharpCompress.Common.SevenZip
internal CFileItem Header { get; }
internal CFolder? Folder { get; }
internal int Index { get; }
internal override string FilePartName => Header.Name;
@@ -105,4 +104,4 @@ namespace SharpCompress.Common.SevenZip
internal bool IsEncrypted => Folder!._coders.FindIndex(c => c._methodId._id == CMethodId.K_AES_ID) != -1;
}
}
}

View File

@@ -1,13 +1,13 @@
using System.IO;
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common.SevenZip
{
public class SevenZipVolume : Volume
{
public SevenZipVolume(Stream stream, ReaderOptions readerFactoryOptions)
: base(stream, readerFactoryOptions)
public SevenZipVolume(Stream stream, ReaderOptions readerFactoryOptions, int index = 0)
: base(stream, readerFactoryOptions, index)
{
}
}
}
}

View File

@@ -19,11 +19,9 @@ namespace SharpCompress.Common.Tar.Headers
internal string Name { get; set; }
internal string LinkName { get; set; }
//internal int Mode { get; set; }
//internal int UserId { get; set; }
//internal string UserName { get; set; }
//internal int GroupId { get; set; }
//internal string GroupName { get; set; }
internal long Mode { get; set; }
internal long UserId { get; set; }
internal long GroupId { get; set; }
internal long Size { get; set; }
internal DateTime LastModifiedTime { get; set; }
internal EntryType EntryType { get; set; }
@@ -127,9 +125,12 @@ namespace SharpCompress.Common.Tar.Headers
EntryType = ReadEntryType(buffer);
Size = ReadSize(buffer);
//Mode = ReadASCIIInt32Base8(buffer, 100, 7);
//UserId = ReadASCIIInt32Base8(buffer, 108, 7);
//GroupId = ReadASCIIInt32Base8(buffer, 116, 7);
Mode = ReadAsciiInt64Base8(buffer, 100, 7);
if(EntryType == EntryType.Directory)
Mode |= 0b1_000_000_000;
UserId = ReadAsciiInt64Base8(buffer, 108, 7);
GroupId = ReadAsciiInt64Base8(buffer, 116, 7);
long unixTimeStamp = ReadAsciiInt64Base8(buffer, 136, 11);
LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime();

View File

@@ -44,6 +44,12 @@ namespace SharpCompress.Common.Tar
public override bool IsSplitAfter => false;
public long Mode => _filePart.Header.Mode;
public long UserID => _filePart.Header.UserId;
public long GroupId => _filePart.Header.GroupId;
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
internal static IEnumerable<TarEntry> GetEntries(StreamingMode mode, Stream stream,

View File

@@ -1,13 +1,13 @@
using System.IO;
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common.Tar
{
public class TarVolume : Volume
{
public TarVolume(Stream stream, ReaderOptions readerOptions)
: base(stream, readerOptions)
public TarVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
: base(stream, readerOptions, index)
{
}
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.IO;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -9,12 +9,13 @@ namespace SharpCompress.Common
{
private readonly Stream _actualStream;
internal Volume(Stream stream, ReaderOptions readerOptions)
internal Volume(Stream stream, ReaderOptions readerOptions, int index = 0)
{
Index = index;
ReaderOptions = readerOptions;
if (readerOptions.LeaveStreamOpen)
{
stream = new NonDisposingStream(stream);
stream = NonDisposingStream.Create(stream);
}
_actualStream = stream;
}
@@ -29,6 +30,10 @@ namespace SharpCompress.Common
/// </summary>
public virtual bool IsFirstVolume => true;
public virtual int Index { get; internal set; }
public string FileName { get { return (_actualStream as FileStream)?.Name!; } }
/// <summary>
/// RarArchive is part of a multi-part archive.
/// </summary>
@@ -48,4 +53,4 @@ namespace SharpCompress.Common
GC.SuppressFinalize(this);
}
}
}
}

View File

@@ -36,7 +36,7 @@ namespace SharpCompress.Common.Zip
// ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR should be before the EOCD
stream.Seek(eocd_location - ZIP64_EOCD_LENGTH - 4, SeekOrigin.Begin);
uint zip64_locator = reader.ReadUInt32();
if( zip64_locator != ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR )
if (zip64_locator != ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR)
{
throw new ArchiveException("Failed to locate the Zip64 Directory Locator");
}
@@ -86,11 +86,11 @@ namespace SharpCompress.Common.Zip
}
}
private static bool IsMatch( byte[] haystack, int position, byte[] needle)
private static bool IsMatch(byte[] haystack, int position, byte[] needle)
{
for( int i = 0; i < needle.Length; i++ )
for (int i = 0; i < needle.Length; i++)
{
if( haystack[ position + i ] != needle[ i ] )
if (haystack[position + i] != needle[i])
{
return false;
}
@@ -120,9 +120,9 @@ namespace SharpCompress.Common.Zip
// don't exclude the minimum eocd region, otherwise you fail to locate the header in empty zip files
var max_search_area = len; // - MINIMUM_EOCD_LENGTH;
for( int pos_from_end = 0; pos_from_end < max_search_area; ++pos_from_end)
for (int pos_from_end = 0; pos_from_end < max_search_area; ++pos_from_end)
{
if( IsMatch(seek, pos_from_end, needle) )
if (IsMatch(seek, pos_from_end, needle))
{
stream.Seek(-pos_from_end, SeekOrigin.End);
return;

View File

@@ -1,4 +1,4 @@
using System.IO;
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Deflate;
using SharpCompress.IO;
@@ -28,7 +28,7 @@ namespace SharpCompress.Common.Zip
_decompressionStream = CreateDecompressionStream(GetCryptoStream(CreateBaseStream()), Header.CompressionMethod);
if (LeaveStreamOpen)
{
return new NonDisposingStream(_decompressionStream);
return NonDisposingStream.Create(_decompressionStream);
}
return _decompressionStream;
}
@@ -43,12 +43,38 @@ namespace SharpCompress.Common.Zip
{
_decompressionStream ??= GetCompressedStream();
_decompressionStream.Skip();
if (_decompressionStream is DeflateStream deflateStream)
if( Header.CompressionMethod != ZipCompressionMethod.None )
{
rewindableStream.Rewind(deflateStream.InputBuffer);
_decompressionStream.Skip();
if (_decompressionStream is DeflateStream deflateStream)
{
rewindableStream.Rewind(deflateStream.InputBuffer);
}
}
else
{
// We would need to search for the magic word
rewindableStream.Position -= 4;
var pos = rewindableStream.Position;
while( Utility.Find(rewindableStream, new byte[] { 0x50,0x4b,0x07,0x08 } ) )
{
// We should probably check CRC32 for positive matching as well
var size = rewindableStream.Position - pos;
var br = new BinaryReader(rewindableStream);
br.ReadUInt32();
br.ReadUInt32(); // CRC32
var compressed_size = br.ReadUInt32();
var uncompressed_size = br.ReadUInt32();
if (compressed_size == size && compressed_size == uncompressed_size )
{
rewindableStream.Position -= 16;
break;
}
rewindableStream.Position -= 12;
}
}
Skipped = true;
}
var reader = new BinaryReader(rewindableStream);
@@ -56,4 +82,4 @@ namespace SharpCompress.Common.Zip
return reader;
}
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Buffers.Binary;
using System.IO;
using System.Linq;
@@ -37,7 +37,7 @@ namespace SharpCompress.Common.Zip
Stream decompressionStream = CreateDecompressionStream(GetCryptoStream(CreateBaseStream()), Header.CompressionMethod);
if (LeaveStreamOpen)
{
return new NonDisposingStream(decompressionStream);
return NonDisposingStream.Create(decompressionStream);
}
return decompressionStream;
}
@@ -142,7 +142,7 @@ namespace SharpCompress.Common.Zip
&& FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor))
|| Header.IsZip64)
{
plainStream = new NonDisposingStream(plainStream); //make sure AES doesn't close
plainStream = NonDisposingStream.Create(plainStream); //make sure AES doesn't close
}
else
{
@@ -174,7 +174,7 @@ namespace SharpCompress.Common.Zip
default:
{
throw new ArgumentOutOfRangeException();
throw new InvalidOperationException("Header.CompressionMethod is invalid");
}
}

View File

@@ -1,15 +1,15 @@
using System.IO;
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common.Zip
{
public class ZipVolume : Volume
{
public ZipVolume(Stream stream, ReaderOptions readerOptions)
: base(stream, readerOptions)
public ZipVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
: base(stream, readerOptions, index)
{
}
public string? Comment { get; internal set; }
}
}
}

View File

@@ -83,7 +83,7 @@ namespace SharpCompress.Compressors.BZip2
stream.SetLength(value);
}
#if !NET461 && !NETSTANDARD2_0
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override int Read(Span<byte> buffer)
{
@@ -123,4 +123,4 @@ namespace SharpCompress.Compressors.BZip2
return true;
}
}
}
}

View File

@@ -1,20 +1,20 @@
// Zlib.cs
// ------------------------------------------------------------------
//
// Copyright (c) 2009 Dino Chiesa and Microsoft Corporation.
// Copyright (c) 2009 Dino Chiesa and Microsoft Corporation.
// All rights reserved.
//
// This code module is part of DotNetZip, a zipfile class library.
//
// ------------------------------------------------------------------
//
// This code is licensed under the Microsoft Public License.
// This code is licensed under the Microsoft Public License.
// See the file License.txt for the license details.
// More info on: http://dotnetzip.codeplex.com
//
// ------------------------------------------------------------------
//
// last saved (in emacs):
// last saved (in emacs):
// Time-stamp: <2009-November-07 05:26:55>
//
// ------------------------------------------------------------------
@@ -27,22 +27,22 @@
// included below.
//
// ------------------------------------------------------------------
//
//
// Copyright (c) 2000,2001,2002,2003 ymnk, JCraft,Inc. All rights reserved.
//
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in
// the documentation and/or other materials provided with the distribution.
//
//
// 3. The names of the authors may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
//
// THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
// FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JCRAFT,
@@ -53,7 +53,7 @@
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
//
// -----------------------------------------------------------------------
//
// This program is based on zlib-1.1.3; credit to authors
@@ -82,7 +82,7 @@ namespace SharpCompress.Compressors.Deflate
/// <summary>
/// Same as None.
/// </summary>
Level0 = 0,
Level0 = None,
/// <summary>
/// The fastest but least effective compression.
@@ -92,7 +92,7 @@ namespace SharpCompress.Compressors.Deflate
/// <summary>
/// A synonym for BestSpeed.
/// </summary>
Level1 = 1,
Level1 = BestSpeed,
/// <summary>
/// A little slower, but better, than level 1.
@@ -115,14 +115,14 @@ namespace SharpCompress.Compressors.Deflate
Level5 = 5,
/// <summary>
/// The default compression level, with a good balance of speed and compression efficiency.
/// The default compression level, with a good balance of speed and compression efficiency.
/// </summary>
Default = 6,
/// <summary>
/// A synonym for Default.
/// </summary>
Level6 = 6,
Level6 = Default,
/// <summary>
/// Pretty good compression!
@@ -135,7 +135,7 @@ namespace SharpCompress.Compressors.Deflate
Level8 = 8,
/// <summary>
/// The "best" compression, where best means greatest reduction in size of the input data stream.
/// The "best" compression, where best means greatest reduction in size of the input data stream.
/// This is also the slowest compression.
/// </summary>
BestCompression = 9,
@@ -143,7 +143,7 @@ namespace SharpCompress.Compressors.Deflate
/// <summary>
/// A synonym for BestCompression.
/// </summary>
Level9 = 9
Level9 = BestCompression
}
/// <summary>
@@ -154,7 +154,7 @@ namespace SharpCompress.Compressors.Deflate
public enum CompressionStrategy
{
/// <summary>
/// The default strategy is probably the best for normal data.
/// The default strategy is probably the best for normal data.
/// </summary>
Default = 0,
@@ -181,7 +181,7 @@ namespace SharpCompress.Compressors.Deflate
{
/// <summary>
/// The ZlibException class captures exception information generated
/// by the Zlib library.
/// by the Zlib library.
/// </summary>
public ZlibException()
{
@@ -233,7 +233,7 @@ namespace SharpCompress.Compressors.Deflate
/// <param name="target">Contains the array of characteres read from the source TextReader.</param>
/// <param name="start">The starting index of the target array.</param>
/// <param name="count">The maximum number of characters to read from the source TextReader.</param>
///
///
/// <returns>
/// The number of characters read. The number will be less than or equal to
/// count depending on the data available in the source TextReader. Returns -1
@@ -405,4 +405,4 @@ namespace SharpCompress.Compressors.Deflate
BitLengths = new StaticTree(null, extra_blbits, 0, InternalConstants.BL_CODES, InternalConstants.MAX_BL_BITS);
}
}
}
}

View File

@@ -502,13 +502,37 @@ namespace SharpCompress.Compressors.Deflate
throw new ZlibException("Cannot Read after Writing.");
}
int rc = 0;
// set up the output of the deflate/inflate codec:
_z.OutputBuffer = buffer;
_z.NextOut = offset;
_z.AvailableBytesOut = count;
if (count == 0)
{
return 0;
}
if (nomoreinput && _wantCompress)
{
return 0; // workitem 8557
// no more input data available; therefore we flush to
// try to complete the read
rc = _z.Deflate(FlushType.Finish);
if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
{
throw new ZlibException(String.Format("Deflating: rc={0} msg={1}", rc, _z.Message));
}
rc = (count - _z.AvailableBytesOut);
// calculate CRC after reading
if (crc != null)
{
crc.SlurpBlock(buffer, offset, rc);
}
return rc;
}
if (buffer is null)
{
@@ -527,13 +551,6 @@ namespace SharpCompress.Compressors.Deflate
throw new ArgumentOutOfRangeException(nameof(count));
}
int rc = 0;
// set up the output of the deflate/inflate codec:
_z.OutputBuffer = buffer;
_z.NextOut = offset;
_z.AvailableBytesOut = count;
// This is necessary in case _workingBuffer has been resized. (new byte[])
// (The first reference to _workingBuffer goes through the private accessor which
// may initialize it.)

View File

@@ -0,0 +1,297 @@
/*
* BranchExecFilter.cs -- Converters for executable
* <Contribution by Louis-Michel Bergeron, on behalf of aDolus Technolog Inc., 2022>
* @TODO Encoding
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.Filters
{
[CLSCompliant(false)]
public sealed class BranchExecFilter
{
public enum Alignment : int
{
ARCH_x86_ALIGNMENT = 1,
ARCH_PowerPC_ALIGNMENT = 4,
ARCH_IA64_ALIGNMENT = 16,
ARCH_ARM_ALIGNMENT = 4,
ARCH_ARMTHUMB_ALIGNMENT = 2,
ARCH_SPARC_ALIGNMENT = 4,
}
public static void X86Converter(byte[] data, UInt32 ip, ref UInt32 state) {
long i = 0;
long size = data.Length;
UInt32 pos = 0;
UInt32 mask = state & 7;
if (size < 5)
return;
size -= 4;
ip += 5;
for (;;)
{
i = pos;
for (; i < size; i++)
{
if ((data[i] & 0xFE) == 0xE8)
{
break;
}
}
UInt32 d = (UInt32)(i) - pos;
pos = (UInt32)i;
if (i >= size)
{
state = (d > 2 ? 0 : mask >> (int)d);
return;
}
if (d > 2)
{
mask = 0;
}
else
{
mask >>= (int)d;
if (mask != 0 && (mask > 4 || mask == 3 || (((((data[(UInt32)(mask >> 1) + 1])) + 1) & 0xFE) == 0) ))
{
mask = (mask >> 1) | 4;
pos++;
continue;
}
}
if ((((data[i + 4]) + 1) & 0xFE) == 0)
{
UInt32 inst = ((UInt32)data[i + 4] << 24) | ((UInt32)data[i + 3] << 16) | ((UInt32)data[i + 2] << 8) | ((UInt32)data[i + 1]);
UInt32 cur = ip + (UInt32)pos;
pos += 5;
inst -= cur;
if (mask != 0)
{
UInt32 sh = (mask & 6) << 2;
if (((((((Byte)(inst >> (int)sh))) + 1) & 0xFE) == 0))
{
inst ^= (((UInt32)0x100 << (int)sh) - 1);
inst -= cur;
}
mask = 0;
}
data[i + 1] = (Byte)inst;
data[i + 2] = (Byte)(inst >> 8);
data[i + 3] = (Byte)(inst >> 16);
data[i + 4] = (Byte)(0 - ((inst >> 24) & 1));
}
else
{
mask = (mask >> 1) | 4;
pos++;
}
}
}
public static void PowerPCConverter(byte[] data, UInt32 ip)
{
long i = 0;
long size = data.Length;
size &= ~(UInt32)3;
ip -= 4;
for (;; ) // infinite loop
{
for (;; ) // infinite loop
{
if (i >= size)
return;
i += 4;
if ((data[i - 4] & 0xFC) == 0x48 && (data[i - 1] & 3) == 1)
break;
}
{
UInt32 inst = BitConverter.ToUInt32(data, (int)i - 4);
if (BitConverter.IsLittleEndian)
{
inst = Utility.SwapUINT32(inst);
}
inst -= (UInt32)(ip + i);
inst &= 0x03FFFFFF;
inst |= 0x48000000;
Utility.SetBigUInt32(ref data, inst, (i - 4));
}
}
}
public static void ARMConverter(byte[] data, UInt32 ip)
{
long i = 0;
long size = data.Length;
size &= ~(UInt32)3;
ip += 4;
for (;;) // infinite loop
{
for (;;) // infinite loop
{
if (i >= size)
{
return;
}
i += 4;
if (data[i - 1] == 0xEB)
break;
}
UInt32 inst = BitConverter.ToUInt32(data, (int)i - 4);
inst <<= 2;
inst -= (UInt32)(ip + i);
inst >>= 2;
inst &= 0x00FFFFFF;
inst |= 0xEB000000;
Utility.SetLittleUInt32(ref data, inst, i - 4);
}
}
public static void ARMTConverter(byte[] data, UInt32 ip)
{
long i = 0;
long size = data.Length;
size &= ~(UInt32)1;
long lim = size - 4;
for (;;)
{
UInt32 b1;
for (;;)
{
UInt32 b3;
if (i > lim)
return;
b1 = data[i + 1];
b3 = data[i + 3];
i += 2;
b1 ^= 8;
if ((b3 & b1) >= 0xF8)
break;
}
UInt32 inst = ((UInt32)b1 << 19)
+ (((UInt32)data[i + 1] & 0x7) << 8)
+ (((UInt32)data[i - 2] << 11))
+ (data[i]);
i += 2;
UInt32 cur = ((UInt32)(ip + i)) >> 1;
inst -= cur;
data[i - 4] = (Byte)(inst >> 11);
data[i - 3] = (Byte)(0xF0 | ((inst >> 19) & 0x7));
data[i - 2] = (Byte)inst;
data[i - 1] = (Byte)(0xF8 | (inst >> 8));
}
}
public static void IA64Converter(byte[] data, UInt32 ip)
{
UInt32 i = 0;
long size = data.Length;
if (size < 16)
throw new InvalidDataException("Unexpected data size");
size -= 16;
do
{
UInt32 m = ((UInt32)0x334B0000 >> (data[i] & 0x1E)) & 3;
if (m != 0)
{
m++;
do
{
UInt32 iterator = (UInt32)( (i + (m * 5) - 8));
if (((data[iterator + 3] >> (int)m) & 15) == 5
&& (((data[iterator - 1] | ((UInt32)data[iterator] << 8)) >> (int)m) & 0x70) == 0)
{
UInt32 raw = BitConverter.ToUInt32(data, (int)iterator);
UInt32 inst = raw >> (int)m;
inst = (inst & 0xFFFFF) | ((inst & (1 << 23)) >> 3);
inst <<= 4;
inst -= (ip + (UInt32)i);
inst >>= 4;
inst &= 0x1FFFFF;
inst += 0x700000;
inst &= 0x8FFFFF;
raw &= ~((UInt32)0x8FFFFF << (int)m);
raw |= (inst << (int)m);
Utility.SetLittleUInt32(ref data, raw, iterator);
}
}
while (++m <= 4);
}
i += 16;
}
while (i <= size);
return;
}
public static void SPARCConverter(byte[] data, UInt32 ip)
{
long i = 0;
long size = data.Length;
size &= ~(UInt32)3;
ip -= 4;
for (;;) // infinite loop
{
for (;;) // infinite loop
{
if (i >= size)
return;
i += 4;
if ((data[i - 4] == 0x40 && (data[i - 3] & 0xC0) == 0) ||
(data[i - 4] == 0x7F && (data[i - 3] >= 0xC0)))
break;
}
UInt32 inst = BitConverter.ToUInt32(data, (int)i - 4);
if (BitConverter.IsLittleEndian)
{
inst = Utility.SwapUINT32(inst);
}
inst <<= 2;
inst -= (UInt32)(ip + i);
inst &= 0x01FFFFFF;
inst -= (UInt32)1 << 24;
inst ^= 0xFF000000;
inst >>= 2;
inst |= 0x40000000;
Utility.SetBigUInt32(ref data, inst, (i - 4));
}
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.IO;
using System.Security.Cryptography;
using System.Text;
@@ -20,6 +20,11 @@ namespace SharpCompress.Compressors.LZMA
public AesDecoderStream(Stream input, byte[] info, IPasswordProvider pass, long limit)
{
if (pass.CryptoGetTextPassword() == null)
{
throw new SharpCompress.Common.CryptographicException("Encrypted 7Zip archive has no password specified.");
}
mStream = input;
mLimit = limit;
@@ -143,8 +148,8 @@ namespace SharpCompress.Compressors.LZMA
if ((bt & 0xC0) == 0)
{
salt = new byte[0];
iv = new byte[0];
salt = Array.Empty<byte>();
iv = Array.Empty<byte>();
return;
}

View File

@@ -103,7 +103,7 @@ namespace SharpCompress.Compressors.LZMA.LZ
{
if (historySize > K_MAX_VAL_FOR_NORMALIZE - 256)
{
throw new Exception();
throw new ArgumentOutOfRangeException(nameof(historySize));
}
_cutValue = 16 + (matchMaxLen >> 1);
@@ -423,4 +423,4 @@ namespace SharpCompress.Compressors.LZMA.LZ
_cutValue = cutValue;
}
}
}
}

View File

@@ -118,7 +118,7 @@ namespace SharpCompress.Compressors.LZMA
public override void SetLength(long value) => throw new NotImplementedException();
#if !NET461 && !NETSTANDARD2_0
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override int Read(Span<byte> buffer)
{

View File

@@ -297,14 +297,6 @@ namespace SharpCompress.Compressors.LZMA
_outWindow.ReleaseStream();
rangeDecoder.ReleaseStream();
if (!rangeDecoder.IsFinished || (inSize > 0 && rangeDecoder._total != inSize))
{
throw new DataErrorException();
}
if (_outWindow.HasPending)
{
throw new DataErrorException();
}
_outWindow = null;
}
@@ -480,4 +472,4 @@ namespace SharpCompress.Compressors.LZMA
public override void SetLength(long value) {}
*/
}
}
}

View File

@@ -1649,7 +1649,7 @@ namespace SharpCompress.Compressors.LZMA
{
for (int m = 0; m < K_MATCH_FINDER_I_DS.Length; m++)
{
if (s == K_MATCH_FINDER_I_DS[m])
if (string.Equals(s, K_MATCH_FINDER_I_DS[m], StringComparison.OrdinalIgnoreCase))
{
return m;
}
@@ -1696,7 +1696,7 @@ namespace SharpCompress.Compressors.LZMA
throw new InvalidParamException();
}
EMatchFinderType matchFinderIndexPrev = _matchFinderType;
int m = FindMatchFinder(((string)prop).ToUpper());
int m = FindMatchFinder(((string)prop));
if (m < 0)
{
throw new InvalidParamException();

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Diagnostics;
using System.IO;
@@ -21,16 +21,17 @@ namespace SharpCompress.Compressors.LZMA.Utilites
protected override void Dispose(bool disposing)
{
if (_mCurrentCrc != _mExpectedCrc)
{
throw new InvalidOperationException();
}
//Nanook - is not equal here - _mCurrentCrc is yet to be negated
//if (_mCurrentCrc != _mExpectedCrc)
//{
// throw new InvalidOperationException();
//}
try
{
if (disposing && !_mClosed)
{
_mClosed = true;
_mCurrentCrc = Crc.Finish(_mCurrentCrc);
_mCurrentCrc = Crc.Finish(_mCurrentCrc); //now becomes equal
#if DEBUG
if (_mCurrentCrc == _mExpectedCrc)
{
@@ -54,6 +55,10 @@ namespace SharpCompress.Compressors.LZMA.Utilites
}
Debug.WriteLine("entropy: " + (int)(entropy * 100) + "%");
#endif
if (_mCurrentCrc != _mExpectedCrc) //moved test to here
{
throw new InvalidOperationException();
}
}
}
finally
@@ -102,4 +107,4 @@ namespace SharpCompress.Compressors.LZMA.Utilites
_mCurrentCrc = Crc.Update(_mCurrentCrc, buffer, offset, count);
}
}
}
}

View File

@@ -50,7 +50,7 @@ namespace SharpCompress.Compressors.LZMA.Utilites
Debugger.Break();
}
throw new Exception("Assertion failed.");
throw new InvalidOperationException("Assertion failed.");
}
}
@@ -89,4 +89,4 @@ namespace SharpCompress.Compressors.LZMA.Utilites
}
}
}
}
}

View File

@@ -83,7 +83,7 @@ namespace SharpCompress.Compressors.PPMd.H
internal void AriDecNormalize()
{
// while ((low ^ (low + range)) < TOP || range < BOT && ((range = -low & (BOT - 1)) != 0 ? true : true))
// while ((low ^ (low + range)) < TOP || range < BOT && ((range = -low & (BOT - 1)) != 0 ? true : true))
// {
// code = ((code << 8) | unpackRead.getChar()&0xff)&uintMask;
// range = (range << 8)&uintMask;
@@ -118,7 +118,7 @@ namespace SharpCompress.Compressors.PPMd.H
buffer.Append(_range);
buffer.Append("\n subrange=");
buffer.Append(SubRange);
buffer.Append("]");
buffer.Append(']');
return buffer.ToString();
}
}
@@ -150,8 +150,8 @@ namespace SharpCompress.Compressors.PPMd.H
buffer.Append(_highCount);
buffer.Append("\n scale=");
buffer.Append(_scale);
buffer.Append("]");
buffer.Append(']');
return buffer.ToString();
}
}
}
}

View File

@@ -1,4 +1,4 @@
#nullable disable
#nullable disable
using System;
using System.IO;
@@ -23,6 +23,7 @@ namespace SharpCompress.Compressors.Rar
private int outCount;
private int outTotal;
private bool isDisposed;
private long _position;
public RarStream(IRarUnpack unpack, FileHeader fileHeader, Stream readStream)
{
@@ -32,6 +33,7 @@ namespace SharpCompress.Compressors.Rar
fetch = true;
unpack.DoUnpack(fileHeader, readStream, this);
fetch = false;
_position = 0;
}
protected override void Dispose(bool disposing)
@@ -56,7 +58,8 @@ namespace SharpCompress.Compressors.Rar
public override long Length => fileHeader.UncompressedSize;
public override long Position { get => fileHeader.UncompressedSize - unpack.DestSize; set => throw new NotSupportedException(); }
//commented out code always returned the length of the file
public override long Position { get => _position; /* fileHeader.UncompressedSize - unpack.DestSize;*/ set => throw new NotSupportedException(); }
public override int Read(byte[] buffer, int offset, int count)
{
@@ -80,6 +83,7 @@ namespace SharpCompress.Compressors.Rar
unpack.DoUnpack();
fetch = false;
}
_position += (long)outTotal;
return outTotal;
}
@@ -129,4 +133,4 @@ namespace SharpCompress.Compressors.Rar
}
}
}
}
}

View File

@@ -0,0 +1,68 @@
/*
* ArmFilter.cs -- XZ converter ARM executable
* <Contribution by Louis-Michel Bergeron, on behalf of aDolus Technolog Inc., 2022>
* @TODO Properties offset
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Compressors.Filters;
namespace SharpCompress.Compressors.Xz.Filters
{
internal class ArmFilter : BlockFilter
{
public override bool AllowAsLast => false;
public override bool AllowAsNonLast => true;
public override bool ChangesDataSize => false;
private UInt32 _ip = 0;
//private UInt32 _offset = 0;
public override void Init(byte[] properties)
{
if (properties.Length != 0 && properties.Length != 4)
{
throw new InvalidDataException("ARM properties unexpected length");
}
if (properties.Length == 4)
{
// Even XZ doesn't support it.
throw new InvalidDataException("ARM properties offset is not supported");
//_offset = BitConverter.ToUInt32(properties, 0);
//
//if (_offset % (UInt32)BranchExec.Alignment.ARCH_ARM_ALIGNMENT != 0)
//{
// throw new InvalidDataException("Filter offset does not match alignment");
//}
}
}
public override void ValidateFilter()
{
}
public override int Read(byte[] buffer, int offset, int count)
{
int bytesRead = BaseStream.Read(buffer, offset, count);
BranchExecFilter.ARMConverter(buffer, _ip);
_ip += (UInt32)bytesRead;
return bytesRead;
}
public override void SetBaseStream(Stream stream)
{
BaseStream = stream;
}
}
}

View File

@@ -0,0 +1,69 @@
/*
* ArmFThumbFilter.cs -- XZ converter ARMThumb executable
* <Contribution by Louis-Michel Bergeron, on behalf of aDolus Technolog Inc., 2022>
* @TODO Properties offset
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Compressors.Filters;
namespace SharpCompress.Compressors.Xz.Filters
{
internal class ArmThumbFilter : BlockFilter
{
public override bool AllowAsLast => false;
public override bool AllowAsNonLast => true;
public override bool ChangesDataSize => false;
private UInt32 _ip = 0;
//private UInt32 _offset = 0;
public override void Init(byte[] properties)
{
if (properties.Length != 0 && properties.Length != 4)
{
throw new InvalidDataException("ARM Thumb properties unexpected length");
}
if (properties.Length == 4)
{
// Even XZ doesn't support it.
throw new InvalidDataException("ARM Thumb properties offset is not supported");
//_offset = BitConverter.ToUInt32(properties, 0);
//
//if (_offset % (UInt32)BranchExec.Alignment.ARCH_ARMTHUMB_ALIGNMENT != 0)
//{
// throw new InvalidDataException("Filter offset does not match alignment");
//}
}
}
public override void ValidateFilter()
{
}
public override int Read(byte[] buffer, int offset, int count)
{
int bytesRead = BaseStream.Read(buffer, offset, count);
BranchExecFilter.ARMTConverter(buffer, _ip);
_ip += (UInt32)bytesRead;
return bytesRead;
}
public override void SetBaseStream(Stream stream)
{
BaseStream = stream;
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
@@ -20,6 +20,12 @@ namespace SharpCompress.Compressors.Xz.Filters
private static readonly Dictionary<FilterTypes, Type> FilterMap = new Dictionary<FilterTypes, Type>
{
{FilterTypes.ARCH_x86_FILTER, typeof(X86Filter) },
{FilterTypes.ARCH_PowerPC_FILTER, typeof(PowerPCFilter) },
{FilterTypes.ARCH_IA64_FILTER, typeof(IA64Filter) },
{FilterTypes.ARCH_ARM_FILTER, typeof(ArmFilter) },
{FilterTypes.ARCH_ARMTHUMB_FILTER, typeof(ArmThumbFilter) },
{FilterTypes.ARCH_SPARC_FILTER, typeof(SparcFilter) },
{FilterTypes.LZMA2, typeof(Lzma2Filter) }
};

View File

@@ -0,0 +1,67 @@
/*
* IA64Filter.cs -- XZ converter IA64 executable
* <Contribution by Louis-Michel Bergeron, on behalf of aDolus Technolog Inc., 2022>
* @TODO Properties offset
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Compressors.Filters;
namespace SharpCompress.Compressors.Xz.Filters
{
internal class IA64Filter : BlockFilter
{
public override bool AllowAsLast => false;
public override bool AllowAsNonLast => true;
public override bool ChangesDataSize => false;
private UInt32 _ip = 0;
//private UInt32 _offset = 0;
public override void Init(byte[] properties)
{
if (properties.Length != 0 && properties.Length != 4)
{
throw new InvalidDataException("IA64 properties unexpected length");
}
if (properties.Length == 4)
{
// Even XZ doesn't support it.
throw new InvalidDataException("IA64 properties offset is not supported");
//_offset = BitConverter.ToUInt32(properties, 0);
//
//if (_offset % (UInt32)BranchExec.Alignment.ARCH_IA64_ALIGNMENT != 0)
//{
// throw new InvalidDataException("Filter offset does not match alignment");
//}
}
}
public override void ValidateFilter()
{
}
public override int Read(byte[] buffer, int offset, int count)
{
int bytesRead = BaseStream.Read(buffer, offset, count);
BranchExecFilter.IA64Converter(buffer, _ip);
_ip += (UInt32)bytesRead;
return bytesRead;
}
public override void SetBaseStream(Stream stream)
{
BaseStream = stream;
}
}
}

View File

@@ -0,0 +1,66 @@
/*
* PowerPCFilter.cs -- XZ converter PowerPC executable
* <Contribution by Louis-Michel Bergeron, on behalf of aDolus Technolog Inc., 2022>
* @TODO Properties offset
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Compressors.Filters;
namespace SharpCompress.Compressors.Xz.Filters
{
internal class PowerPCFilter : BlockFilter
{
public override bool AllowAsLast => false;
public override bool AllowAsNonLast => true;
public override bool ChangesDataSize => false;
private UInt32 _ip = 0;
//private UInt32 _offset = 0;
public override void Init(byte[] properties)
{
if (properties.Length != 0 && properties.Length != 4)
{
throw new InvalidDataException("PPC properties unexpected length");
}
if (properties.Length == 4)
{
// Even XZ doesn't support it.
throw new InvalidDataException("PPC properties offset is not supported");
//_offset = BitConverter.ToUInt32(properties, 0);
//
//if (_offset % (UInt32)BranchExec.Alignment.ARCH_PowerPC_ALIGNMENT != 0)
//{
// throw new InvalidDataException("Filter offset does not match alignment");
//}
}
}
public override void ValidateFilter() { }
public override int Read(byte[] buffer, int offset, int count)
{
int bytesRead = BaseStream.Read(buffer, offset, count);
BranchExecFilter.PowerPCConverter(buffer, _ip);
_ip += (UInt32)bytesRead;
return bytesRead;
}
public override void SetBaseStream(Stream stream)
{
BaseStream = stream;
}
}
}

View File

@@ -0,0 +1,65 @@
/*
* SparcFilter.cs -- XZ converter SPARC executable
* <Contribution by Louis-Michel Bergeron, on behalf of aDolus Technolog Inc., 2022>
* @TODO Properties offset
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Compressors.Filters;
namespace SharpCompress.Compressors.Xz.Filters
{
internal class SparcFilter : BlockFilter
{
public override bool AllowAsLast => false;
public override bool AllowAsNonLast => true;
public override bool ChangesDataSize => false;
private UInt32 _ip = 0;
//private UInt32 _offset = 0;
public override void Init(byte[] properties) {
if (properties.Length != 0 && properties.Length != 4)
{
throw new InvalidDataException("SPARC properties unexpected length");
}
if (properties.Length == 4)
{
// Even XZ doesn't support it.
throw new InvalidDataException("SPARC properties offset is not supported");
//_offset = BitConverter.ToUInt32(properties, 0);
//
//if (_offset % (UInt32)BranchExec.Alignment.ARCH_SPARC_ALIGNMENT != 0)
//{
// throw new InvalidDataException("Filter offset does not match alignment");
//}
}
}
public override void ValidateFilter() {
}
public override int Read(byte[] buffer, int offset, int count){
int bytesRead = BaseStream.Read(buffer, offset, count);
BranchExecFilter.SPARCConverter(buffer, _ip);
_ip += (UInt32)bytesRead;
return bytesRead;
}
public override void SetBaseStream(Stream stream) {
BaseStream = stream;
}
}
}

View File

@@ -0,0 +1,70 @@
/*
* X86Filter.cs -- XZ converter x86 executable
* <Contribution by Louis-Michel Bergeron, on behalf of aDolus Technolog Inc., 2022>
* @TODO Properties offset
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Compressors.Filters;
namespace SharpCompress.Compressors.Xz.Filters
{
internal class X86Filter : BlockFilter
{
public override bool AllowAsLast => false;
public override bool AllowAsNonLast => true;
public override bool ChangesDataSize => false;
private UInt32 _ip = 0;
private UInt32 _state = 0;
//private UInt32 _offset = 0;
public override void Init(byte[] properties)
{
if (properties.Length != 0 && properties.Length != 4)
{
throw new InvalidDataException("X86 properties unexpected length");
}
if (properties.Length == 4)
{
// Even XZ doesn't support it.
throw new InvalidDataException("X86 properties offset is not supported");
//_offset = BitConverter.ToUInt32(properties, 0);
//
//if (_offset % (UInt32)BranchExec.Alignment.ARCH_x86_ALIGNMENT != 0)
//{
// throw new InvalidDataException("Filter offset does not match alignment");
//}
}
}
public override void ValidateFilter()
{
}
public override int Read(byte[] buffer, int offset, int count)
{
int bytesRead = BaseStream.Read(buffer, offset, count);
BranchExecFilter.X86Converter(buffer, _ip, ref _state);
_ip += (UInt32)bytesRead;
return bytesRead;
}
public override void SetBaseStream(Stream stream)
{
BaseStream = stream;
}
}
}

View File

@@ -9,7 +9,7 @@ namespace SharpCompress.Compressors.Xz
{
if (MaxBytes <= 0)
{
throw new ArgumentOutOfRangeException();
throw new ArgumentOutOfRangeException(nameof(MaxBytes));
}
if (MaxBytes > 9)

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.IO;
using System.Linq;
using System.Text;
@@ -22,7 +22,7 @@ namespace SharpCompress.Compressors.Xz
public static XZFooter FromStream(Stream stream)
{
var footer = new XZFooter(new BinaryReader(new NonDisposingStream(stream), Encoding.UTF8));
var footer = new XZFooter(new BinaryReader(NonDisposingStream.Create(stream), Encoding.UTF8));
footer.Process();
return footer;
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.IO;
using System.Linq;
using System.Text;
@@ -21,7 +21,7 @@ namespace SharpCompress.Compressors.Xz
public static XZHeader FromStream(Stream stream)
{
var header = new XZHeader(new BinaryReader(new NonDisposingStream(stream), Encoding.UTF8));
var header = new XZHeader(new BinaryReader(NonDisposingStream.Create(stream), Encoding.UTF8));
header.Process();
return header;
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -30,7 +30,7 @@ namespace SharpCompress.Compressors.Xz
public static XZIndex FromStream(Stream stream, bool indexMarkerAlreadyVerified)
{
var index = new XZIndex(new BinaryReader(new NonDisposingStream(stream), Encoding.UTF8), indexMarkerAlreadyVerified);
var index = new XZIndex(new BinaryReader(NonDisposingStream.Create(stream), Encoding.UTF8), indexMarkerAlreadyVerified);
index.Process();
return index;
}

View File

@@ -42,7 +42,7 @@ namespace SharpCompress.Crypto
public override void SetLength(long value) => throw new NotSupportedException();
#if !NET461 && !NETSTANDARD2_0
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override void Write(ReadOnlySpan<byte> buffer)
{

View File

@@ -1,11 +1,20 @@
using System;
using System;
using System.IO;
namespace SharpCompress.IO
{
public class NonDisposingStream : Stream
{
public NonDisposingStream(Stream stream, bool throwOnDispose = false)
public static NonDisposingStream Create(Stream stream, bool throwOnDispose = false)
{
if (stream is NonDisposingStream nonDisposingStream && nonDisposingStream.ThrowOnDispose == throwOnDispose)
{
return nonDisposingStream;
}
return new NonDisposingStream(stream, throwOnDispose);
}
protected NonDisposingStream(Stream stream, bool throwOnDispose = false)
{
Stream = stream;
ThrowOnDispose = throwOnDispose;
@@ -58,7 +67,7 @@ namespace SharpCompress.IO
Stream.Write(buffer, offset, count);
}
#if !NET461 && !NETSTANDARD2_0
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override int Read(Span<byte> buffer)
{
@@ -72,4 +81,4 @@ namespace SharpCompress.IO
#endif
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.IO;
namespace SharpCompress.IO
@@ -65,6 +65,19 @@ namespace SharpCompress.IO
return value;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override int Read(Span<byte> buffer)
{
var slice_len = BytesLeftToRead < buffer.Length ? BytesLeftToRead : buffer.Length;
var read = Stream.Read(buffer.Slice(0,(int)slice_len));
if (read > 0)
{
BytesLeftToRead -= read;
}
return read;
}
#endif
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException();
@@ -80,4 +93,4 @@ namespace SharpCompress.IO
throw new NotSupportedException();
}
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.IO;
namespace SharpCompress.IO
@@ -119,8 +119,10 @@ namespace SharpCompress.IO
int read;
if (isRewound && bufferStream.Position != bufferStream.Length)
{
read = bufferStream.Read(buffer, offset, count);
if (read < count)
// don't read more than left
int readCount = Math.Min(count, (int)(bufferStream.Length - bufferStream.Position));
read = bufferStream.Read(buffer, offset, readCount);
if (read < readCount)
{
int tempRead = stream.Read(buffer, offset + read, count - read);
if (IsRecording)
@@ -160,4 +162,4 @@ namespace SharpCompress.IO
throw new NotSupportedException();
}
}
}
}

View File

@@ -0,0 +1,216 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Readers;
namespace SharpCompress.IO
{
    /// <summary>
    /// Presents a sequence of parts (files or streams) of a split/multi-part
    /// archive as a single concatenated, seekable <see cref="Stream"/>.
    /// Additional parts are requested lazily through the part-provider callbacks.
    /// </summary>
    public class SourceStream : Stream
    {
        // Combined length of all parts before the current one (stays 0 in volume mode).
        private long _prevSize;
        private List<FileInfo> _files;
        private List<Stream> _streams;
        // Returns the FileInfo for part N, or null when there is no such part (file mode).
        private Func<int, FileInfo?> _getFilePart;
        // Returns the Stream for part N, or null when there is no such part (stream mode).
        private Func<int, Stream?> _getStreamPart;
        // Index of the currently active part in _streams.
        private int _stream;

        /// <summary>File-based source: <paramref name="getPart"/> supplies part N or null when exhausted.</summary>
        public SourceStream(FileInfo file, Func<int, FileInfo?> getPart, ReaderOptions options) : this(null, null, file, getPart, options)
        {
        }

        /// <summary>Stream-based source: <paramref name="getPart"/> supplies part N or null when exhausted.</summary>
        public SourceStream(Stream stream, Func<int, Stream?> getPart, ReaderOptions options) : this(stream, getPart, null, null, options)
        {
        }

        // Shared constructor: exactly one of (stream, getStreamPart) / (file, getFilePart) is non-null.
        private SourceStream(Stream? stream, Func<int, Stream?>? getStreamPart, FileInfo? file, Func<int, FileInfo?>? getFilePart, ReaderOptions options)
        {
            this.ReaderOptions = options;
            _files = new List<FileInfo>();
            _streams = new List<Stream>();
            IsFileMode = file != null;
            IsVolumes = false;
            if (!IsFileMode)
            {
                _streams.Add(stream!);
                _getStreamPart = getStreamPart!;
                _getFilePart = new Func<int, FileInfo>(a => null!);
                // Remember the backing file when the caller handed us a FileStream.
                if (stream! is FileStream)
                    _files.Add(new FileInfo(((FileStream)stream!).Name));
            }
            else
            {
                _files.Add(file!);
                _streams.Add(_files[0].OpenRead());
                _getFilePart = getFilePart!;
                _getStreamPart = new Func<int, Stream>(a => null!);
            }
            _stream = 0;
            _prevSize = 0;
        }

        /// <summary>
        /// Opens every available part eagerly, then makes part 0 current again.
        /// </summary>
        public void LoadAllParts()
        {
            for (int i = 1; SetStream(i); i++)
            {
            }
            SetStream(0);
        }

        // When true, parts are treated as independent volumes: Read does not
        // cross part boundaries and Length reports only the current part.
        public bool IsVolumes { get; set; }

        public ReaderOptions ReaderOptions { get; }

        // True when constructed from FileInfo parts rather than raw streams.
        public bool IsFileMode { get; }

        public IEnumerable<FileInfo> Files => _files;

        public IEnumerable<Stream> Streams => _streams;

        // The currently active part.
        private Stream Current => _streams[_stream];

        public bool LoadStream(int index) //ensure all parts to id are loaded
        {
            while (_streams.Count <= index)
            {
                if (IsFileMode)
                {
                    FileInfo? f = _getFilePart(_streams.Count);
                    if (f == null)
                    {
                        // Part is missing: stay on the last available one and report failure.
                        _stream = _streams.Count - 1;
                        return false;
                    }
                    //throw new Exception($"File part {idx} not available.");
                    _files.Add(f);
                    _streams.Add(_files.Last().OpenRead());
                }
                else
                {
                    Stream? s = _getStreamPart(_streams.Count);
                    if (s == null)
                    {
                        // Part is missing: stay on the last available one and report failure.
                        _stream = _streams.Count - 1;
                        return false;
                    }
                    //throw new Exception($"Stream part {idx} not available.");
                    _streams.Add(s);
                    if (s is FileStream)
                        _files.Add(new FileInfo(((FileStream)s).Name));
                }
            }
            return true;
        }

        public bool SetStream(int idx) //allow caller to switch part in multipart
        {
            if (LoadStream(idx))
                _stream = idx;
            return _stream == idx;
        }

        public override bool CanRead => true;

        public override bool CanSeek => true;

        public override bool CanWrite => false;

        // Concatenated length of all parts loaded so far, or the current part's length in volume mode.
        public override long Length => (!IsVolumes ? _streams.Sum(a => a.Length) : Current.Length);

        public override long Position
        {
            get => _prevSize + Current.Position; //_prevSize is 0 for multivolume
            set => Seek(value, SeekOrigin.Begin);
        }

        public override void Flush()
        {
            Current.Flush();
        }

        /// <summary>
        /// Reads across part boundaries: when the current part is exhausted (and
        /// not in volume mode) the next part is loaded and reading continues there.
        /// Returns the total number of bytes read, which may be short at end of data.
        /// </summary>
        public override int Read(byte[] buffer, int offset, int count)
        {
            if (count <= 0)
                return 0;
            int total = count;
            int r = -1;
            while (count != 0 && r != 0)
            {
                // Never request past the end of the current part in one call.
                r = Current.Read(buffer, offset, (int)Math.Min(count, Current.Length - Current.Position));
                count -= r;
                offset += r;
                if (!IsVolumes && count != 0 && Current.Position == Current.Length)
                {
                    _prevSize += Current.Length;
                    if (!SetStream(_stream + 1)) //will load next file if present
                        break;
                    Current.Seek(0, SeekOrigin.Begin);
                }
            }
            return total - count;
        }

        /// <summary>
        /// Seeks within the concatenated view by walking parts from the beginning
        /// until the part containing the target position is current.
        /// </summary>
        public override long Seek(long offset, SeekOrigin origin)
        {
            long pos = this.Position;
            switch (origin)
            {
                case SeekOrigin.Begin: pos = offset; break;
                case SeekOrigin.Current: pos += offset; break;
                case SeekOrigin.End: pos = Length + offset; break;
            }
            _prevSize = 0;
            if (!IsVolumes)
            {
                // Accumulate part lengths until pos falls inside the current part.
                SetStream(0);
                while (_prevSize + Current.Length < pos)
                {
                    _prevSize += Current.Length;
                    SetStream(_stream + 1);
                }
            }
            if (pos != _prevSize + Current.Position)
                Current.Seek(pos - _prevSize, SeekOrigin.Begin);
            return pos;
        }

        public override void SetLength(long value)
        {
            throw new NotImplementedException();
        }

        public override void Write(byte[] buffer, int offset, int count)
        {
            throw new NotImplementedException();
        }

        public override void Close()
        {
            if (this.IsFileMode || !this.ReaderOptions.LeaveStreamOpen) //close if file mode or options specify it
            {
                foreach (Stream stream in _streams)
                {
                    try
                    {
                        // Best-effort: a failure disposing one part must not block the rest.
                        if (stream != null)
                            stream.Dispose();
                    }
                    catch { }
                }
                _streams.Clear();
                _files.Clear();
            }
        }

        protected override void Dispose(bool disposing)
        {
            this.Close();
            base.Dispose(disposing);
        }
    }
}

View File

@@ -1,4 +1,4 @@
#if NET461 || NETSTANDARD2_0
#if NETFRAMEWORK || NETSTANDARD2_0
using System;
using System.Buffers;

View File

@@ -1,4 +1,4 @@
#if NET461 || NETSTANDARD2_0
#if NETFRAMEWORK || NETSTANDARD2_0
namespace SharpCompress
{

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -38,7 +38,7 @@ namespace SharpCompress.Readers
public abstract TVolume Volume { get; }
/// <summary>
/// Current file entry
/// Current file entry
/// </summary>
public TEntry Entry => entriesForCurrentReadStream!.Current;
@@ -131,12 +131,13 @@ namespace SharpCompress.Readers
private void Skip()
{
var part = Entry.Parts.First();
if (ArchiveType != ArchiveType.Rar
&& !Entry.IsSolid
&& Entry.CompressedSize > 0)
{
//not solid and has a known compressed size then we can skip raw bytes.
var part = Entry.Parts.First();
var rawStream = part.GetRawStream();
if (rawStream != null)
@@ -150,7 +151,7 @@ namespace SharpCompress.Readers
//don't know the size so we have to try to decompress to skip
using (var s = OpenEntryStream())
{
s.Skip();
s.SkipEntry();
}
}
@@ -160,9 +161,14 @@ namespace SharpCompress.Readers
{
throw new ArgumentException("WriteEntryTo or OpenEntryStream can only be called once.");
}
if ((writableStream is null) || (!writableStream.CanWrite))
if (writableStream is null)
{
throw new ArgumentNullException("A writable Stream was required. Use Cancel if that was intended.");
throw new ArgumentNullException(nameof(writableStream));
}
if (!writableStream.CanWrite)
{
throw new ArgumentException("A writable Stream was required. Use Cancel if that was intended.");
}
Write(writableStream);
@@ -228,4 +234,4 @@ namespace SharpCompress.Readers
EntryExtractionProgress?.Invoke(this, new ReaderExtractionEventArgs<IEntry>(entry, new ReaderProgress(entry, bytesTransferred, iterations)));
}
}
}
}

View File

@@ -1,4 +1,4 @@
using System.IO;
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
@@ -6,8 +6,8 @@ namespace SharpCompress.Readers.Rar
{
internal class NonSeekableStreamFilePart : RarFilePart
{
internal NonSeekableStreamFilePart(MarkHeader mh, FileHeader fh)
: base(mh, fh)
internal NonSeekableStreamFilePart(MarkHeader mh, FileHeader fh, int index = 0)
: base(mh, fh, index)
{
}
@@ -18,4 +18,4 @@ namespace SharpCompress.Readers.Rar
internal override string FilePartName => "Unknown Stream - File Entry: " + FileHeader.FileName;
}
}
}

View File

@@ -1,4 +1,4 @@
using System.Collections.Generic;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
@@ -8,14 +8,14 @@ namespace SharpCompress.Readers.Rar
{
public class RarReaderVolume : RarVolume
{
internal RarReaderVolume(Stream stream, ReaderOptions options)
: base(StreamingMode.Streaming, stream, options)
internal RarReaderVolume(Stream stream, ReaderOptions options, int index = 0)
: base(StreamingMode.Streaming, stream, options, index)
{
}
internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader)
{
return new NonSeekableStreamFilePart(markHeader, fileHeader);
return new NonSeekableStreamFilePart(markHeader, fileHeader, this.Index);
}
internal override IEnumerable<RarFilePart> ReadFileParts()
@@ -23,4 +23,4 @@ namespace SharpCompress.Readers.Rar
return GetVolumeFileParts();
}
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.IO;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Rar;
@@ -58,7 +58,7 @@ namespace SharpCompress.Readers
if (BZip2Stream.IsBZip2(rewindableStream))
{
rewindableStream.Rewind(false);
BZip2Stream testStream = new BZip2Stream(new NonDisposingStream(rewindableStream), CompressionMode.Decompress, false);
BZip2Stream testStream = new BZip2Stream(NonDisposingStream.Create(rewindableStream), CompressionMode.Decompress, false);
if (TarArchive.IsTarFile(testStream))
{
rewindableStream.Rewind(true);
@@ -70,7 +70,7 @@ namespace SharpCompress.Readers
if (LZipStream.IsLZipFile(rewindableStream))
{
rewindableStream.Rewind(false);
LZipStream testStream = new LZipStream(new NonDisposingStream(rewindableStream), CompressionMode.Decompress);
LZipStream testStream = new LZipStream(NonDisposingStream.Create(rewindableStream), CompressionMode.Decompress);
if (TarArchive.IsTarFile(testStream))
{
rewindableStream.Rewind(true);

View File

@@ -2,11 +2,11 @@
<PropertyGroup>
<AssemblyTitle>SharpCompress - Pure C# Decompression/Compression</AssemblyTitle>
<NeutralLanguage>en-US</NeutralLanguage>
<VersionPrefix>0.29.0</VersionPrefix>
<AssemblyVersion>0.29.0</AssemblyVersion>
<FileVersion>0.29.0</FileVersion>
<VersionPrefix>0.32.2</VersionPrefix>
<AssemblyVersion>0.32.2</AssemblyVersion>
<FileVersion>0.32.2</FileVersion>
<Authors>Adam Hathcock</Authors>
<TargetFrameworks>netstandard2.0;netstandard2.1;netcoreapp3.1;net5.0</TargetFrameworks>
<TargetFrameworks>net461;netstandard2.0;netstandard2.1;netcoreapp3.1;net6.0</TargetFrameworks>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<AllowUnsafeBlocks>false</AllowUnsafeBlocks>
<AssemblyName>SharpCompress</AssemblyName>
@@ -15,28 +15,36 @@
<PackageId>SharpCompress</PackageId>
<PackageTags>rar;unrar;zip;unzip;bzip2;gzip;tar;7zip;lzip;xz</PackageTags>
<PackageProjectUrl>https://github.com/adamhathcock/sharpcompress</PackageProjectUrl>
<PackageLicense>https://github.com/adamhathcock/sharpcompress/blob/master/LICENSE.txt</PackageLicense>
<PackageLicenseFile>LICENSE.txt</PackageLicenseFile>
<GenerateAssemblyTitleAttribute>false</GenerateAssemblyTitleAttribute>
<GenerateAssemblyProductAttribute>false</GenerateAssemblyProductAttribute>
<Description>SharpCompress is a compression library for NET Standard 2.0/2.1/NET 5.0 that can unrar, decompress 7zip, decompress xz, zip/unzip, tar/untar lzip/unlzip, bzip2/unbzip2 and gzip/ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip is implemented.</Description>
<LangVersion>9</LangVersion>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<PublishRepositoryUrl>true</PublishRepositoryUrl>
<IncludeSymbols>true</IncludeSymbols>
<SymbolPackageFormat>snupkg</SymbolPackageFormat>
<IsTrimmable>true</IsTrimmable>
<RunAnalyzersDuringLiveAnalysis>False</RunAnalyzersDuringLiveAnalysis>
<RunAnalyzersDuringBuild>False</RunAnalyzersDuringBuild>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.SourceLink.GitHub" Version="1.0.0" PrivateAssets="All" />
</ItemGroup>
<None Include="..\..\LICENSE.txt" Pack="true" Visible="false" PackagePath=""/>
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.SourceLink.GitHub" Version="1.1.1" PrivateAssets="All" />
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard2.1' ">
<PackageReference Include="System.Text.Encoding.CodePages" Version="5.0.0" />
<PackageReference Include="System.Text.Encoding.CodePages" Version="6.0.0" />
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard2.0' ">
<PackageReference Include="System.Text.Encoding.CodePages" Version="5.0.0" />
<PackageReference Include="System.Memory" Version="4.5.3" />
<PackageReference Include="System.Buffers" Version="4.4.0" />
<PackageReference Include="System.Text.Encoding.CodePages" Version="6.0.0" />
<PackageReference Include="System.Memory" Version="4.5.4" />
</ItemGroup>
<ItemGroup Condition=" '$(VersionlessImplicitFrameworkDefine)' == 'NETFRAMEWORK' ">
<PackageReference Include="System.Text.Encoding.CodePages" Version="6.0.0" />
<PackageReference Include="System.Memory" Version="4.5.4" />
</ItemGroup>
</Project>

View File

@@ -72,17 +72,17 @@ namespace SharpCompress
{
if (sourceIndex > Int32.MaxValue || sourceIndex < Int32.MinValue)
{
throw new ArgumentOutOfRangeException();
throw new ArgumentOutOfRangeException(nameof(sourceIndex));
}
if (destinationIndex > Int32.MaxValue || destinationIndex < Int32.MinValue)
{
throw new ArgumentOutOfRangeException();
throw new ArgumentOutOfRangeException(nameof(destinationIndex));
}
if (length > Int32.MaxValue || length < Int32.MinValue)
{
throw new ArgumentOutOfRangeException();
throw new ArgumentOutOfRangeException(nameof(length));
}
Array.Copy(sourceArray, (int)sourceIndex, destinationArray, (int)destinationIndex, (int)length);
@@ -165,6 +165,41 @@ namespace SharpCompress
}
}
public static bool Find(this Stream source, byte[] array)
{
byte[] buffer = GetTransferByteArray();
try
{
var pos = source.Position;
int count = 0;
var len = source.Read(buffer, 0, buffer.Length);
source.Position = pos + len;
do
{
for (int i = 0; i < len; i++)
{
if (array[count] == buffer[i])
{
count++;
if (count == array.Length)
{
source.Position = source.Position - len + i - array.Length +1;
return true;
}
}
}
}
while ((len = source.Read(buffer, 0, buffer.Length)) > 0);
}
finally
{
ArrayPool<byte>.Shared.Return(buffer);
}
return false;
}
public static DateTime DosDateToDateTime(UInt16 iDate, UInt16 iTime)
{
int year = iDate / 512 + 1980;
@@ -280,7 +315,7 @@ namespace SharpCompress
{
return ArrayPool<byte>.Shared.Rent(81920);
}
public static bool ReadFully(this Stream stream, byte[] buffer)
{
int total = 0;
@@ -295,7 +330,7 @@ namespace SharpCompress
}
return (total >= buffer.Length);
}
public static bool ReadFully(this Stream stream, Span<byte> buffer)
{
int total = 0;
@@ -315,5 +350,46 @@ namespace SharpCompress
{
return source.Replace('\0', ' ').Trim();
}
/// <summary>
/// Swap the endianness of a UINT32
/// </summary>
/// <param name="number">The UINT32 you want to swap his endianness</param>
/// <returns>Return the new UINT32 in the other endianness format</returns>
public static UInt32 SwapUINT32(UInt32 number)
{
return (number >> 24) |
((number << 8) & 0x00FF0000) |
((number >> 8) & 0x0000FF00) |
(number << 24);
}
/// <summary>
/// Insert a little endian UINT32 into a byte array
/// </summary>
/// <param name="buffer">The buffer to insert into</param>
/// <param name="number">The UINT32 to insert</param>
/// <param name="offset">Offset of the buffer to insert into</param>
public static void SetLittleUInt32(ref byte[] buffer, UInt32 number, long offset)
{
buffer[offset] = (byte)(number);
buffer[offset + 1] = (byte)(number >> 8);
buffer[offset + 2] = (byte)(number >> 16);
buffer[offset + 3] = (byte)(number >> 24);
}
/// <summary>
/// Insert a big endian UINT32 into a byte array
/// </summary>
/// <param name="buffer">The buffer to insert into</param>
/// <param name="number">The UINT32 to insert</param>
/// <param name="offset">Offset of the buffer to insert into</param>
public static void SetBigUInt32(ref byte[] buffer, UInt32 number, long offset)
{
buffer[offset] = (byte)(number >> 24);
buffer[offset + 1] = (byte)(number >> 16);
buffer[offset + 2] = (byte)(number >> 8);
buffer[offset + 3] = (byte)number;
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Compressors;
@@ -16,7 +16,7 @@ namespace SharpCompress.Writers.GZip
{
if (WriterOptions.LeaveStreamOpen)
{
destination = new NonDisposingStream(destination);
destination = NonDisposingStream.Create(destination);
}
InitalizeStream(new GZipStream(destination, CompressionMode.Compress,
options?.CompressionLevel ?? CompressionLevel.Default,
@@ -46,4 +46,4 @@ namespace SharpCompress.Writers.GZip
_wroteToStream = true;
}
}
}
}

View File

@@ -37,7 +37,7 @@ namespace SharpCompress.Writers
public static void WriteAll(this IWriter writer,
string directory,
string searchPattern = "*",
Expression<Func<string, bool>>? fileSearchFunc = null,
Func<string, bool>? fileSearchFunc = null,
SearchOption option = SearchOption.TopDirectoryOnly)
{
if (!Directory.Exists(directory))
@@ -49,10 +49,10 @@ namespace SharpCompress.Writers
{
fileSearchFunc = n => true;
}
foreach (var file in Directory.EnumerateFiles(directory, searchPattern, option).Where(fileSearchFunc.Compile()))
foreach (var file in Directory.EnumerateFiles(directory, searchPattern, option).Where(fileSearchFunc))
{
writer.Write(file.Substring(directory.Length), file);
}
}
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.Tar.Headers;
@@ -25,7 +25,7 @@ namespace SharpCompress.Writers.Tar
}
if (WriterOptions.LeaveStreamOpen)
{
destination = new NonDisposingStream(destination);
destination = NonDisposingStream.Create(destination);
}
switch (options.CompressionType)
{

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Buffers.Binary;
using System.IO;
using System.Text;
@@ -98,10 +98,24 @@ namespace SharpCompress.Writers.Zip
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 0);
outputStream.Write(intBuf.Slice(0, 2)); // disk=0
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)flags);
outputStream.Write(intBuf.Slice(0, 2)); // file type: binary
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)flags);
outputStream.Write(intBuf.Slice(0, 2)); // Internal file attributes
// Internal file attributes:
// Bit 0: apparent ASCII/ text file
// Bit 1: reserved
// Bit 2: control field records precede logical records
// Bits 3 - 16: unused
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 0);
outputStream.Write(intBuf.Slice(0, 2)); // file type: binary, Internal file attributes
// External flags are host-dependent, this might match DOS
// Bit 0: Read-Only
// Bit 1: Hidden
// Bit 2: System
// Bit 3: Label
// Bit 4: Directory
// Bit 5: Archive
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 0);
outputStream.Write(intBuf.Slice(0, 2)); // External file attributes
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 0x8100);
outputStream.Write(intBuf.Slice(0, 2));

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
@@ -40,7 +40,7 @@ namespace SharpCompress.Writers.Zip
if (WriterOptions.LeaveStreamOpen)
{
destination = new NonDisposingStream(destination);
destination = NonDisposingStream.Create(destination);
}
InitalizeStream(destination);
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -12,6 +12,15 @@ namespace SharpCompress.Test
{
public class ArchiveTests : ReaderTests
{
protected void ArchiveGetParts(IEnumerable<string> testArchives)
{
string[] arcs = testArchives.Select(a => Path.Combine(TEST_ARCHIVES_PATH, a)).ToArray();
string[] found = ArchiveFactory.GetFileParts(arcs[0]).ToArray();
Assert.Equal(arcs.Length, found.Length);
for (int i = 0; i < arcs.Length; i++)
Assert.Equal(arcs[i], found[i]);
}
protected void ArchiveStreamReadExtractAll(string testArchive, CompressionType compression)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
@@ -23,31 +32,42 @@ namespace SharpCompress.Test
{
foreach (var path in testArchives)
{
using (var stream = new NonDisposingStream(File.OpenRead(path), true))
using (var archive = ArchiveFactory.Open(stream))
using (var stream = NonDisposingStream.Create(File.OpenRead(path), true))
{
Assert.True(archive.IsSolid);
using (var reader = archive.ExtractAllEntries())
try
{
UseReader(reader, compression);
}
VerifyFiles();
using (var archive = ArchiveFactory.Open(stream))
{
Assert.True(archive.IsSolid);
using (var reader = archive.ExtractAllEntries())
{
UseReader(reader, compression);
}
VerifyFiles();
if (archive.Entries.First().CompressionType == CompressionType.Rar)
if (archive.Entries.First().CompressionType == CompressionType.Rar)
{
stream.ThrowOnDispose = false;
return;
}
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
entry.WriteToDirectory(SCRATCH_FILES_PATH,
new ExtractionOptions
{
ExtractFullPath = true,
Overwrite = true
});
}
stream.ThrowOnDispose = false;
}
}
catch (Exception)
{
// Otherwise this will hide the original exception.
stream.ThrowOnDispose = false;
return;
throw;
}
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
entry.WriteToDirectory(SCRATCH_FILES_PATH,
new ExtractionOptions
{
ExtractFullPath = true,
Overwrite = true
});
}
stream.ThrowOnDispose = false;
}
VerifyFiles();
}
@@ -68,7 +88,7 @@ namespace SharpCompress.Test
{
foreach (var path in testArchives)
{
using (var stream = new NonDisposingStream(File.OpenRead(path), true))
using (var stream = NonDisposingStream.Create(File.OpenRead(path), true))
using (var archive = ArchiveFactory.Open(stream, readerOptions))
{
try
@@ -95,6 +115,97 @@ namespace SharpCompress.Test
}
}
protected void ArchiveStreamMultiRead(ReaderOptions readerOptions = null, params string[] testArchives)
{
ArchiveStreamMultiRead(readerOptions, testArchives.Select(x => Path.Combine(TEST_ARCHIVES_PATH, x)));
}
protected void ArchiveStreamMultiRead(ReaderOptions readerOptions, IEnumerable<string> testArchives)
{
using (var archive = ArchiveFactory.Open(testArchives.Select(a => new FileInfo(a)), readerOptions))
{
try
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
entry.WriteToDirectory(SCRATCH_FILES_PATH,
new ExtractionOptions()
{
ExtractFullPath = true,
Overwrite = true
});
}
}
catch (IndexOutOfRangeException)
{
throw;
}
}
VerifyFiles();
}
protected void ArchiveOpenStreamRead(ReaderOptions readerOptions = null, params string[] testArchives)
{
ArchiveOpenStreamRead(readerOptions, testArchives.Select(x => Path.Combine(TEST_ARCHIVES_PATH, x)));
}
protected void ArchiveOpenStreamRead(ReaderOptions readerOptions, IEnumerable<string> testArchives)
{
using (var archive = ArchiveFactory.Open(testArchives.Select(f => new FileInfo(f)), null))
{
try
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
entry.WriteToDirectory(SCRATCH_FILES_PATH,
new ExtractionOptions()
{
ExtractFullPath = true,
Overwrite = true
});
}
}
catch (IndexOutOfRangeException)
{
throw;
}
}
VerifyFiles();
}
protected void ArchiveOpenEntryVolumeIndexTest(int[][] results, ReaderOptions readerOptions = null, params string[] testArchives)
{
ArchiveOpenEntryVolumeIndexTest(results, readerOptions, testArchives.Select(x => Path.Combine(TEST_ARCHIVES_PATH, x)));
}
protected void ArchiveOpenEntryVolumeIndexTest(int[][] results, ReaderOptions readerOptions, IEnumerable<string> testArchives)
{
string[] src = testArchives.ToArray();
using (var archive = ArchiveFactory.Open(testArchives.Select(f => new FileInfo(f)), null))
{
try
{
int idx = 0;
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
Assert.Equal(entry.VolumeIndexFirst, results[idx][0]);
Assert.Equal(entry.VolumeIndexLast, results[idx][1]);
Assert.Equal(src[entry.VolumeIndexFirst], archive.Volumes.First(a => a.Index == entry.VolumeIndexFirst).FileName);
Assert.Equal(src[entry.VolumeIndexLast], archive.Volumes.First(a => a.Index == entry.VolumeIndexLast).FileName);
idx++;
}
}
catch (IndexOutOfRangeException)
{
throw;
}
}
}
protected void ArchiveFileRead(string testArchive, ReaderOptions readerOptions = null)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);

View File

@@ -0,0 +1,157 @@
/*
* BranchExecTests.cs -- Test for converters
* <Contribution by Louis-Michel Bergeron, on behalf of aDolus Technolog Inc., 2022>
*
* All data is extracted from busybox, where "ip" is the offset within the busybox file.
*/
using SharpCompress.Compressors.Filters;
using Xunit;
namespace SharpCompress.Test.Filters
{
public class BranchExecTests
{
private static byte[] x86resultData { get; } = new byte[] {
0x12, 0x00, 0x00, 0x00, 0x02, 0x0B, 0x00, 0x00, 0xE8, 0xBD, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x12, 0x00, 0x00, 0x00, 0x6D, 0x01, 0x00, 0x00, 0xF0, 0xCA,
0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x12, 0x00, 0x00, 0x00, 0xBC, 0x09, 0x00, 0x00, 0x14, 0xC2, 0x00, 0x00, 0xE0, 0x01, 0x00, 0x00, 0x12, 0x00, 0x00, 0x00,
0x98, 0x0B, 0x00, 0x00, 0x60, 0x75, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0xF1, 0xFF, 0x42, 0x01, 0x00, 0x00, 0x08, 0xC8, 0x00, 0x00, 0x1C, 0x00,
};
private static byte[] x86Data { get; } = new byte[] {
0x12, 0x00, 0x00, 0x00, 0x02, 0x0B, 0x00, 0x00, 0xE8, 0xCA, 0x20, 0x00, 0x00, 0x07, 0x00, 0x00, 0x12, 0x00, 0x00, 0x00, 0x6D, 0x01, 0x00, 0x00, 0xF0, 0xCA,
0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x12, 0x00, 0x00, 0x00, 0xBC, 0x09, 0x00, 0x00, 0x14, 0xC2, 0x00, 0x00, 0xE0, 0x01, 0x00, 0x00, 0x12, 0x00, 0x00, 0x00,
0x98, 0x0B, 0x00, 0x00, 0x60, 0x75, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0xF1, 0xFF, 0x42, 0x01, 0x00, 0x00, 0x08, 0xC8, 0x00, 0x00, 0x1C, 0x00,
};
private static byte[] ppcResultData { get; } = new byte[] {
0xF8, 0x6B, 0x2E, 0x8C, 0x95, 0xC5, 0x4B, 0x1B, 0x94, 0x78, 0x9E, 0x7C, 0xBD, 0x8B, 0xA8, 0xAF, 0x31, 0x20, 0xFE, 0x0F, 0xB3, 0x15, 0x9A, 0x7C, 0xD5, 0x5C,
0xC2, 0xC0, 0xEC, 0xE9, 0x43, 0x2B, 0xD0, 0x9F, 0x2C, 0xFC, 0xB8, 0x2B, 0x6B, 0x15, 0xCD, 0x3F, 0x0C, 0xAF, 0x8F, 0x68, 0xB0, 0x6E, 0x6B, 0x30, 0x2E, 0x8C,
0x3F, 0x7E, 0x96, 0x7C, 0x93, 0xB2, 0xA4, 0x0E, 0x43, 0xEA, 0x20, 0x10, 0x38, 0x6D, 0x37, 0xF8, 0x87, 0xFE, 0xA9, 0x63, 0x75, 0xF5, 0x56, 0x34, 0x4A, 0xE3,
0xCF, 0x89, 0x18, 0x08, 0xC2, 0x76, 0x74, 0x12, 0xEC, 0xA7, 0x6D, 0xC2, 0xB7, 0x1B, 0x7A, 0xB2, 0xD4, 0xED
};
private static byte[] ppcData { get; } = new byte[] {
0xF8, 0x6B, 0x2E, 0x8C, 0x95, 0xC5, 0x4B, 0x1B, 0x94, 0x78, 0x9E, 0x7C, 0xBD, 0x8B, 0xA8, 0xAF, 0x31, 0x20, 0xFE, 0x0F, 0xB3, 0x15, 0x9A, 0x7C, 0xD5, 0x5C,
0xC2, 0xC0, 0xEC, 0xE9, 0x43, 0x2B, 0xD0, 0x9F, 0x2C, 0xFC, 0xB8, 0x2B, 0x6B, 0x15, 0xCD, 0x3F, 0x0C, 0xAF, 0x8F, 0x68, 0xB0, 0x6E, 0x6B, 0x30, 0x2E, 0x8C,
0x3F, 0x7E, 0x96, 0x7C, 0x93, 0xB2, 0xA4, 0x0E, 0x43, 0xEA, 0x20, 0x10, 0x38, 0x6D, 0x37, 0xF8, 0x87, 0xFE, 0xA9, 0x63, 0x75, 0xF5, 0x56, 0x34, 0x4A, 0xE3,
0xD6, 0x75, 0x18, 0x08, 0xC2, 0x76, 0x74, 0x12, 0xEC, 0xA7, 0x6D, 0xC2, 0xB7, 0x1B, 0x7A, 0xB2, 0xD4, 0xED
};
private static byte[] armResultData { get; } = new byte[] {
0x7C, 0xFC, 0x0A, 0x00, 0x16, 0x42, 0x01, 0x00, 0x80, 0xFC, 0x0A, 0x00, 0x16, 0xB8, 0x00, 0x00, 0x84, 0xFC, 0x0A, 0x00, 0x16, 0x3A, 0x00, 0x00, 0x04, 0xE0,
0x2D, 0xE5, 0x04, 0xD0, 0x4D, 0xE2, 0x0E, 0x04, 0x00, 0xEB, 0x04, 0xD0, 0x8D, 0xE2, 0x04, 0xE0, 0x9D, 0xE4, 0x1E, 0xFF, 0x2F, 0xE1, 0x04, 0xE0, 0x2D, 0xE5,
0x04, 0xE0, 0x9F, 0xE5, 0x0E, 0xE0, 0x8F, 0xE0, 0x08, 0xF0, 0xBE, 0xE5, 0xF0, 0x3A, 0x0A, 0x00, 0x00, 0xC6, 0x8F, 0xE2, 0xA3, 0xCA, 0x8C, 0xE2, 0xF0, 0xFA,
0xBC, 0xE5, 0x00, 0xC6, 0x8F, 0xE2, 0xA3, 0xCA, 0x8C, 0xE2, 0xE8, 0xFA, 0xBC, 0xE5, 0x00, 0xC6, 0x8F, 0xE2
};
private static byte[] armData { get; } = new byte[] {
0x7C, 0xFC, 0x0A, 0x00, 0x16, 0x42, 0x01, 0x00, 0x80, 0xFC, 0x0A, 0x00, 0x16, 0xB8, 0x00, 0x00, 0x84, 0xFC, 0x0A, 0x00, 0x16, 0x3A, 0x00, 0x00, 0x04, 0xE0,
0x2D, 0xE5, 0x04, 0xD0, 0x4D, 0xE2, 0x18, 0x13, 0x00, 0xEB, 0x04, 0xD0, 0x8D, 0xE2, 0x04, 0xE0, 0x9D, 0xE4, 0x1E, 0xFF, 0x2F, 0xE1, 0x04, 0xE0, 0x2D, 0xE5,
0x04, 0xE0, 0x9F, 0xE5, 0x0E, 0xE0, 0x8F, 0xE0, 0x08, 0xF0, 0xBE, 0xE5, 0xF0, 0x3A, 0x0A, 0x00, 0x00, 0xC6, 0x8F, 0xE2, 0xA3, 0xCA, 0x8C, 0xE2, 0xF0, 0xFA,
0xBC, 0xE5, 0x00, 0xC6, 0x8F, 0xE2, 0xA3, 0xCA, 0x8C, 0xE2, 0xE8, 0xFA, 0xBC, 0xE5, 0x00, 0xC6, 0x8F, 0xE2
};
private static byte[] armtResultData { get; } = new byte[] {
0x95, 0x23, 0xB6, 0xB1, 0xBE, 0x60, 0x79, 0xF0, 0xF6, 0x01, 0xD9, 0x7F, 0x2E, 0x03, 0x31, 0x1C, 0xFD, 0xD3, 0x40, 0x0F, 0x21, 0x3C, 0x06, 0x97, 0xE5, 0xC3,
0x57, 0x11, 0x76, 0x6F, 0xE3, 0x70, 0xED, 0x49, 0xCB, 0xB5, 0xC9, 0x42, 0x59, 0x10, 0x2F, 0xBD, 0xAE, 0xB1, 0x40, 0x4D, 0x9D, 0x7C, 0xE9, 0xFC, 0x48, 0x3E,
0xBC, 0x7F, 0x0B, 0x23, 0xB0, 0x8A, 0x4D, 0x02, 0x39, 0xC4, 0xFB, 0x66, 0x83, 0x7F, 0xA7, 0xBD, 0x12, 0xAC, 0xED, 0x31, 0x34, 0x93, 0x4D, 0x8D, 0xD7, 0x94,
0x93, 0x1C, 0x0A, 0x50, 0x54, 0x4B, 0x03, 0x55, 0x27, 0xFE, 0xCE, 0x29, 0x66, 0x52, 0x81, 0xAE, 0x69, 0xA0, 0x69, 0xF2, 0x3D, 0xFF, 0xA1, 0x8A, 0x5D, 0x61,
0x7D, 0xC5, 0x94, 0x0A, 0x7D, 0xED, 0x11, 0x0F
};
private static byte[] armtData { get; } = new byte[] {
0x95, 0x23, 0xB6, 0xB1, 0xBE, 0x60, 0x79, 0xF0, 0xF6, 0x01, 0xD9, 0x7F, 0x2E, 0x03, 0x31, 0x1C, 0xFD, 0xD3, 0x40, 0x0F, 0x21, 0x3C, 0x06, 0x97, 0xE5, 0xC3,
0x57, 0x11, 0x76, 0x6F, 0xE3, 0x70, 0xED, 0x49, 0xCB, 0xB5, 0xC9, 0x42, 0x59, 0x10, 0x2F, 0xBD, 0xAE, 0xB1, 0x40, 0x4D, 0x9D, 0x7C, 0xE9, 0xFC, 0x48, 0x3E,
0xBC, 0x7F, 0x0B, 0x23, 0xB0, 0x8A, 0x4D, 0x02, 0x39, 0xC4, 0xFB, 0x66, 0x83, 0x7F, 0xA7, 0xBD, 0x12, 0xAC, 0xED, 0x31, 0x34, 0x93, 0x4D, 0x8D, 0xD7, 0x94,
0x93, 0x1C, 0x0A, 0x50, 0x54, 0x4B, 0x03, 0x55, 0x27, 0xFE, 0xCE, 0x29, 0x66, 0x52, 0x81, 0xAE, 0x69, 0xA0, 0x6A, 0xF2, 0x6F, 0xFC, 0xA1, 0x8A, 0x5D, 0x61,
0x7D, 0xC5, 0x94, 0x0A, 0x7D, 0xED, 0x11, 0x0F
};
private static byte[] ia64ResultData { get; } = new byte[] {
0x4D, 0xF8, 0xF2, 0x0D, 0x06, 0x2F, 0x74, 0x0F, 0xF0, 0x91, 0x06, 0x0B, 0x19, 0x22, 0x91, 0x5A, 0x66, 0x56, 0xA7, 0x15, 0x77, 0x1E, 0x2F, 0xA3, 0xE4, 0xDE,
0x93, 0x1C, 0xD5, 0xCE, 0x6E, 0x45, 0x36, 0x15, 0x15, 0x65, 0x4E, 0xC5, 0xA3, 0x8C, 0x5A, 0x8B, 0x8A, 0x1C, 0x12, 0x5B, 0x39, 0x1F, 0xA0, 0xF2, 0x93, 0x7C,
0x7F, 0x5D, 0xD9, 0x30, 0x1F, 0xF6, 0x5C, 0x10, 0x62, 0x3E, 0xB4, 0x64, 0x56, 0x48, 0xB2, 0x20, 0x39, 0xE8, 0x44, 0x10, 0x87, 0x9E, 0x2C, 0xFC, 0x29, 0x0E,
0x20, 0x76, 0xCE, 0xDA, 0x93, 0x1C, 0xED, 0x54, 0x0D, 0xAF, 0xEC, 0xDE, 0x93, 0x1C, 0x2B, 0x72, 0xD5, 0x0D
};
private static byte[] ia64Data { get; } = new byte[] {
0x4D, 0xF8, 0xF2, 0x0D, 0x06, 0x2F, 0x74, 0x0F, 0xF0, 0x91, 0x06, 0x0B, 0x19, 0x22, 0x91, 0x5A, 0x66, 0x56, 0xA7, 0x15, 0x77, 0x1E, 0x2F, 0xA3, 0xE4, 0xDE,
0x93, 0x1C, 0xD5, 0xCE, 0x6E, 0x45, 0x36, 0x15, 0x15, 0x65, 0x4E, 0xC5, 0xA3, 0x8C, 0x5A, 0x8B, 0x8A, 0x1C, 0x12, 0x5B, 0x39, 0x1F, 0xA0, 0xF2, 0x93, 0x7C,
0x7F, 0x5D, 0xD9, 0x30, 0x1F, 0xF6, 0x5C, 0x10, 0x62, 0x3E, 0xB4, 0x64, 0x56, 0x48, 0xB2, 0x20, 0x39, 0xE8, 0x44, 0x80, 0x8C, 0x9E, 0x2C, 0xFC, 0x29, 0x0E,
0x20, 0x76, 0xCE, 0xDA, 0x93, 0x1C, 0xED, 0x54, 0x0D, 0xAF, 0xEC, 0xDE, 0x93, 0x1C, 0x2B, 0x72, 0xD5, 0x0D
};
private static byte[] sparcResultData { get; } = new byte[] {
0x78, 0x2E, 0x73, 0x6F, 0x2E, 0x33, 0x00, 0x00, 0x07, 0x01, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x0B, 0x00, 0x00, 0x00, 0xA2, 0x0D,
0x10, 0x12, 0x30, 0x03, 0xC9, 0x37, 0x40, 0x1A, 0x85, 0xD9, 0x44, 0x34, 0x40, 0x32, 0x85, 0xA0, 0x30, 0x40, 0x00, 0x70, 0x00, 0x40, 0x84, 0x80, 0x04, 0x00,
0xE4, 0xAC, 0x07, 0x04, 0x21, 0x44, 0x02, 0x20, 0x10, 0x00, 0x40, 0xC2, 0x89, 0x98, 0x85, 0x00, 0x58, 0x6A, 0x41, 0x8E, 0x18, 0xA1, 0x91, 0x00, 0x10, 0x00,
};
private static byte[] sparcData { get; } = new byte[] {
0x78, 0x2E, 0x73, 0x6F, 0x2E, 0x33, 0x00, 0x00, 0x07, 0x01, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x44, 0x0B, 0x00, 0x00, 0x00, 0xA2, 0x0D,
0x10, 0x12, 0x30, 0x03, 0xC9, 0x37, 0x40, 0x1A, 0x86, 0x21, 0x44, 0x34, 0x40, 0x32, 0x85, 0xA0, 0x30, 0x40, 0x00, 0x70, 0x00, 0x40, 0x84, 0x80, 0x04, 0x00,
0xE4, 0xAC, 0x07, 0x04, 0x21, 0x44, 0x02, 0x20, 0x10, 0x00, 0x40, 0xC2, 0x89, 0x98, 0x85, 0x00, 0x58, 0x6A, 0x41, 0x8E, 0x18, 0xA1, 0x91, 0x00, 0x10, 0x00,
};
private void CompareBuffer(byte[] testBuffer, byte[] targetBuffer)
{
Assert.Equal(testBuffer, targetBuffer);
}
[Fact]
public void X86ConverterDecodeTest()
{
uint state = 0;
uint ip = 0x2000;
var testData = x86Data;
BranchExecFilter.X86Converter(testData, ip, ref state);
CompareBuffer(testData, x86resultData);
}
[Fact]
public void PowerPCConverterDecodeTest()
{
uint ip = 0x6A0;
var testData = ppcData;
BranchExecFilter.PowerPCConverter(testData, ip);
CompareBuffer(testData, ppcResultData);
}
[Fact]
public void ARMConverteDecoderTest()
{
uint ip = 0x3C00;
var testData = armData;
BranchExecFilter.ARMConverter(testData, ip);
CompareBuffer(testData, armResultData);
}
[Fact]
public void ARMTConverterDecodeTest()
{
uint ip = 0xA00;
var testData = armtData;
BranchExecFilter.ARMTConverter(testData, ip);
CompareBuffer(testData, armtResultData);
}
[Fact]
public void IA64ConverterDecodeTest()
{
uint ip = 0xAA0;
var testData = ia64Data;
BranchExecFilter.IA64Converter(testData, ip);
CompareBuffer(testData, ia64ResultData);
}
[Fact]
public void SPARCConverterDecodeTest()
{
uint ip = 0x100;
var testData = sparcData;
BranchExecFilter.SPARCConverter(testData, ip);
CompareBuffer(testData, sparcResultData);
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.IO;
using System.Linq;
using SharpCompress.Archives;
@@ -107,5 +107,21 @@ namespace SharpCompress.Test.GZip
Assert.Equal(size, tarStream.Length);
}
}
[Fact]
public void TestGzCrcWithMostSignificaltBitNotNegative()
{
using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")))
{
using (var archive = GZipArchive.Open(stream))
{
//process all entries in solid archive until the one we want to test
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
Assert.InRange(entry.Crc, 0L, 0xFFFFFFFFL);
}
}
}
}
}
}

View File

@@ -1,8 +1,9 @@
using System.IO;
using System.IO;
using System.Linq;
using SharpCompress.Archives;
using SharpCompress.Archives.Rar;
using SharpCompress.Common;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.Readers;
using Xunit;
@@ -16,12 +17,24 @@ namespace SharpCompress.Test.Rar
ReadRarPassword("Rar.encrypted_filesAndHeader.rar", "test");
}
[Fact]
public void Rar_EncryptedFileAndHeader_NoPasswordExceptionTest()
{
Assert.Throws(typeof(CryptographicException), () => ReadRarPassword("Rar.encrypted_filesAndHeader.rar", null));
}
/*[Fact]
public void Rar5_EncryptedFileAndHeader_Archive()
{
ReadRarPassword("Rar5.encrypted_filesAndHeader.rar", "test");
}*/
[Fact]
public void Rar5_EncryptedFileAndHeader_NoPasswordExceptionTest()
{
Assert.Throws(typeof(CryptographicException), () => ReadRarPassword("Rar5.encrypted_filesAndHeader.rar", null));
}
[Fact]
public void Rar_EncryptedFileOnly_Archive()
{
@@ -203,6 +216,43 @@ namespace SharpCompress.Test.Rar
VerifyFiles();
}
[Fact]
public void Rar_IsSolidEntryStreamCheck()
{
DoRar_IsSolidEntryStreamCheck("Rar.solid.rar");
}
//Extract the 2nd file in a solid archive to check that the first file is skipped properly
private void DoRar_IsSolidEntryStreamCheck(string filename)
{
using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename)))
{
using (var archive = RarArchive.Open(stream))
{
Assert.True(archive.IsSolid);
IArchiveEntry[] entries = archive.Entries.Where(a => !a.IsDirectory).ToArray();
Assert.NotInRange(entries.Length, 0, 1);
Assert.False(entries[0].IsSolid); //first item in a solid archive is not marked solid and is seekable
IArchiveEntry testEntry = entries[1];
Assert.True(testEntry.IsSolid); //the target. The non seekable entry
//process all entries in solid archive until the one we want to test
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
using (CrcCheckStream crcStream = new CrcCheckStream((uint)entry.Crc)) //use the 7zip CRC stream for convenience (required a bug fix)
{
using (Stream eStream = entry.OpenEntryStream()) //bug fix in RarStream to report the correct Position
eStream.CopyTo(crcStream);
} //throws if not valid
if (entry == testEntry)
break;
}
}
}
}
[Fact]
public void Rar_Solid_ArchiveStreamRead()
{
@@ -361,6 +411,226 @@ namespace SharpCompress.Test.Rar
}
[Fact]
public void Rar2_Multi_ArchiveStreamRead()
{
DoRar_Multi_ArchiveStreamRead(new string[] {
"Rar2.multi.rar",
"Rar2.multi.r00",
"Rar2.multi.r01",
"Rar2.multi.r02",
"Rar2.multi.r03",
"Rar2.multi.r04",
"Rar2.multi.r05"}, false);
}
[Fact]
public void Rar2_Multi_ArchiveFileRead()
{
ArchiveFileRead("Rar2.multi.rar"); //r00, r01...
}
[Fact]
public void Rar2_ArchiveFileRead()
{
ArchiveFileRead("Rar2.rar");
}
[Fact]
public void Rar2_ArchiveVersionTest()
{
string testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Rar2.rar");
using (var archive = RarArchive.Open(testArchive))
{
Assert.Equal(2, archive.MinVersion);
Assert.Equal(2, archive.MaxVersion);
}
}
[Fact]
public void Rar4_ArchiveVersionTest()
{
string testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Rar4.multi.part01.rar");
using (var archive = RarArchive.Open(testArchive))
{
Assert.Equal(3, archive.MinVersion);
Assert.Equal(4, archive.MaxVersion);
}
}
[Fact]
public void Rar5_ArchiveVersionTest()
{
string testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Rar5.solid.rar");
using (var archive = RarArchive.Open(testArchive))
{
Assert.Equal(5, archive.MinVersion);
Assert.Equal(6, archive.MaxVersion);
}
}
[Fact]
public void Rar4_Multi_ArchiveFileRead()
{
ArchiveFileRead("Rar4.multi.part01.rar");
}
[Fact]
public void Rar4_ArchiveFileRead()
{
ArchiveFileRead("Rar4.rar");
}
[Fact]
public void Rar_GetPartsSplit()
{
//uses first part to search for all parts and compares against this array
ArchiveGetParts(new string[] {
"Rar4.split.001",
"Rar4.split.002",
"Rar4.split.003",
"Rar4.split.004",
"Rar4.split.005",
"Rar4.split.006"});
}
[Fact]
public void Rar_GetPartsOld()
{
//uses first part to search for all parts and compares against this array
ArchiveGetParts(new string[] {
"Rar2.multi.rar",
"Rar2.multi.r00",
"Rar2.multi.r01",
"Rar2.multi.r02",
"Rar2.multi.r03",
"Rar2.multi.r04",
"Rar2.multi.r05"});
}
//Part discovery for the new ".partNN.rar" volume naming scheme: the first part is
//used to locate the rest, and the discovered set is compared against this expected list.
[Fact]
public void Rar_GetPartsNew()
{
    var expected = new[]
    {
        "Rar4.multi.part01.rar",
        "Rar4.multi.part02.rar",
        "Rar4.multi.part03.rar",
        "Rar4.multi.part04.rar",
        "Rar4.multi.part05.rar",
        "Rar4.multi.part06.rar",
        "Rar4.multi.part07.rar"
    };
    ArchiveGetParts(expected);
}
//Streams a RAR4 multi-volume set supplied explicitly and verifies extraction.
[Fact]
public void Rar4_Multi_ArchiveStreamRead()
{
    var parts = new[]
    {
        "Rar4.multi.part01.rar",
        "Rar4.multi.part02.rar",
        "Rar4.multi.part03.rar",
        "Rar4.multi.part04.rar",
        "Rar4.multi.part05.rar",
        "Rar4.multi.part06.rar",
        "Rar4.multi.part07.rar"
    };
    DoRar_Multi_ArchiveStreamRead(parts, false);
}
//Split volumes carry no ".rar" extension, so this exercises identification of the
//archive type by content rather than by file name.
[Fact]
public void Rar4_Split_ArchiveStreamRead()
{
    var parts = new[]
    {
        "Rar4.split.001",
        "Rar4.split.002",
        "Rar4.split.003",
        "Rar4.split.004",
        "Rar4.split.005",
        "Rar4.split.006"
    };
    ArchiveStreamMultiRead(null, parts);
}
//Only the first part is passed; the remaining volumes
//(Rar4.multi.part02.rar ... Rar4.multi.part07.rar) are detected and loaded automatically.
[Fact]
public void Rar4_Multi_ArchiveFirstFileRead() =>
    ArchiveFileRead("Rar4.multi.part01.rar");
//Only the first split part is passed; the remaining volumes
//(Rar4.split.002 ... Rar4.split.006) are detected and loaded automatically.
[Fact]
public void Rar4_Split_ArchiveFirstFileRead() =>
    ArchiveFileRead("Rar4.split.001");
//Only the first split part is streamed; the remaining volumes
//(Rar4.split.002 ... Rar4.split.006) are detected and loaded automatically.
[Fact]
public void Rar4_Split_ArchiveStreamFirstFileRead()
{
    var firstPartOnly = new[] { "Rar4.split.001" };
    ArchiveStreamMultiRead(null, firstPartOnly);
}
//Opens the split set via ArchiveFactory.Open with stream inputs.
[Fact]
public void Rar4_Split_ArchiveOpen()
{
    var parts = new[]
    {
        "Rar4.split.001",
        "Rar4.split.002",
        "Rar4.split.003",
        "Rar4.split.004",
        "Rar4.split.005",
        "Rar4.split.006"
    };
    ArchiveOpenStreamRead(null, parts);
}
//Opens the multi-part set via ArchiveFactory.Open with stream inputs.
[Fact]
public void Rar4_Multi_ArchiveOpen()
{
    var parts = new[]
    {
        "Rar4.multi.part01.rar",
        "Rar4.multi.part02.rar",
        "Rar4.multi.part03.rar",
        "Rar4.multi.part04.rar",
        "Rar4.multi.part05.rar",
        "Rar4.multi.part06.rar",
        "Rar4.multi.part07.rar"
    };
    ArchiveOpenStreamRead(null, parts);
}
//Checks that each entry reports the first/last volume index it spans across the set.
[Fact]
public void Rar4_Multi_ArchiveOpenEntryVolumeIndexTest()
{
    var expectedIndexes = new[]
    {
        new[] { 0, 1 }, //exe - Rar4.multi.part01.rar to Rar4.multi.part02.rar
        new[] { 1, 5 }, //jpg - Rar4.multi.part02.rar to Rar4.multi.part06.rar
        new[] { 5, 6 } //txt - Rar4.multi.part06.rar to Rar4.multi.part07.rar
    };
    ArchiveOpenEntryVolumeIndexTest(
        expectedIndexes,
        null,
        "Rar4.multi.part01.rar",
        "Rar4.multi.part02.rar",
        "Rar4.multi.part03.rar",
        "Rar4.multi.part04.rar",
        "Rar4.multi.part05.rar",
        "Rar4.multi.part06.rar",
        "Rar4.multi.part07.rar");
}
[Fact]
public void Rar_Multi_ArchiveFileRead()
{
ArchiveFileRead("Rar.multi.part01.rar");

View File

@@ -1,4 +1,4 @@
using System.IO;
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -27,7 +27,7 @@ namespace SharpCompress.Test
{
using (var file = File.OpenRead(testArchive))
{
using (var protectedStream = new NonDisposingStream(new ForwardOnlyStream(file), throwOnDispose: true))
using (var protectedStream = NonDisposingStream.Create(new ForwardOnlyStream(file), throwOnDispose: true))
{
using (var testStream = new TestStream(protectedStream))
{

View File

@@ -1,4 +1,4 @@
using System;
using System;
using SharpCompress.Common;
using SharpCompress.Readers;
using Xunit;
@@ -7,6 +7,16 @@ namespace SharpCompress.Test.SevenZip
{
public class SevenZipArchiveTests : ArchiveTests
{
//Stream-reads a solid 7z archive.
[Fact]
public void SevenZipArchive_Solid_StreamRead() =>
    ArchiveStreamRead("7Zip.solid.7z");
//Stream-reads a non-solid 7z archive.
[Fact]
public void SevenZipArchive_NonSolid_StreamRead() =>
    ArchiveStreamRead("7Zip.nonsolid.7z");
[Fact]
public void SevenZipArchive_LZMA_StreamRead()
{
@@ -31,6 +41,11 @@ namespace SharpCompress.Test.SevenZip
ArchiveFileRead("7Zip.LZMA.Aes.7z", new ReaderOptions() { Password = "testpassword" });
}
//Opening an AES-encrypted 7z archive with a null password must fail with
//CryptographicException (it previously surfaced as ArgumentNullException,
//unlike the equivalent rar behavior).
[Fact]
public void SevenZipArchive_LZMAAES_NoPasswordExceptionTest()
{
    //use the generic Assert.Throws<T> overload rather than Throws(typeof(...))
    //for compile-time typing, consistent with the other tests in this suite
    Assert.Throws<CryptographicException>(() =>
        ArchiveFileRead("7Zip.LZMA.Aes.7z", new ReaderOptions() { Password = null }));
}
[Fact]
public void SevenZipArchive_PPMd_StreamRead()
{
ArchiveStreamRead("7Zip.PPMd.7z");
@@ -93,13 +108,41 @@ namespace SharpCompress.Test.SevenZip
//Reading a 7z split set through this path is expected to fail: per the
//"Mitigation of problems" change in this history, the failure mode is now
//InvalidOperationException (it was IndexOutOfRangeException before).
//NOTE(review): the source view contained both the old and the new assertion
//merged together (diff-extraction artifact); only the current one is kept here.
[Fact]
public void SevenZipArchive_BZip2_Split()
{
    Assert.Throws<InvalidOperationException>(() => ArchiveStreamRead(null, "Original.7z.001",
                                                                    "Original.7z.002",
                                                                    "Original.7z.003",
                                                                    "Original.7z.004",
                                                                    "Original.7z.005",
                                                                    "Original.7z.006",
                                                                    "Original.7z.007"));
}
//Same content as the Original.7z.001 ... 007 set but without the root directory
//'Original\' inside the archive - that root directory is what made the verify fail.
[Fact]
public void SevenZipArchive_BZip2_Split_Working()
{
    var parts = new[]
    {
        "7Zip.BZip2.split.001",
        "7Zip.BZip2.split.002",
        "7Zip.BZip2.split.003",
        "7Zip.BZip2.split.004",
        "7Zip.BZip2.split.005",
        "7Zip.BZip2.split.006",
        "7Zip.BZip2.split.007"
    };
    ArchiveStreamMultiRead(null, parts);
}
//Only the first split part is passed; the remaining volumes
//(7Zip.BZip2.split.002 ... 7Zip.BZip2.split.007) are detected and loaded automatically.
[Fact]
public void SevenZipArchive_BZip2_Split_FirstFileRead() =>
    ArchiveFileRead("7Zip.BZip2.split.001");
}
}
}

Some files were not shown because too many files have changed in this diff Show More