Compare commits: 0.32.1 ... dmg

8 commits

Author         SHA1        Message                                         Date
Adam Hathcock  83e8bf8462  Add test dmg                                    2021-06-04 13:38:09 +01:00
Adam Hathcock  65bcfadfde  Merge pull request #573 from Artentus/master    2021-06-04 13:25:32 +01:00
                           (Add read-only support for Dmg archives)
Adam Hathcock  5acc195cf7  Merge branch 'master' into master               2021-06-04 13:23:07 +01:00
Mathis Rech    d5cbe71cae  Revert global.json                              2021-02-17 11:05:19 +01:00
Mathis Rech    014ecd4fc1  Merge pull request #3 from adamhathcock/master  2021-02-17 11:00:20 +01:00
                           (Merge master)
Mathis Rech    9600709219  Add read-only support for DMG archives          2021-02-17 10:57:41 +01:00
Mathis Rech    d5e6c31a9f  Merge pull request #2 from adamhathcock/master  2021-02-09 16:44:48 +01:00
                           (Merge master)
Mathis Rech    5faa603d59  Merge pull request #1 from adamhathcock/master  2020-09-24 20:05:04 +02:00
                           (Merge master)
178 changed files with 4551 additions and 3726 deletions


@@ -1,543 +0,0 @@
# Version: 2.0.1 (Using https://semver.org/)
# Updated: 2020-12-11
# See https://github.com/RehanSaeed/EditorConfig/releases for release notes.
# See https://github.com/RehanSaeed/EditorConfig for updates to this file.
# See http://EditorConfig.org for more information about .editorconfig files.
##########################################
# Common Settings
##########################################
# This file is the top-most EditorConfig file
root = true
# All Files
[*]
charset = utf-8
indent_style = space
indent_size = 4
insert_final_newline = true
trim_trailing_whitespace = true
##########################################
# File Extension Settings
##########################################
# Visual Studio Solution Files
[*.sln]
indent_style = tab
# Visual Studio XML Project Files
[*.{csproj,vbproj,vcxproj.filters,proj,projitems,shproj}]
indent_size = 2
# XML Configuration Files
[*.{xml,config,props,targets,nuspec,resx,ruleset,vsixmanifest,vsct}]
indent_size = 2
# JSON Files
[*.{json,json5,webmanifest}]
indent_size = 2
# YAML Files
[*.{yml,yaml}]
indent_size = 2
# Markdown Files
[*.md]
trim_trailing_whitespace = false
# Web Files
[*.{htm,html,js,jsm,ts,tsx,css,sass,scss,less,svg,vue}]
indent_size = 2
# Batch Files
[*.{cmd,bat}]
end_of_line = crlf
# Bash Files
[*.sh]
end_of_line = lf
# Makefiles
[Makefile]
indent_style = tab
##########################################
# Default .NET Code Style Severities
# https://docs.microsoft.com/dotnet/fundamentals/code-analysis/configuration-options#scope
##########################################
[*.{cs,csx,cake,vb,vbx}]
# Default Severity for all .NET Code Style rules below
dotnet_analyzer_diagnostic.severity = warning
##########################################
# File Header (Uncomment to support file headers)
# https://docs.microsoft.com/visualstudio/ide/reference/add-file-header
##########################################
# [*.{cs,csx,cake,vb,vbx}]
# file_header_template = <copyright file="{fileName}" company="PROJECT-AUTHOR">\n© PROJECT-AUTHOR\n</copyright>
# SA1636: File header copyright text should match
# Justification: .editorconfig supports file headers. If this is changed to a value other than "none", a stylecop.json file will need to be added to the project.
# dotnet_diagnostic.SA1636.severity = none
##########################################
# .NET Language Conventions
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions
##########################################
# .NET Code Style Settings
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#net-code-style-settings
[*.{cs,csx,cake,vb,vbx}]
# "this." and "Me." qualifiers
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#this-and-me
#dotnet_style_qualification_for_field = true:warning
#dotnet_style_qualification_for_property = true:warning
#dotnet_style_qualification_for_method = true:warning
#dotnet_style_qualification_for_event = true:warning
# Language keywords instead of framework type names for type references
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#language-keywords
dotnet_style_predefined_type_for_locals_parameters_members = true:warning
dotnet_style_predefined_type_for_member_access = true:warning
# Modifier preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#normalize-modifiers
dotnet_style_require_accessibility_modifiers = always:warning
csharp_preferred_modifier_order = public,private,protected,internal,static,extern,new,virtual,abstract,sealed,override,readonly,unsafe,volatile,async:warning
visual_basic_preferred_modifier_order = Partial,Default,Private,Protected,Public,Friend,NotOverridable,Overridable,MustOverride,Overloads,Overrides,MustInherit,NotInheritable,Static,Shared,Shadows,ReadOnly,WriteOnly,Dim,Const,WithEvents,Widening,Narrowing,Custom,Async:warning
dotnet_style_readonly_field = true:warning
# Parentheses preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#parentheses-preferences
dotnet_style_parentheses_in_arithmetic_binary_operators = always_for_clarity:warning
dotnet_style_parentheses_in_relational_binary_operators = always_for_clarity:warning
dotnet_style_parentheses_in_other_binary_operators = always_for_clarity:warning
dotnet_style_parentheses_in_other_operators = always_for_clarity:suggestion
# Expression-level preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#expression-level-preferences
dotnet_style_object_initializer = true:warning
dotnet_style_collection_initializer = true:warning
dotnet_style_explicit_tuple_names = true:warning
dotnet_style_prefer_inferred_tuple_names = true:warning
dotnet_style_prefer_inferred_anonymous_type_member_names = true:warning
dotnet_style_prefer_auto_properties = true:warning
dotnet_style_prefer_is_null_check_over_reference_equality_method = true:warning
dotnet_style_prefer_conditional_expression_over_assignment = false:suggestion
dotnet_diagnostic.IDE0045.severity = suggestion
dotnet_style_prefer_conditional_expression_over_return = false:suggestion
dotnet_diagnostic.IDE0046.severity = suggestion
dotnet_style_prefer_compound_assignment = true:warning
# Null-checking preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#null-checking-preferences
dotnet_style_coalesce_expression = true:warning
dotnet_style_null_propagation = true:warning
# Parameter preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#parameter-preferences
dotnet_code_quality_unused_parameters = all:warning
# More style options (Undocumented)
# https://github.com/MicrosoftDocs/visualstudio-docs/issues/3641
dotnet_style_operator_placement_when_wrapping = end_of_line
# https://github.com/dotnet/roslyn/pull/40070
dotnet_style_prefer_simplified_interpolation = true:warning
# C# Code Style Settings
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#c-code-style-settings
[*.{cs,csx,cake}]
# Implicit and explicit types
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#implicit-and-explicit-types
csharp_style_var_for_built_in_types = true:warning
csharp_style_var_when_type_is_apparent = true:warning
csharp_style_var_elsewhere = true:warning
# Expression-bodied members
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#expression-bodied-members
csharp_style_expression_bodied_methods = true:warning
csharp_style_expression_bodied_constructors = true:warning
csharp_style_expression_bodied_operators = true:warning
csharp_style_expression_bodied_properties = true:warning
csharp_style_expression_bodied_indexers = true:warning
csharp_style_expression_bodied_accessors = true:warning
csharp_style_expression_bodied_lambdas = true:warning
csharp_style_expression_bodied_local_functions = true:warning
# Pattern matching
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#pattern-matching
csharp_style_pattern_matching_over_is_with_cast_check = true:warning
csharp_style_pattern_matching_over_as_with_null_check = true:warning
# Inlined variable declarations
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#inlined-variable-declarations
csharp_style_inlined_variable_declaration = true:warning
# Expression-level preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#expression-level-preferences
csharp_prefer_simple_default_expression = true:warning
# "Null" checking preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#c-null-checking-preferences
csharp_style_throw_expression = true:warning
csharp_style_conditional_delegate_call = true:warning
# Code block preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#code-block-preferences
csharp_prefer_braces = true:warning
# Unused value preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#unused-value-preferences
csharp_style_unused_value_expression_statement_preference = discard_variable:suggestion
dotnet_diagnostic.IDE0058.severity = suggestion
csharp_style_unused_value_assignment_preference = discard_variable:suggestion
dotnet_diagnostic.IDE0059.severity = suggestion
# Index and range preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#index-and-range-preferences
csharp_style_prefer_index_operator = true:warning
csharp_style_prefer_range_operator = true:warning
# Miscellaneous preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#miscellaneous-preferences
csharp_style_deconstructed_variable_declaration = true:warning
csharp_style_pattern_local_over_anonymous_function = true:warning
csharp_using_directive_placement = outside_namespace:warning
csharp_prefer_static_local_function = true:warning
csharp_prefer_simple_using_statement = true:suggestion
dotnet_diagnostic.IDE0063.severity = suggestion
csharp_style_namespace_declarations = file_scoped
##########################################
# .NET Formatting Conventions
# https://docs.microsoft.com/visualstudio/ide/editorconfig-code-style-settings-reference#formatting-conventions
##########################################
# Organize usings
# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#organize-using-directives
dotnet_sort_system_directives_first = true
# Newline options
# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#new-line-options
csharp_new_line_before_open_brace = all
csharp_new_line_before_else = true
csharp_new_line_before_catch = true
csharp_new_line_before_finally = true
csharp_new_line_before_members_in_object_initializers = true
csharp_new_line_before_members_in_anonymous_types = true
csharp_new_line_between_query_expression_clauses = true
# Indentation options
# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#indentation-options
csharp_indent_case_contents = true
csharp_indent_switch_labels = true
csharp_indent_labels = no_change
csharp_indent_block_contents = true
csharp_indent_braces = false
csharp_indent_case_contents_when_block = false
# Spacing options
# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#spacing-options
csharp_space_after_cast = false
csharp_space_after_keywords_in_control_flow_statements = true
csharp_space_between_parentheses = false
csharp_space_before_colon_in_inheritance_clause = true
csharp_space_after_colon_in_inheritance_clause = true
csharp_space_around_binary_operators = before_and_after
csharp_space_between_method_declaration_parameter_list_parentheses = false
csharp_space_between_method_declaration_empty_parameter_list_parentheses = false
csharp_space_between_method_declaration_name_and_open_parenthesis = false
csharp_space_between_method_call_parameter_list_parentheses = false
csharp_space_between_method_call_empty_parameter_list_parentheses = false
csharp_space_between_method_call_name_and_opening_parenthesis = false
csharp_space_after_comma = true
csharp_space_before_comma = false
csharp_space_after_dot = false
csharp_space_before_dot = false
csharp_space_after_semicolon_in_for_statement = true
csharp_space_before_semicolon_in_for_statement = false
csharp_space_around_declaration_statements = false
csharp_space_before_open_square_brackets = false
csharp_space_between_empty_square_brackets = false
csharp_space_between_square_brackets = false
# Wrapping options
# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#wrap-options
csharp_preserve_single_line_statements = false
csharp_preserve_single_line_blocks = true
##########################################
# .NET Naming Conventions
# https://docs.microsoft.com/visualstudio/ide/editorconfig-naming-conventions
##########################################
[*.{cs,csx,cake,vb,vbx}]
dotnet_diagnostic.CA1000.severity = suggestion
dotnet_diagnostic.CA1001.severity = error
dotnet_diagnostic.CA1018.severity = error
dotnet_diagnostic.CA1051.severity = suggestion
dotnet_diagnostic.CA1068.severity = error
dotnet_diagnostic.CA1069.severity = error
dotnet_diagnostic.CA1304.severity = error
dotnet_diagnostic.CA1305.severity = suggestion
dotnet_diagnostic.CA1309.severity = suggestion
dotnet_diagnostic.CA1310.severity = error
dotnet_diagnostic.CA1707.severity = suggestion
dotnet_diagnostic.CA1708.severity = suggestion
dotnet_diagnostic.CA1711.severity = suggestion
dotnet_diagnostic.CA1716.severity = suggestion
dotnet_diagnostic.CA1720.severity = suggestion
dotnet_diagnostic.CA1725.severity = suggestion
dotnet_diagnostic.CA1805.severity = suggestion
dotnet_diagnostic.CA1816.severity = suggestion
dotnet_diagnostic.CA1822.severity = suggestion
dotnet_diagnostic.CA1825.severity = error
dotnet_diagnostic.CA1826.severity = silent
dotnet_diagnostic.CA1827.severity = error
dotnet_diagnostic.CA1829.severity = suggestion
dotnet_diagnostic.CA1834.severity = error
dotnet_diagnostic.CA1845.severity = suggestion
dotnet_diagnostic.CA1848.severity = suggestion
dotnet_diagnostic.CA2016.severity = suggestion
dotnet_diagnostic.CA2201.severity = error
dotnet_diagnostic.CA2206.severity = error
dotnet_diagnostic.CA2208.severity = error
dotnet_diagnostic.CA2211.severity = error
dotnet_diagnostic.CA2249.severity = error
dotnet_diagnostic.CA2251.severity = error
dotnet_diagnostic.CA2252.severity = none
dotnet_diagnostic.CA2254.severity = suggestion
dotnet_diagnostic.CS0169.severity = error
dotnet_diagnostic.CS0219.severity = error
dotnet_diagnostic.CS1998.severity = error
dotnet_diagnostic.CS8602.severity = error
dotnet_diagnostic.CS8604.severity = error
dotnet_diagnostic.CS8618.severity = error
dotnet_diagnostic.CS0618.severity = error
dotnet_diagnostic.CS4014.severity = error
dotnet_diagnostic.CS8600.severity = error
dotnet_diagnostic.CS8603.severity = error
dotnet_diagnostic.CS8625.severity = error
dotnet_diagnostic.BL0005.severity = suggestion
dotnet_diagnostic.MVC1000.severity = suggestion
dotnet_diagnostic.IDE0055.severity = suggestion # Fix formatting
dotnet_diagnostic.IDE0023.severity = suggestion # use expression body for operators
dotnet_diagnostic.IDE0025.severity = suggestion # use expression body for properties
dotnet_diagnostic.IDE1006.severity = suggestion # Naming rule violation: These words cannot contain lower case characters
dotnet_diagnostic.IDE0072.severity = suggestion # Populate switch - forces population of all cases even when default specified
dotnet_diagnostic.IDE0027.severity = suggestion # Use expression body for accessors
dotnet_diagnostic.IDE0032.severity = suggestion # Use auto property
dotnet_diagnostic.IDE0007.severity = error # Use var
dotnet_diagnostic.IDE0160.severity = suggestion # Use block scoped
dotnet_diagnostic.IDE0011.severity = error # Use braces on if statements
dotnet_diagnostic.IDE0057.severity = suggestion # substring can be simplified
dotnet_diagnostic.IDE0004.severity = error # redundant cast
dotnet_diagnostic.IDE0010.severity = silent # populate switch
dotnet_diagnostic.IDE0021.severity = silent # expression body for constructors
dotnet_diagnostic.IDE0022.severity = silent # expression body for methods
dotnet_diagnostic.IDE0024.severity = silent # expression body for operators
dotnet_diagnostic.IDE0028.severity = silent
dotnet_diagnostic.IDE0033.severity = error # prefer tuple name
dotnet_diagnostic.IDE0040.severity = error # modifiers required
dotnet_diagnostic.IDE0041.severity = error # simplify null
dotnet_diagnostic.IDE0042.severity = error # deconstruct variable
dotnet_diagnostic.IDE0044.severity = error # make field only when possible
dotnet_diagnostic.IDE0047.severity = suggestion # parameter name
dotnet_diagnostic.IDE0051.severity = error # unused field
dotnet_diagnostic.IDE0052.severity = error # unused member
dotnet_diagnostic.IDE0060.severity = suggestion # unused parameters
dotnet_diagnostic.IDE0066.severity = suggestion # switch expression
dotnet_diagnostic.IDE0078.severity = suggestion # use pattern matching
dotnet_diagnostic.IDE0090.severity = suggestion # new can be simplified
dotnet_diagnostic.IDE0130.severity = suggestion # namespace folder structure
dotnet_diagnostic.IDE0160.severity = silent # block-scoped namespaces are not required
dotnet_diagnostic.IDE0161.severity = suggestion # prefer file-scoped namespaces
##########################################
# Styles
##########################################
# camel_case_style - Define the camelCase style
dotnet_naming_style.camel_case_style.capitalization = camel_case
# pascal_case_style - Define the PascalCase style
dotnet_naming_style.pascal_case_style.capitalization = pascal_case
# constant_case - Define the CONSTANT_CASE style
dotnet_naming_style.constant_case.capitalization = all_upper
dotnet_naming_style.constant_case.word_separator = _
# first_upper_style - The first word must start with an upper-case character
dotnet_naming_style.first_upper_style.capitalization = first_word_upper
# prefix_interface_with_i_style - Interfaces must be PascalCase and the first character of an interface must be an 'I'
dotnet_naming_style.prefix_interface_with_i_style.capitalization = pascal_case
dotnet_naming_style.prefix_interface_with_i_style.required_prefix = I
# prefix_type_parameters_with_t_style - Generic Type Parameters must be PascalCase and the first character must be a 'T'
dotnet_naming_style.prefix_type_parameters_with_t_style.capitalization = pascal_case
dotnet_naming_style.prefix_type_parameters_with_t_style.required_prefix = T
# disallowed_style - Anything that has this style applied is marked as disallowed
dotnet_naming_style.disallowed_style.capitalization = pascal_case
dotnet_naming_style.disallowed_style.required_prefix = ____RULE_VIOLATION____
dotnet_naming_style.disallowed_style.required_suffix = ____RULE_VIOLATION____
# internal_error_style - This style should never occur... if it does, it indicates a bug in this file or in the parser using the file
dotnet_naming_style.internal_error_style.capitalization = pascal_case
dotnet_naming_style.internal_error_style.required_prefix = ____INTERNAL_ERROR____
dotnet_naming_style.internal_error_style.required_suffix = ____INTERNAL_ERROR____
# underscore_camel_case_style - Private fields must be camelCase and prefixed with an underscore '_'
dotnet_naming_style.underscore_camel_case_style.capitalization = camel_case
dotnet_naming_style.underscore_camel_case_style.required_prefix = _
##########################################
# .NET Design Guideline Field Naming Rules
# Naming rules for fields follow the .NET Framework design guidelines
# https://docs.microsoft.com/dotnet/standard/design-guidelines/index
##########################################
# All public/protected/protected_internal constant fields must be constant_case
# https://docs.microsoft.com/dotnet/standard/design-guidelines/field
dotnet_naming_symbols.public_protected_constant_fields_group.applicable_accessibilities = public, protected, protected_internal
dotnet_naming_symbols.public_protected_constant_fields_group.required_modifiers = const
dotnet_naming_symbols.public_protected_constant_fields_group.applicable_kinds = field
dotnet_naming_rule.public_protected_constant_fields_must_be_pascal_case_rule.symbols = public_protected_constant_fields_group
dotnet_naming_rule.public_protected_constant_fields_must_be_pascal_case_rule.style = constant_case
dotnet_naming_rule.public_protected_constant_fields_must_be_pascal_case_rule.severity = warning
# All public/protected/protected_internal static readonly fields must be constant_case
# https://docs.microsoft.com/dotnet/standard/design-guidelines/field
dotnet_naming_symbols.public_protected_static_readonly_fields_group.applicable_accessibilities = public, protected, protected_internal
dotnet_naming_symbols.public_protected_static_readonly_fields_group.required_modifiers = static, readonly
dotnet_naming_symbols.public_protected_static_readonly_fields_group.applicable_kinds = field
dotnet_naming_rule.public_protected_static_readonly_fields_must_be_pascal_case_rule.symbols = public_protected_static_readonly_fields_group
dotnet_naming_rule.public_protected_static_readonly_fields_must_be_pascal_case_rule.style = constant_case
dotnet_naming_rule.public_protected_static_readonly_fields_must_be_pascal_case_rule.severity = warning
# No other public/protected/protected_internal fields are allowed
# https://docs.microsoft.com/dotnet/standard/design-guidelines/field
dotnet_naming_symbols.other_public_protected_fields_group.applicable_accessibilities = public, protected, protected_internal
dotnet_naming_symbols.other_public_protected_fields_group.applicable_kinds = field
dotnet_naming_rule.other_public_protected_fields_disallowed_rule.symbols = other_public_protected_fields_group
dotnet_naming_rule.other_public_protected_fields_disallowed_rule.style = disallowed_style
dotnet_naming_rule.other_public_protected_fields_disallowed_rule.severity = error
##########################################
# StyleCop Field Naming Rules
# Naming rules for fields follow the StyleCop analyzers
# This does not override any rules using disallowed_style above
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers
##########################################
# All constant fields must be constant_case
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1303.md
dotnet_naming_symbols.stylecop_constant_fields_group.applicable_accessibilities = public, internal, protected_internal, protected, private_protected, private
dotnet_naming_symbols.stylecop_constant_fields_group.required_modifiers = const
dotnet_naming_symbols.stylecop_constant_fields_group.applicable_kinds = field
dotnet_naming_rule.stylecop_constant_fields_must_be_pascal_case_rule.symbols = stylecop_constant_fields_group
dotnet_naming_rule.stylecop_constant_fields_must_be_pascal_case_rule.style = constant_case
dotnet_naming_rule.stylecop_constant_fields_must_be_pascal_case_rule.severity = warning
# All static readonly fields must be constant_case
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1311.md
dotnet_naming_symbols.stylecop_static_readonly_fields_group.applicable_accessibilities = public, internal, protected_internal, protected, private_protected, private
dotnet_naming_symbols.stylecop_static_readonly_fields_group.required_modifiers = static, readonly
dotnet_naming_symbols.stylecop_static_readonly_fields_group.applicable_kinds = field
dotnet_naming_rule.stylecop_static_readonly_fields_must_be_pascal_case_rule.symbols = stylecop_static_readonly_fields_group
dotnet_naming_rule.stylecop_static_readonly_fields_must_be_pascal_case_rule.style = constant_case
dotnet_naming_rule.stylecop_static_readonly_fields_must_be_pascal_case_rule.severity = warning
# No non-private instance fields are allowed
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1401.md
dotnet_naming_symbols.stylecop_fields_must_be_private_group.applicable_accessibilities = public, internal, protected_internal, protected, private_protected
dotnet_naming_symbols.stylecop_fields_must_be_private_group.applicable_kinds = field
dotnet_naming_rule.stylecop_instance_fields_must_be_private_rule.symbols = stylecop_fields_must_be_private_group
dotnet_naming_rule.stylecop_instance_fields_must_be_private_rule.style = disallowed_style
dotnet_naming_rule.stylecop_instance_fields_must_be_private_rule.severity = error
# Private fields must be camelCase
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1306.md
dotnet_naming_symbols.stylecop_private_fields_group.applicable_accessibilities = private
dotnet_naming_symbols.stylecop_private_fields_group.applicable_kinds = field
dotnet_naming_rule.stylecop_private_fields_must_be_camel_case_rule.symbols = stylecop_private_fields_group
dotnet_naming_rule.stylecop_private_fields_must_be_camel_case_rule.style = underscore_camel_case_style
dotnet_naming_rule.stylecop_private_fields_must_be_camel_case_rule.severity = warning
# Local variables must be camelCase
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1312.md
dotnet_naming_symbols.stylecop_local_fields_group.applicable_accessibilities = local
dotnet_naming_symbols.stylecop_local_fields_group.applicable_kinds = local
dotnet_naming_rule.stylecop_local_fields_must_be_camel_case_rule.symbols = stylecop_local_fields_group
dotnet_naming_rule.stylecop_local_fields_must_be_camel_case_rule.style = camel_case_style
dotnet_naming_rule.stylecop_local_fields_must_be_camel_case_rule.severity = warning
# This rule should never fire. However, it's included for at least two purposes:
# First, it helps to understand, reason about, and root-cause certain types of issues, such as bugs in .editorconfig parsers.
# Second, it helps to raise immediate awareness if a new field type is added (as occurred recently in C#).
dotnet_naming_symbols.sanity_check_uncovered_field_case_group.applicable_accessibilities = *
dotnet_naming_symbols.sanity_check_uncovered_field_case_group.applicable_kinds = field
dotnet_naming_rule.sanity_check_uncovered_field_case_rule.symbols = sanity_check_uncovered_field_case_group
dotnet_naming_rule.sanity_check_uncovered_field_case_rule.style = internal_error_style
dotnet_naming_rule.sanity_check_uncovered_field_case_rule.severity = error
##########################################
# Other Naming Rules
##########################################
# All of the following must be PascalCase:
# - Namespaces
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-namespaces
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1300.md
# - Classes and Enumerations
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-classes-structs-and-interfaces
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1300.md
# - Delegates
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-classes-structs-and-interfaces#names-of-common-types
# - Constructors, Properties, Events, Methods
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-type-members
dotnet_naming_symbols.element_group.applicable_kinds = namespace, class, enum, struct, delegate, event, method, property
dotnet_naming_rule.element_rule.symbols = element_group
dotnet_naming_rule.element_rule.style = pascal_case_style
dotnet_naming_rule.element_rule.severity = warning
# Interfaces use PascalCase and are prefixed with uppercase 'I'
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-classes-structs-and-interfaces
dotnet_naming_symbols.interface_group.applicable_kinds = interface
dotnet_naming_rule.interface_rule.symbols = interface_group
dotnet_naming_rule.interface_rule.style = prefix_interface_with_i_style
dotnet_naming_rule.interface_rule.severity = warning
# Generic Type Parameters use PascalCase and are prefixed with uppercase 'T'
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-classes-structs-and-interfaces
dotnet_naming_symbols.type_parameter_group.applicable_kinds = type_parameter
dotnet_naming_rule.type_parameter_rule.symbols = type_parameter_group
dotnet_naming_rule.type_parameter_rule.style = prefix_type_parameters_with_t_style
dotnet_naming_rule.type_parameter_rule.severity = warning
# Function parameters use camelCase
# https://docs.microsoft.com/dotnet/standard/design-guidelines/naming-parameters
dotnet_naming_symbols.parameters_group.applicable_kinds = parameter
dotnet_naming_rule.parameters_rule.symbols = parameters_group
dotnet_naming_rule.parameters_rule.style = camel_case_style
dotnet_naming_rule.parameters_rule.severity = warning
##########################################
# License
##########################################
# The following applies to the .editorconfig file ONLY, and is
# included below for reference, per the requirements of the license
# corresponding to this .editorconfig file.
# See: https://github.com/RehanSaeed/EditorConfig
#
# MIT License
#
# Copyright (c) 2017-2019 Muhammad Rehan Saeed
# Copyright (c) 2019 Henry Gabryjelski
#
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the
# Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject
# to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
##########################################
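Each naming rule above is assembled from three pieces: a symbols group (which declarations it applies to), a style, and a severity. As a rough illustration only, with every identifier invented for the example, C# shaped to satisfy these rules looks like this:

// Hypothetical example code; each line notes the naming rule it satisfies.
public interface IArchiveReader { }              // interface_rule: PascalCase, 'I' prefix
public class ArchiveReader<TEntry>               // element_rule / type_parameter_rule
{
    public const int MAX_RETRIES = 3;            // constant fields: CONSTANT_CASE
    private readonly int _bufferSize;            // private fields: underscore_camel_case_style
    public ArchiveReader(int bufferSize) => _bufferSize = bufferSize; // parameters_rule: camelCase
    public int BufferSize => _bufferSize;        // element_rule: PascalCase members
}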


@@ -1,6 +0,0 @@
version: 2
updates:
- package-ecosystem: "github-actions" # search for actions - there are other options available
directory: "/" # search in .github/workflows under root `/`
schedule:
interval: "weekly" # check for action updates every week


@@ -9,12 +9,16 @@ jobs:
os: [windows-latest, ubuntu-latest]
steps:
- uses: actions/checkout@v3
- uses: actions/setup-dotnet@v2
- uses: actions/checkout@v1
- uses: actions/setup-dotnet@v1
with:
dotnet-version: 6.0.x
dotnet-version: 5.0.101
- run: dotnet run -p build/build.csproj
- uses: actions/upload-artifact@v3
- uses: actions/upload-artifact@v2
with:
name: ${{ matrix.os }}-sharpcompress.nupkg
path: artifacts/*
- uses: actions/upload-artifact@v2
with:
name: ${{ matrix.os }}-sharpcompress.snupkg
path: artifacts/*

.gitignore

@@ -18,4 +18,3 @@ tools
.DS_Store
*.snupkg
/tests/TestArchives/6d23a38c-f064-4ef1-ad89-b942396f53b9/Scratch


@@ -182,8 +182,6 @@ I'm always looking for help or ideas. Please submit code or email with ideas. Un
XZ implementation based on: https://github.com/sambott/XZ.NET by @sambott
XZ BCJ filters support contributed by Louis-Michel Bergeron, on behalf of aDolus Technology Inc. - 2022
7Zip implementation based on: https://code.google.com/p/managed-lzma/
LICENSE


@@ -6,78 +6,78 @@ using GlobExpressions;
using static Bullseye.Targets;
using static SimpleExec.Command;
const string Clean = "clean";
const string Format = "format";
const string Build = "build";
const string Test = "test";
const string Publish = "publish";
class Program
{
private const string Clean = "clean";
private const string Format = "format";
private const string Build = "build";
private const string Test = "test";
private const string Publish = "publish";
Target(Clean,
ForEach("**/bin", "**/obj"),
dir =>
{
IEnumerable<string> GetDirectories(string d)
{
return Glob.Directories(".", d);
}
static void Main(string[] args)
{
Target(Clean,
ForEach("**/bin", "**/obj"),
dir =>
{
IEnumerable<string> GetDirectories(string d)
{
return Glob.Directories(".", d);
}
void RemoveDirectory(string d)
{
if (Directory.Exists(d))
void RemoveDirectory(string d)
{
if (Directory.Exists(d))
{
Console.WriteLine(d);
Directory.Delete(d, true);
}
}
foreach (var d in GetDirectories(dir))
{
RemoveDirectory(d);
}
});
Target(Format, () =>
{
Run("dotnet", "tool restore");
Run("dotnet", "format --check");
});
Target(Build, DependsOn(Format),
framework =>
{
Console.WriteLine(d);
Directory.Delete(d, true);
}
}
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows) && framework == "net46")
{
return;
}
Run("dotnet", "build src/SharpCompress/SharpCompress.csproj -c Release");
});
foreach (var d in GetDirectories(dir))
{
RemoveDirectory(d);
}
});
Target(Test, DependsOn(Build), ForEach("net5.0"),
framework =>
{
IEnumerable<string> GetFiles(string d)
{
return Glob.Files(".", d);
}
Target(Format,
() =>
{
Run("dotnet", "tool restore");
Run("dotnet", "format --check");
});
foreach (var file in GetFiles("**/*.Test.csproj"))
{
Run("dotnet", $"test {file} -c Release -f {framework}");
}
});
Target(Build,
DependsOn(Format),
framework =>
{
Run("dotnet", "build src/SharpCompress/SharpCompress.csproj -c Release");
});
Target(Publish, DependsOn(Test),
() =>
{
Run("dotnet", "pack src/SharpCompress/SharpCompress.csproj -c Release -o artifacts/");
});
Target(Test,
DependsOn(Build),
ForEach("net6.0", "net461"),
framework =>
{
IEnumerable<string> GetFiles(string d)
{
return Glob.Files(".", d);
}
Target("default", DependsOn(Publish), () => Console.WriteLine("Done!"));
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows) && framework == "net461")
{
return;
}
foreach (var file in GetFiles("**/*.Test.csproj"))
{
Run("dotnet", $"test {file} -c Release -f {framework}");
}
});
Target(Publish,
DependsOn(Test),
() =>
{
Run("dotnet", "pack src/SharpCompress/SharpCompress.csproj -c Release -o artifacts/");
});
Target("default", DependsOn(Publish), () => Console.WriteLine("Done!"));
await RunTargetsAndExitAsync(args);
RunTargetsAndExit(args);
}
}
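Both sides of this hunk are the same Bullseye/SimpleExec build script in two shapes: one declares the targets in top-level statements and finishes with await RunTargetsAndExitAsync(args), the other wraps them in a Program.Main that calls RunTargetsAndExit(args). A minimal sketch of the top-level pattern, with the target names taken from the file and everything else trimmed down:

using System;
using static Bullseye.Targets;
using static SimpleExec.Command;

const string Build = "build";
const string Test = "test";

// Declare targets and their dependency edges, then hand control to Bullseye.
Target(Build, () => Run("dotnet", "build src/SharpCompress/SharpCompress.csproj -c Release"));
Target(Test, DependsOn(Build), () => Run("dotnet", "test -c Release"));
Target("default", DependsOn(Test), () => Console.WriteLine("Done!"));

await RunTargetsAndExitAsync(args);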


@@ -2,13 +2,13 @@
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net6.0</TargetFramework>
<TargetFramework>net5.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Bullseye" Version="4.0.0" />
<PackageReference Include="Glob" Version="1.1.9" />
<PackageReference Include="SimpleExec" Version="10.0.0" />
<PackageReference Include="Bullseye" Version="3.6.0" />
<PackageReference Include="Glob" Version="1.1.8" />
<PackageReference Include="SimpleExec" Version="6.4.0" />
</ItemGroup>
</Project>


@@ -1,6 +1,5 @@
{
"sdk": {
"version": "6.0.200",
"rollForward": "latestFeature"
"version": "5.0.101"
}
}


@@ -1,420 +0,0 @@
// Copyright (c) Six Labors.
// Licensed under the Apache License, Version 2.0.
#if !NETSTANDARD2_0 && !NETSTANDARD2_1 && !NETFRAMEWORK
#define SUPPORTS_RUNTIME_INTRINSICS
#define SUPPORTS_HOTPATH
#endif
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
#if SUPPORTS_RUNTIME_INTRINSICS
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
#endif
#pragma warning disable IDE0007 // Use implicit type
namespace SharpCompress.Algorithms
{
/// <summary>
/// Calculates the 32-bit Adler checksum of a given buffer according to
/// RFC 1950 (ZLIB Compressed Data Format Specification version 3.3).
/// </summary>
internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/blob/main/src/ImageSharp/Compression/Zlib/Adler32.cs
{
/// <summary>
/// Global inlining options. Helps temporarily disable inlining for better profiler output.
/// </summary>
private static class InliningOptions // From https://github.com/SixLabors/ImageSharp/blob/main/src/ImageSharp/Common/Helpers/InliningOptions.cs
{
/// <summary>
/// <see cref="MethodImplOptions.AggressiveInlining"/> regardless of the build conditions.
/// </summary>
public const MethodImplOptions AlwaysInline = MethodImplOptions.AggressiveInlining;
#if PROFILING
public const MethodImplOptions HotPath = MethodImplOptions.NoInlining;
public const MethodImplOptions ShortMethod = MethodImplOptions.NoInlining;
#else
#if SUPPORTS_HOTPATH
public const MethodImplOptions HotPath = MethodImplOptions.AggressiveOptimization;
#else
public const MethodImplOptions HotPath = MethodImplOptions.AggressiveInlining;
#endif
public const MethodImplOptions ShortMethod = MethodImplOptions.AggressiveInlining;
#endif
public const MethodImplOptions ColdPath = MethodImplOptions.NoInlining;
}
#if SUPPORTS_RUNTIME_INTRINSICS
/// <summary>
/// Provides optimized static methods for trigonometric, logarithmic,
/// and other common mathematical functions.
/// </summary>
private static class Numerics // From https://github.com/SixLabors/ImageSharp/blob/main/src/ImageSharp/Common/Helpers/Numerics.cs
{
/// <summary>
/// Reduces elements of the vector into one sum.
/// </summary>
/// <param name="accumulator">The accumulator to reduce.</param>
/// <returns>The sum of all elements.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int ReduceSum(Vector256<int> accumulator)
{
// Add upper lane to lower lane.
Vector128<int> vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper());
// Add odd to even.
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_11_01_01));
// Add high to low.
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_10_11_10));
return Sse2.ConvertToInt32(vsum);
}
/// <summary>
/// Reduces even elements of the vector into one sum.
/// </summary>
/// <param name="accumulator">The accumulator to reduce.</param>
/// <returns>The sum of even elements.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int EvenReduceSum(Vector256<int> accumulator)
{
Vector128<int> vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper()); // add upper lane to lower lane
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_10_11_10)); // add high to low
// Vector128<int>.ToScalar() isn't optimized pre-net5.0 https://github.com/dotnet/runtime/pull/37882
return Sse2.ConvertToInt32(vsum);
}
}
#endif
/// <summary>
/// The default initial seed value of an Adler32 checksum calculation.
/// </summary>
public const uint SeedValue = 1U;
// Largest prime smaller than 65536
private const uint BASE = 65521;
// NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1
private const uint NMAX = 5552;
#if SUPPORTS_RUNTIME_INTRINSICS
private const int MinBufferSize = 64;
private const int BlockSize = 1 << 5;
// The C# compiler emits this as a compile-time constant embedded in the PE file.
private static ReadOnlySpan<byte> Tap1Tap2 => new byte[]
{
32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, // tap1
16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1 // tap2
};
#endif
/// <summary>
/// Calculates the Adler32 checksum with the bytes taken from the span.
/// </summary>
/// <param name="buffer">The readonly span of bytes.</param>
/// <returns>The <see cref="uint"/>.</returns>
[MethodImpl(InliningOptions.ShortMethod)]
public static uint Calculate(ReadOnlySpan<byte> buffer)
=> Calculate(SeedValue, buffer);
/// <summary>
/// Calculates the Adler32 checksum with the bytes taken from the span and seed.
/// </summary>
/// <param name="adler">The input Adler32 value.</param>
/// <param name="buffer">The readonly span of bytes.</param>
/// <returns>The <see cref="uint"/>.</returns>
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
public static uint Calculate(uint adler, ReadOnlySpan<byte> buffer)
{
if (buffer.IsEmpty)
{
return adler;
}
#if SUPPORTS_RUNTIME_INTRINSICS
if (Avx2.IsSupported && buffer.Length >= MinBufferSize)
{
return CalculateAvx2(adler, buffer);
}
if (Ssse3.IsSupported && buffer.Length >= MinBufferSize)
{
return CalculateSse(adler, buffer);
}
return CalculateScalar(adler, buffer);
#else
return CalculateScalar(adler, buffer);
#endif
}
// Based on https://github.com/chromium/chromium/blob/master/third_party/zlib/adler32_simd.c
#if SUPPORTS_RUNTIME_INTRINSICS
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
private static unsafe uint CalculateSse(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
// Process the data in blocks.
uint length = (uint)buffer.Length;
uint blocks = length / BlockSize;
length -= blocks * BlockSize;
fixed (byte* bufferPtr = &MemoryMarshal.GetReference(buffer))
{
fixed (byte* tapPtr = &MemoryMarshal.GetReference(Tap1Tap2))
{
byte* localBufferPtr = bufferPtr;
// _mm_setr_epi8 on x86
Vector128<sbyte> tap1 = Sse2.LoadVector128((sbyte*)tapPtr);
Vector128<sbyte> tap2 = Sse2.LoadVector128((sbyte*)(tapPtr + 0x10));
Vector128<byte> zero = Vector128<byte>.Zero;
var ones = Vector128.Create((short)1);
while (blocks > 0)
{
uint n = NMAX / BlockSize; /* The NMAX constraint. */
if (n > blocks)
{
n = blocks;
}
blocks -= n;
// Process n blocks of data. At most NMAX data bytes can be
// processed before s2 must be reduced modulo BASE.
Vector128<uint> v_ps = Vector128.CreateScalar(s1 * n);
Vector128<uint> v_s2 = Vector128.CreateScalar(s2);
Vector128<uint> v_s1 = Vector128<uint>.Zero;
do
{
// Load 32 input bytes.
Vector128<byte> bytes1 = Sse3.LoadDquVector128(localBufferPtr);
Vector128<byte> bytes2 = Sse3.LoadDquVector128(localBufferPtr + 0x10);
// Add previous block byte sum to v_ps.
v_ps = Sse2.Add(v_ps, v_s1);
// Horizontally add the bytes for s1, multiply-adds the
// bytes by [ 32, 31, 30, ... ] for s2.
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes1, zero).AsUInt32());
Vector128<short> mad1 = Ssse3.MultiplyAddAdjacent(bytes1, tap1);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad1, ones).AsUInt32());
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes2, zero).AsUInt32());
Vector128<short> mad2 = Ssse3.MultiplyAddAdjacent(bytes2, tap2);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad2, ones).AsUInt32());
localBufferPtr += BlockSize;
}
while (--n > 0);
v_s2 = Sse2.Add(v_s2, Sse2.ShiftLeftLogical(v_ps, 5));
// Sum epi32 ints v_s1(s2) and accumulate in s1(s2).
const byte S2301 = 0b1011_0001; // A B C D -> B A D C
const byte S1032 = 0b0100_1110; // A B C D -> C D A B
v_s1 = Sse2.Add(v_s1, Sse2.Shuffle(v_s1, S1032));
s1 += v_s1.ToScalar();
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S2301));
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S1032));
s2 = v_s2.ToScalar();
// Reduce.
s1 %= BASE;
s2 %= BASE;
}
if (length > 0)
{
HandleLeftOver(localBufferPtr, length, ref s1, ref s2);
}
return s1 | (s2 << 16);
}
}
}
// Based on: https://github.com/zlib-ng/zlib-ng/blob/develop/arch/x86/adler32_avx2.c
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
public static unsafe uint CalculateAvx2(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
uint length = (uint)buffer.Length;
fixed (byte* bufferPtr = &MemoryMarshal.GetReference(buffer))
{
byte* localBufferPtr = bufferPtr;
Vector256<byte> zero = Vector256<byte>.Zero;
var dot3v = Vector256.Create((short)1);
var dot2v = Vector256.Create(32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1);
// Process n blocks of data. At most NMAX data bytes can be
// processed before s2 must be reduced modulo BASE.
var vs1 = Vector256.CreateScalar(s1);
var vs2 = Vector256.CreateScalar(s2);
while (length >= 32)
{
int k = length < NMAX ? (int)length : (int)NMAX;
k -= k % 32;
length -= (uint)k;
Vector256<uint> vs10 = vs1;
Vector256<uint> vs3 = Vector256<uint>.Zero;
while (k >= 32)
{
// Load 32 input bytes.
Vector256<byte> block = Avx.LoadVector256(localBufferPtr);
// Sum of abs diff, resulting in 2 x int32's
Vector256<ushort> vs1sad = Avx2.SumAbsoluteDifferences(block, zero);
vs1 = Avx2.Add(vs1, vs1sad.AsUInt32());
vs3 = Avx2.Add(vs3, vs10);
// sum 32 uint8s to 16 shorts.
Vector256<short> vshortsum2 = Avx2.MultiplyAddAdjacent(block, dot2v);
// sum 16 shorts to 8 uint32s.
Vector256<int> vsum2 = Avx2.MultiplyAddAdjacent(vshortsum2, dot3v);
vs2 = Avx2.Add(vsum2.AsUInt32(), vs2);
vs10 = vs1;
localBufferPtr += BlockSize;
k -= 32;
}
// Defer the multiplication with 32 to outside of the loop.
vs3 = Avx2.ShiftLeftLogical(vs3, 5);
vs2 = Avx2.Add(vs2, vs3);
s1 = (uint)Numerics.EvenReduceSum(vs1.AsInt32());
s2 = (uint)Numerics.ReduceSum(vs2.AsInt32());
s1 %= BASE;
s2 %= BASE;
vs1 = Vector256.CreateScalar(s1);
vs2 = Vector256.CreateScalar(s2);
}
if (length > 0)
{
HandleLeftOver(localBufferPtr, length, ref s1, ref s2);
}
return s1 | (s2 << 16);
}
}
private static unsafe void HandleLeftOver(byte* localBufferPtr, uint length, ref uint s1, ref uint s2)
{
if (length >= 16)
{
s2 += s1 += localBufferPtr[0];
s2 += s1 += localBufferPtr[1];
s2 += s1 += localBufferPtr[2];
s2 += s1 += localBufferPtr[3];
s2 += s1 += localBufferPtr[4];
s2 += s1 += localBufferPtr[5];
s2 += s1 += localBufferPtr[6];
s2 += s1 += localBufferPtr[7];
s2 += s1 += localBufferPtr[8];
s2 += s1 += localBufferPtr[9];
s2 += s1 += localBufferPtr[10];
s2 += s1 += localBufferPtr[11];
s2 += s1 += localBufferPtr[12];
s2 += s1 += localBufferPtr[13];
s2 += s1 += localBufferPtr[14];
s2 += s1 += localBufferPtr[15];
localBufferPtr += 16;
length -= 16;
}
while (length-- > 0)
{
s2 += s1 += *localBufferPtr++;
}
if (s1 >= BASE)
{
s1 -= BASE;
}
s2 %= BASE;
}
#endif
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
private static unsafe uint CalculateScalar(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
uint k;
fixed (byte* bufferPtr = buffer)
{
var localBufferPtr = bufferPtr;
uint length = (uint)buffer.Length;
while (length > 0)
{
k = length < NMAX ? length : NMAX;
length -= k;
while (k >= 16)
{
s2 += s1 += localBufferPtr[0];
s2 += s1 += localBufferPtr[1];
s2 += s1 += localBufferPtr[2];
s2 += s1 += localBufferPtr[3];
s2 += s1 += localBufferPtr[4];
s2 += s1 += localBufferPtr[5];
s2 += s1 += localBufferPtr[6];
s2 += s1 += localBufferPtr[7];
s2 += s1 += localBufferPtr[8];
s2 += s1 += localBufferPtr[9];
s2 += s1 += localBufferPtr[10];
s2 += s1 += localBufferPtr[11];
s2 += s1 += localBufferPtr[12];
s2 += s1 += localBufferPtr[13];
s2 += s1 += localBufferPtr[14];
s2 += s1 += localBufferPtr[15];
localBufferPtr += 16;
k -= 16;
}
while (k-- > 0)
{
s2 += s1 += *localBufferPtr++;
}
s1 %= BASE;
s2 %= BASE;
}
return (s2 << 16) | s1;
}
}
}
}
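Both versions of this file implement Adler-32 as defined in RFC 1950: s1 starts at 1 and accumulates the input bytes, s2 accumulates the running values of s1, both modulo 65521, and the result packs them as (s2 << 16) | s1. A quick sanity check against the commonly cited "Wikipedia" test vector (the class is internal, so this only compiles inside the assembly):

// Adler-32 of the ASCII string "Wikipedia" is 0x11E60398:
// s1 = 1 + 87+105+107+105+112+101+100+105+97 = 0x0398, s2 = 0x11E6.
byte[] data = System.Text.Encoding.ASCII.GetBytes("Wikipedia");
uint checksum = Adler32.Calculate(data);
System.Console.WriteLine(checksum == 0x11E60398u); // True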


@@ -0,0 +1,285 @@
// Copyright (c) Six Labors and contributors.
// Licensed under the GNU Affero General Public License, Version 3.
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
#if !NETSTANDARD2_0 && !NETSTANDARD2_1
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
#endif
namespace SharpCompress.Algorithms
{
/// <summary>
/// Calculates the 32-bit Adler checksum of a given buffer according to
/// RFC 1950 (ZLIB Compressed Data Format Specification version 3.3).
/// </summary>
internal static class Adler32
{
/// <summary>
/// The default initial seed value of an Adler32 checksum calculation.
/// </summary>
public const uint SeedValue = 1U;
#if !NETSTANDARD2_0 && !NETSTANDARD2_1
private const int MinBufferSize = 64;
#endif
// Largest prime smaller than 65536
private const uint BASE = 65521;
// NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1
private const uint NMAX = 5552;
/// <summary>
/// Calculates the Adler32 checksum with the bytes taken from the span.
/// </summary>
/// <param name="buffer">The readonly span of bytes.</param>
/// <returns>The <see cref="uint"/>.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static uint Calculate(ReadOnlySpan<byte> buffer)
{
return Calculate(SeedValue, buffer);
}
/// <summary>
/// Calculates the Adler32 checksum with the bytes taken from the span and seed.
/// </summary>
/// <param name="adler">The input Adler32 value.</param>
/// <param name="buffer">The readonly span of bytes.</param>
/// <returns>The <see cref="uint"/>.</returns>
public static uint Calculate(uint adler, ReadOnlySpan<byte> buffer)
{
if (buffer.IsEmpty)
{
return SeedValue;
}
#if !NETSTANDARD2_0 && !NETSTANDARD2_1
if (Sse3.IsSupported && buffer.Length >= MinBufferSize)
{
return CalculateSse(adler, buffer);
}
return CalculateScalar(adler, buffer);
#else
return CalculateScalar(adler, buffer);
#endif
}
// Based on https://github.com/chromium/chromium/blob/master/third_party/zlib/adler32_simd.c
#if !NETSTANDARD2_0 && !NETSTANDARD2_1
private static unsafe uint CalculateSse(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
// Process the data in blocks.
const int BLOCK_SIZE = 1 << 5;
uint length = (uint)buffer.Length;
uint blocks = length / BLOCK_SIZE;
length -= blocks * BLOCK_SIZE;
int index = 0;
fixed (byte* bufferPtr = &buffer[0])
{
index += (int)blocks * BLOCK_SIZE;
var localBufferPtr = bufferPtr;
// _mm_setr_epi8 on x86
var tap1 = Vector128.Create(32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17);
var tap2 = Vector128.Create(16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1);
Vector128<byte> zero = Vector128<byte>.Zero;
var ones = Vector128.Create((short)1);
while (blocks > 0)
{
uint n = NMAX / BLOCK_SIZE; /* The NMAX constraint. */
if (n > blocks)
{
n = blocks;
}
blocks -= n;
// Process n blocks of data. At most NMAX data bytes can be
// processed before s2 must be reduced modulo BASE.
Vector128<int> v_ps = Vector128.CreateScalar(s1 * n).AsInt32();
Vector128<int> v_s2 = Vector128.CreateScalar(s2).AsInt32();
Vector128<int> v_s1 = Vector128<int>.Zero;
do
{
// Load 32 input bytes.
Vector128<byte> bytes1 = Sse3.LoadDquVector128(localBufferPtr);
Vector128<byte> bytes2 = Sse3.LoadDquVector128(localBufferPtr + 16);
// Add previous block byte sum to v_ps.
v_ps = Sse2.Add(v_ps, v_s1);
// Horizontally add the bytes for s1, multiply-adds the
// bytes by [ 32, 31, 30, ... ] for s2.
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes1, zero).AsInt32());
Vector128<short> mad1 = Ssse3.MultiplyAddAdjacent(bytes1, tap1);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad1, ones));
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes2, zero).AsInt32());
Vector128<short> mad2 = Ssse3.MultiplyAddAdjacent(bytes2, tap2);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad2, ones));
localBufferPtr += BLOCK_SIZE;
}
while (--n > 0);
v_s2 = Sse2.Add(v_s2, Sse2.ShiftLeftLogical(v_ps, 5));
// Sum epi32 ints v_s1(s2) and accumulate in s1(s2).
const byte S2301 = 0b1011_0001; // A B C D -> B A D C
const byte S1032 = 0b0100_1110; // A B C D -> C D A B
v_s1 = Sse2.Add(v_s1, Sse2.Shuffle(v_s1, S2301));
v_s1 = Sse2.Add(v_s1, Sse2.Shuffle(v_s1, S1032));
s1 += (uint)v_s1.ToScalar();
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S2301));
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S1032));
s2 = (uint)v_s2.ToScalar();
// Reduce.
s1 %= BASE;
s2 %= BASE;
}
}
ref byte bufferRef = ref MemoryMarshal.GetReference(buffer);
if (length > 0)
{
if (length >= 16)
{
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
length -= 16;
}
while (length-- > 0)
{
s2 += s1 += Unsafe.Add(ref bufferRef, index++);
}
if (s1 >= BASE)
{
s1 -= BASE;
}
s2 %= BASE;
}
return s1 | (s2 << 16);
}
#endif
private static uint CalculateScalar(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
uint k;
ref byte bufferRef = ref MemoryMarshal.GetReference<byte>(buffer);
uint length = (uint)buffer.Length;
int index = 0;
while (length > 0)
{
k = length < NMAX ? length : NMAX;
length -= k;
while (k >= 16)
{
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
k -= 16;
}
if (k != 0)
{
do
{
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
}
while (--k != 0);
}
s1 %= BASE;
s2 %= BASE;
}
return (s2 << 16) | s1;
}
}
}
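Stripped of the 16-byte unrolling and the SIMD paths, the recurrence both files compute is small. BASE = 65521 is the largest prime below 65536, and NMAX = 5552 is how many bytes can be absorbed before the modulo must run to keep the 32-bit sums from overflowing; the sketch below instead reduces on every byte, trading speed for clarity:

// Minimal scalar Adler-32 reference; equivalent to CalculateScalar above
// without the NMAX batching or the unrolled inner loop.
static uint Adler32Reference(System.ReadOnlySpan<byte> buffer, uint adler = 1)
{
    uint s1 = adler & 0xFFFF;          // low 16 bits: byte sum
    uint s2 = (adler >> 16) & 0xFFFF;  // high 16 bits: sum of sums
    foreach (byte b in buffer)
    {
        s1 = (s1 + b) % 65521;   // BASE
        s2 = (s2 + s1) % 65521;
    }
    return (s2 << 16) | s1;
}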


@@ -1,9 +1,8 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives
@@ -24,14 +23,28 @@ namespace SharpCompress.Archives
protected ReaderOptions ReaderOptions { get; }
private bool disposed;
protected SourceStream SrcStream;
internal AbstractArchive(ArchiveType type, SourceStream srcStream)
internal AbstractArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerOptions)
{
Type = type;
ReaderOptions = srcStream.ReaderOptions;
SrcStream = srcStream;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(SrcStream));
if (!fileInfo.Exists)
{
throw new ArgumentException("File does not exist: " + fileInfo.FullName);
}
ReaderOptions = readerOptions;
readerOptions.LeaveStreamOpen = false;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(fileInfo));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
}
protected abstract IEnumerable<TVolume> LoadVolumes(FileInfo file);
internal AbstractArchive(ArchiveType type, IEnumerable<Stream> streams, ReaderOptions readerOptions)
{
Type = type;
ReaderOptions = readerOptions;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(streams.Select(CheckStreams)));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
}
@@ -85,7 +98,7 @@ namespace SharpCompress.Archives
/// </summary>
public virtual long TotalUncompressSize => Entries.Aggregate(0L, (total, cf) => total + cf.Size);
protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream srcStream);
protected abstract IEnumerable<TVolume> LoadVolumes(IEnumerable<Stream> streams);
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);
IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();
@@ -98,8 +111,6 @@ namespace SharpCompress.Archives
{
lazyVolumes.ForEach(v => v.Dispose());
lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
if (SrcStream != null)
SrcStream.Dispose();
disposed = true;
}
}
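The constructor changes above swap FileInfo and stream-enumerable inputs for a single SourceStream, but the lazy shape is unchanged: LoadVolumes materializes the volumes first, and LoadEntries enumerates entries from them on demand. A hedged usage sketch; ArchiveFactory.Open appears elsewhere in this diff, Key and Size are standard IArchiveEntry members, and the file name is invented:

using System;
using System.IO;
using SharpCompress.Archives;

// Opening defers all work; enumerating Entries walks volumes, then entries.
using var archive = ArchiveFactory.Open(new FileInfo("example.zip"));
foreach (var entry in archive.Entries)
{
    Console.WriteLine($"{entry.Key}: {entry.Size} bytes");
}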


@@ -1,9 +1,8 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Writers;
@@ -41,8 +40,13 @@ namespace SharpCompress.Archives
{
}
internal AbstractWritableArchive(ArchiveType type, SourceStream srcStream)
: base(type, srcStream)
internal AbstractWritableArchive(ArchiveType type, Stream stream, ReaderOptions readerFactoryOptions)
: base(type, stream.AsEnumerable(), readerFactoryOptions)
{
}
internal AbstractWritableArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerFactoryOptions)
: base(type, fileInfo, readerFactoryOptions)
{
}


@@ -1,14 +1,12 @@
using System;
using System.Collections.Generic;
using System;
using System.IO;
using System.Linq;
using SharpCompress.Archives.Dmg;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Rar;
using SharpCompress.Archives.SevenZip;
using SharpCompress.Archives.Tar;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives
@@ -29,27 +27,42 @@ namespace SharpCompress.Archives
throw new ArgumentException("Stream should be readable and seekable");
}
readerOptions ??= new ReaderOptions();
ArchiveType? type;
IsArchive(stream, out type); //test and reset stream position
if (type != null)
if (ZipArchive.IsZipFile(stream, null))
{
switch (type.Value)
{
case ArchiveType.Zip:
return ZipArchive.Open(stream, readerOptions);
case ArchiveType.SevenZip:
return SevenZipArchive.Open(stream, readerOptions);
case ArchiveType.GZip:
return GZipArchive.Open(stream, readerOptions);
case ArchiveType.Rar:
return RarArchive.Open(stream, readerOptions);
case ArchiveType.Tar:
return TarArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
return ZipArchive.Open(stream, readerOptions);
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip, LZip");
stream.Seek(0, SeekOrigin.Begin);
if (SevenZipArchive.IsSevenZipFile(stream))
{
stream.Seek(0, SeekOrigin.Begin);
return SevenZipArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (GZipArchive.IsGZipFile(stream))
{
stream.Seek(0, SeekOrigin.Begin);
return GZipArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (DmgArchive.IsDmgFile(stream))
{
stream.Seek(0, SeekOrigin.Begin);
return DmgArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (RarArchive.IsRarFile(stream, readerOptions))
{
stream.Seek(0, SeekOrigin.Begin);
return RarArchive.Open(stream, readerOptions);
}
stream.Seek(0, SeekOrigin.Begin);
if (TarArchive.IsTarFile(stream))
{
stream.Seek(0, SeekOrigin.Begin);
return TarArchive.Open(stream, readerOptions);
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip, LZip, Dmg");
}
public static IWritableArchive Create(ArchiveType type)
@@ -84,110 +97,38 @@ namespace SharpCompress.Archives
fileInfo.CheckNotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
ArchiveType? type;
using (Stream stream = fileInfo.OpenRead())
using var stream = fileInfo.OpenRead();
if (ZipArchive.IsZipFile(stream, null))
{
IsArchive(stream, out type); //test and reset stream position
if (type != null)
{
switch (type.Value)
{
case ArchiveType.Zip:
return ZipArchive.Open(fileInfo, options);
case ArchiveType.SevenZip:
return SevenZipArchive.Open(fileInfo, options);
case ArchiveType.GZip:
return GZipArchive.Open(fileInfo, options);
case ArchiveType.Rar:
return RarArchive.Open(fileInfo, options);
case ArchiveType.Tar:
return TarArchive.Open(fileInfo, options);
}
}
return ZipArchive.Open(fileInfo, options);
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="options"></param>
public static IArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? options = null)
{
fileInfos.CheckNotNull(nameof(fileInfos));
FileInfo[] files = fileInfos.ToArray();
if (files.Length == 0)
throw new InvalidOperationException("No files to open");
FileInfo fileInfo = files[0];
if (files.Length == 1)
return Open(fileInfo, options);
fileInfo.CheckNotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
ArchiveType? type;
using (Stream stream = fileInfo.OpenRead())
IsArchive(stream, out type); //test and reset stream position
if (type != null)
stream.Seek(0, SeekOrigin.Begin);
if (SevenZipArchive.IsSevenZipFile(stream))
{
switch (type.Value)
{
case ArchiveType.Zip:
return ZipArchive.Open(files, options);
case ArchiveType.SevenZip:
return SevenZipArchive.Open(files, options);
case ArchiveType.GZip:
return GZipArchive.Open(files, options);
case ArchiveType.Rar:
return RarArchive.Open(files, options);
case ArchiveType.Tar:
return TarArchive.Open(files, options);
}
return SevenZipArchive.Open(fileInfo, options);
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
}
/// <summary>
/// Constructor with IEnumerable Stream objects, multi and split support.
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
public static IArchive Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
{
streams.CheckNotNull(nameof(streams));
if (streams.Count() == 0)
throw new InvalidOperationException("No streams");
if (streams.Count() == 1)
return Open(streams.First(), options);
options ??= new ReaderOptions();
ArchiveType? type;
using (Stream stream = streams.First())
IsArchive(stream, out type); //test and reset stream position
if (type != null)
stream.Seek(0, SeekOrigin.Begin);
if (GZipArchive.IsGZipFile(stream))
{
switch (type.Value)
{
case ArchiveType.Zip:
return ZipArchive.Open(streams, options);
case ArchiveType.SevenZip:
return SevenZipArchive.Open(streams, options);
case ArchiveType.GZip:
return GZipArchive.Open(streams, options);
case ArchiveType.Rar:
return RarArchive.Open(streams, options);
case ArchiveType.Tar:
return TarArchive.Open(streams, options);
}
return GZipArchive.Open(fileInfo, options);
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
stream.Seek(0, SeekOrigin.Begin);
if (DmgArchive.IsDmgFile(stream))
{
stream.Seek(0, SeekOrigin.Begin);
return DmgArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (RarArchive.IsRarFile(stream, options))
{
return RarArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (TarArchive.IsTarFile(stream))
{
return TarArchive.Open(fileInfo, options);
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip, Dmg");
}
/// <summary>
@@ -202,95 +143,5 @@ namespace SharpCompress.Archives
entry.WriteToDirectory(destinationDirectory, options);
}
}
public static bool IsArchive(string filePath, out ArchiveType? type)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
using (Stream s = File.OpenRead(filePath))
return IsArchive(s, out type);
}
private static bool IsArchive(Stream stream, out ArchiveType? type)
{
type = null;
stream.CheckNotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException("Stream should be readable and seekable");
}
if (ZipArchive.IsZipFile(stream, null))
type = ArchiveType.Zip;
stream.Seek(0, SeekOrigin.Begin);
if (type == null)
{
if (SevenZipArchive.IsSevenZipFile(stream))
type = ArchiveType.SevenZip;
stream.Seek(0, SeekOrigin.Begin);
}
if (type == null)
{
if (GZipArchive.IsGZipFile(stream))
type = ArchiveType.GZip;
stream.Seek(0, SeekOrigin.Begin);
}
if (type == null)
{
if (RarArchive.IsRarFile(stream))
type = ArchiveType.Rar;
stream.Seek(0, SeekOrigin.Begin);
}
if (type == null)
{
if (TarArchive.IsTarFile(stream))
type = ArchiveType.Tar;
stream.Seek(0, SeekOrigin.Begin);
}
if (type == null) //test multipartzip as it could find zips in other non compressed archive types?
{
if (ZipArchive.IsZipMulti(stream)) //test the zip (last) file of a multipart zip
type = ArchiveType.Zip;
stream.Seek(0, SeekOrigin.Begin);
}
return type != null;
}
/// <summary>
/// From a passed in archive (zip, rar, 7z, 001), return all parts.
/// </summary>
/// <param name="part1"></param>
/// <returns></returns>
public static IEnumerable<string> GetFileParts(string part1)
{
part1.CheckNotNullOrEmpty(nameof(part1));
return GetFileParts(new FileInfo(part1)).Select(a => a.FullName);
}
/// <summary>
/// From a passed in archive (zip, rar, 7z, 001), return all parts.
/// </summary>
/// <param name="part1"></param>
/// <returns></returns>
public static IEnumerable<FileInfo> GetFileParts(FileInfo part1)
{
part1.CheckNotNull(nameof(part1));
yield return part1;
int i = 1;
FileInfo? part = RarArchiveVolumeFactory.GetFilePart(i++, part1);
if (part != null)
{
yield return part;
while ((part = RarArchiveVolumeFactory.GetFilePart(i++, part1)) != null) //tests split too
yield return part;
}
else
{
i = 1;
while ((part = ZipArchiveVolumeFactory.GetFilePart(i++, part1)) != null) //tests split too
yield return part;
}
}
}
}
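With the switch on IsArchive replaced by sequential probes, Open rewinds the stream before each Is*File test, so the source has to be seekable. A hedged usage sketch; "sample.dmg" is a placeholder path:

using System;
using System.IO;
using SharpCompress.Archives;

// The factory probes Zip, 7Zip, GZip, Dmg, Rar and Tar in order.
using (var archive = ArchiveFactory.Open(new FileInfo("sample.dmg")))
{
    foreach (var entry in archive.Entries)
    {
        if (!entry.IsDirectory)
            Console.WriteLine($"{entry.Key}: {entry.Size} bytes");
    }
}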

View File

@@ -1,29 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Readers;
using System.Linq;
using System.Text;
using SharpCompress.Common.Rar.Headers;
using System.Text.RegularExpressions;
namespace SharpCompress.Archives
{
internal abstract class ArchiveVolumeFactory
{
internal static FileInfo? GetFilePart(int index, FileInfo part1) //base the name on the first part
{
FileInfo? item = null;
//split 001, 002 ...
Match m = Regex.Match(part1.Name, @"^(.*\.)([0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(Path.Combine(part1.DirectoryName!, String.Concat(m.Groups[1].Value, (index + 1).ToString().PadLeft(m.Groups[2].Value.Length, '0'))));
if (item != null && item.Exists)
return item;
return null;
}
}
}
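GetFilePart derives the next split-volume name from the first part, zero-padding the incremented index to the original width; a result is only returned if that file actually exists. A small sketch of the name arithmetic alone:

using System;
using System.Text.RegularExpressions;

// For part "data.001" and index 1, the next candidate is "data.002".
var m = Regex.Match("data.001", @"^(.*\.)([0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
{
    int index = 1;
    string next = m.Groups[1].Value
        + (index + 1).ToString().PadLeft(m.Groups[2].Value.Length, '0');
    Console.WriteLine(next); // data.002
}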

View File

@@ -0,0 +1,117 @@
using SharpCompress.Common;
using SharpCompress.Common.Dmg;
using SharpCompress.Common.Dmg.Headers;
using SharpCompress.Common.Dmg.HFS;
using SharpCompress.Readers;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace SharpCompress.Archives.Dmg
{
public class DmgArchive : AbstractArchive<DmgArchiveEntry, DmgVolume>
{
private readonly string _fileName;
internal DmgArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.Dmg, fileInfo, readerOptions)
{
_fileName = fileInfo.FullName;
}
internal DmgArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.Dmg, stream.AsEnumerable(), readerOptions)
{
_fileName = string.Empty;
}
protected override IReader CreateReaderForSolidExtraction()
=> new DmgReader(ReaderOptions, this, _fileName);
protected override IEnumerable<DmgArchiveEntry> LoadEntries(IEnumerable<DmgVolume> volumes)
=> volumes.Single().LoadEntries();
protected override IEnumerable<DmgVolume> LoadVolumes(FileInfo file)
=> new DmgVolume(this, file.OpenRead(), file.FullName, ReaderOptions).AsEnumerable();
protected override IEnumerable<DmgVolume> LoadVolumes(IEnumerable<Stream> streams)
=> new DmgVolume(this, streams.Single(), string.Empty, ReaderOptions).AsEnumerable();
public static bool IsDmgFile(FileInfo fileInfo)
{
if (!fileInfo.Exists) return false;
using var stream = fileInfo.OpenRead();
return IsDmgFile(stream);
}
public static bool IsDmgFile(Stream stream)
{
long headerPos = stream.Length - DmgHeader.HeaderSize;
if (headerPos < 0) return false;
stream.Position = headerPos;
return DmgHeader.TryRead(stream, out _);
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static DmgArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static DmgArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new DmgArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static DmgArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
return new DmgArchive(stream, readerOptions ?? new ReaderOptions());
}
private sealed class DmgReader : AbstractReader<DmgEntry, DmgVolume>
{
private readonly DmgArchive _archive;
private readonly string _fileName;
private readonly Stream? _partitionStream;
public override DmgVolume Volume { get; }
internal DmgReader(ReaderOptions readerOptions, DmgArchive archive, string fileName)
: base(readerOptions, ArchiveType.Dmg)
{
_archive = archive;
_fileName = fileName;
Volume = archive.Volumes.Single();
using var compressedStream = DmgUtil.LoadHFSPartitionStream(Volume.Stream, Volume.Header);
_partitionStream = compressedStream?.Decompress();
}
protected override IEnumerable<DmgEntry> GetEntries(Stream stream)
{
if (_partitionStream is null) return Array.Empty<DmgArchiveEntry>();
else return HFSUtil.LoadEntriesFromPartition(_partitionStream, _fileName, _archive);
}
}
}
}
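IsDmgFile seeks to Length - DmgHeader.HeaderSize and tries to parse the trailing header, so detection needs a seekable source. A hedged usage sketch; "image.dmg" is a placeholder path:

using System;
using System.IO;
using SharpCompress.Archives.Dmg;

var file = new FileInfo("image.dmg");
if (DmgArchive.IsDmgFile(file))
{
    using var archive = DmgArchive.Open(file);
    foreach (var entry in archive.Entries)
        Console.WriteLine($"{(entry.IsDirectory ? "dir " : "file")} {entry.Key}");
}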

View File

@@ -0,0 +1,32 @@
using SharpCompress.Common.Dmg;
using SharpCompress.Common.Dmg.HFS;
using System;
using System.IO;
namespace SharpCompress.Archives.Dmg
{
public sealed class DmgArchiveEntry : DmgEntry, IArchiveEntry
{
private readonly Stream? _stream;
public bool IsComplete { get; } = true;
public IArchive Archive { get; }
internal DmgArchiveEntry(Stream? stream, DmgArchive archive, HFSCatalogRecord record, string path, DmgFilePart part)
: base(record, path, stream?.Length ?? 0, part)
{
_stream = stream;
Archive = archive;
}
public Stream OpenEntryStream()
{
if (IsDirectory)
throw new NotSupportedException("Directories cannot be opened as stream");
_stream!.Position = 0;
return _stream;
}
}
}
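OpenEntryStream throws for directory entries, so callers should filter on IsDirectory first. A hedged extraction sketch; ExtractAll is an illustrative helper, not library API:

using System.IO;
using System.Linq;
using SharpCompress.Archives.Dmg;

static void ExtractAll(DmgArchive archive, string destination)
{
    foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
    {
        var target = Path.Combine(destination, entry.Key);
        var dir = Path.GetDirectoryName(target);
        if (!string.IsNullOrEmpty(dir))
            Directory.CreateDirectory(dir);
        using var src = entry.OpenEntryStream();
        using var dst = File.Create(target);
        src.CopyTo(dst);
    }
}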

View File

@@ -1,10 +1,9 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
using SharpCompress.Writers;
@@ -33,31 +32,7 @@ namespace SharpCompress.Archives.GZip
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new GZipArchive(new SourceStream(fileInfo, i => ArchiveVolumeFactory.GetFilePart(i, fileInfo), readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? readerOptions = null)
{
fileInfos.CheckNotNull(nameof(fileInfos));
FileInfo[] files = fileInfos.ToArray();
return new GZipArchive(new SourceStream(files[0], i => i < files.Length ? files[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.CheckNotNull(nameof(streams));
Stream[] strms = streams.ToArray();
return new GZipArchive(new SourceStream(strms[0], i => i < strms.Length ? strms[i] : null, readerOptions ?? new ReaderOptions()));
return new GZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
/// <summary>
@@ -68,7 +43,7 @@ namespace SharpCompress.Archives.GZip
public static GZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
return new GZipArchive(new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions()));
return new GZipArchive(stream, readerOptions ?? new ReaderOptions());
}
public static GZipArchive Create()
@@ -77,20 +52,20 @@ namespace SharpCompress.Archives.GZip
}
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="srcStream"></param>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
internal GZipArchive(SourceStream srcStream)
: base(ArchiveType.Tar, srcStream)
internal GZipArchive(FileInfo fileInfo, ReaderOptions options)
: base(ArchiveType.GZip, fileInfo, options)
{
}
protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream srcStream)
protected override IEnumerable<GZipVolume> LoadVolumes(FileInfo file)
{
srcStream.LoadAllParts();
return srcStream.Streams.Select(a => new GZipVolume(a, ReaderOptions));
return new GZipVolume(file, ReaderOptions).AsEnumerable();
}
public static bool IsGZipFile(string filePath)
{
return IsGZipFile(new FileInfo(filePath));
@@ -139,6 +114,16 @@ namespace SharpCompress.Archives.GZip
return true;
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
internal GZipArchive(Stream stream, ReaderOptions options)
: base(ArchiveType.GZip, stream, options)
{
}
internal GZipArchive()
: base(ArchiveType.GZip)
{
@@ -175,6 +160,11 @@ namespace SharpCompress.Archives.GZip
}
}
protected override IEnumerable<GZipVolume> LoadVolumes(IEnumerable<Stream> streams)
{
return new GZipVolume(streams.First(), ReaderOptions).AsEnumerable();
}
protected override IEnumerable<GZipArchiveEntry> LoadEntries(IEnumerable<GZipVolume> volumes)
{
Stream stream = volumes.Single().Stream;

View File

@@ -1,4 +1,4 @@
#nullable disable
using System;
using System.Collections.Generic;
@@ -54,7 +54,7 @@ namespace SharpCompress.Archives.GZip
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return NonDisposingStream.Create(stream);
return new NonDisposingStream(stream);
}
internal override void Close()
@@ -65,4 +65,4 @@ namespace SharpCompress.Archives.GZip
}
}
}
}
}
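The hunk swaps the NonDisposingStream.Create factory for a direct constructor call. A sketch of what such a wrapper plausibly does, on the assumption that its job is to shield the shared volume stream from per-entry disposal; this is not the library's actual type:

using System.IO;

sealed class NonDisposingWrapper : Stream
{
    private readonly Stream _inner;
    public NonDisposingWrapper(Stream inner) => _inner = inner;

    public override bool CanRead => _inner.CanRead;
    public override bool CanSeek => _inner.CanSeek;
    public override bool CanWrite => _inner.CanWrite;
    public override long Length => _inner.Length;
    public override long Position { get => _inner.Position; set => _inner.Position = value; }

    public override void Flush() => _inner.Flush();
    public override int Read(byte[] buffer, int offset, int count) => _inner.Read(buffer, offset, count);
    public override long Seek(long offset, SeekOrigin origin) => _inner.Seek(offset, origin);
    public override void SetLength(long value) => _inner.SetLength(value);
    public override void Write(byte[] buffer, int offset, int count) => _inner.Write(buffer, offset, count);

    protected override void Dispose(bool disposing) { /* deliberately keep _inner open */ }
}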

View File

@@ -5,26 +5,39 @@ using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressors.Rar;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Rar;
namespace SharpCompress.Archives.Rar
{
public class
RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
{
internal Lazy<IRarUnpack> UnpackV2017 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV2017.Unpack());
internal Lazy<IRarUnpack> UnpackV1 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV1.Unpack());
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
internal RarArchive(FileInfo fileInfo, ReaderOptions options)
: base(ArchiveType.Rar, fileInfo, options)
{
}
protected override IEnumerable<RarVolume> LoadVolumes(FileInfo file)
{
return RarArchiveVolumeFactory.GetParts(file, ReaderOptions);
}
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// Takes multiple seekable Streams for a multi-part archive
/// </summary>
/// <param name="srcStream"></param>
/// <param name="streams"></param>
/// <param name="options"></param>
internal RarArchive(SourceStream srcStream)
: base(ArchiveType.Rar, srcStream)
internal RarArchive(IEnumerable<Stream> streams, ReaderOptions options)
: base(ArchiveType.Rar, streams, options)
{
}
@@ -33,20 +46,9 @@ namespace SharpCompress.Archives.Rar
return RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions);
}
protected override IEnumerable<RarVolume> LoadVolumes(SourceStream srcStream)
protected override IEnumerable<RarVolume> LoadVolumes(IEnumerable<Stream> streams)
{
base.SrcStream.LoadAllParts(); //request all streams
Stream[] streams = base.SrcStream.Streams.ToArray();
if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions)) //test part 2 - true = multipart not split
{
base.SrcStream.IsVolumes = true;
streams[1].Position = 0;
base.SrcStream.Position = 0;
return srcStream.Streams.Select(a => new StreamRarArchiveVolume(a, ReaderOptions));
}
else //split mode or single file
return new StreamRarArchiveVolume(base.SrcStream, ReaderOptions).AsEnumerable();
return RarArchiveVolumeFactory.GetParts(streams, ReaderOptions);
}
protected override IReader CreateReaderForSolidExtraction()
@@ -67,8 +69,7 @@ namespace SharpCompress.Archives.Rar
public static RarArchive Open(string filePath, ReaderOptions? options = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
FileInfo fileInfo = new FileInfo(filePath);
return new RarArchive(new SourceStream(fileInfo, i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo), options ?? new ReaderOptions()));
return new RarArchive(new FileInfo(filePath), options ?? new ReaderOptions());
}
/// <summary>
@@ -79,7 +80,7 @@ namespace SharpCompress.Archives.Rar
public static RarArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new RarArchive(new SourceStream(fileInfo, i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo), options ?? new ReaderOptions()));
return new RarArchive(fileInfo, options ?? new ReaderOptions());
}
/// <summary>
@@ -90,34 +91,20 @@ namespace SharpCompress.Archives.Rar
public static RarArchive Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
return new RarArchive(new SourceStream(stream, i => null, options ?? new ReaderOptions()));
return Open(stream.AsEnumerable(), options ?? new ReaderOptions());
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static RarArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? readerOptions = null)
{
fileInfos.CheckNotNull(nameof(fileInfos));
FileInfo[] files = fileInfos.ToArray();
return new RarArchive(new SourceStream(files[0], i => i < files.Length ? files[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all stream parts passed in
/// Takes multiple seekable Streams for a multi-part archive
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
/// <param name="options"></param>
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
{
streams.CheckNotNull(nameof(streams));
Stream[] strms = streams.ToArray();
return new RarArchive(new SourceStream(strms[0], i => i < strms.Length ? strms[i] : null, readerOptions ?? new ReaderOptions()));
return new RarArchive(streams, options ?? new ReaderOptions());
}
public static bool IsRarFile(string filePath)
{
return IsRarFile(new FileInfo(filePath));

View File

@@ -21,7 +21,6 @@ namespace SharpCompress.Archives.Rar
this.parts = parts.ToList();
this.archive = archive;
this.readerOptions = readerOptions;
this.IsSolid = this.FileHeader.IsSolid;
}
public override CompressionType CompressionType => CompressionType.Rar;

View File

@@ -1,4 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Rar;
@@ -6,35 +6,135 @@ using SharpCompress.Readers;
using System.Linq;
using System.Text;
using SharpCompress.Common.Rar.Headers;
using System.Text.RegularExpressions;
namespace SharpCompress.Archives.Rar
{
internal static class RarArchiveVolumeFactory
{
internal static FileInfo? GetFilePart(int index, FileInfo part1) //base the name on the first part
internal static IEnumerable<RarVolume> GetParts(IEnumerable<Stream> streams, ReaderOptions options)
{
FileInfo? item = null;
//new style rar - .part1 | .part01 | .part001 ...
Match m = Regex.Match(part1.Name, @"^(.*\.part)([0-9]+)(\.rar)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(Path.Combine(part1.DirectoryName!, String.Concat(m.Groups[1].Value, (index + 1).ToString().PadLeft(m.Groups[2].Value.Length, '0'), m.Groups[3].Value)));
else
foreach (Stream s in streams)
{
//old style - ...rar, .r00, .r01 ...
m = Regex.Match(part1.Name, @"^(.*\.r)(ar|[0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(Path.Combine(part1.DirectoryName!, String.Concat(m.Groups[1].Value, index == 0 ? "ar" : (index - 1).ToString().PadLeft(m.Groups[2].Value.Length, '0'))));
else //split .001, .002 ....
return ArchiveVolumeFactory.GetFilePart(index, part1);
if (!s.CanRead || !s.CanSeek)
{
throw new ArgumentException("Stream is not readable and seekable");
}
StreamRarArchiveVolume part = new StreamRarArchiveVolume(s, options);
yield return part;
}
if (item != null && item.Exists)
return item;
return null; //no more items
}
internal static IEnumerable<RarVolume> GetParts(FileInfo fileInfo, ReaderOptions options)
{
FileInfoRarArchiveVolume part = new FileInfoRarArchiveVolume(fileInfo, options);
yield return part;
ArchiveHeader ah = part.ArchiveHeader;
if (!ah.IsVolume)
{
yield break; //if file isn't volume then there is no reason to look
}
fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart)!;
//we use fileinfo because rar is dumb and looks at file names rather than archive info for another volume
while (fileInfo != null && fileInfo.Exists)
{
part = new FileInfoRarArchiveVolume(fileInfo, options);
fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart)!;
yield return part;
}
}
private static FileInfo? GetNextFileInfo(ArchiveHeader ah, FileInfoRarFilePart? currentFilePart)
{
if (currentFilePart is null)
{
return null;
}
bool oldNumbering = ah.OldNumberingFormat
|| currentFilePart.MarkHeader.OldNumberingFormat;
if (oldNumbering)
{
return FindNextFileWithOldNumbering(currentFilePart.FileInfo);
}
else
{
return FindNextFileWithNewNumbering(currentFilePart.FileInfo);
}
}
private static FileInfo FindNextFileWithOldNumbering(FileInfo currentFileInfo)
{
// .rar, .r00, .r01, ...
string extension = currentFileInfo.Extension;
var buffer = new StringBuilder(currentFileInfo.FullName.Length);
buffer.Append(currentFileInfo.FullName.Substring(0,
currentFileInfo.FullName.Length - extension.Length));
if (string.Compare(extension, ".rar", StringComparison.OrdinalIgnoreCase) == 0)
{
buffer.Append(".r00");
}
else
{
if (int.TryParse(extension.Substring(2, 2), out int num))
{
num++;
buffer.Append(".r");
if (num < 10)
{
buffer.Append('0');
}
buffer.Append(num);
}
else
{
ThrowInvalidFileName(currentFileInfo);
}
}
return new FileInfo(buffer.ToString());
}
private static FileInfo FindNextFileWithNewNumbering(FileInfo currentFileInfo)
{
// part1.rar, part2.rar, ...
string extension = currentFileInfo.Extension;
if (string.Compare(extension, ".rar", StringComparison.OrdinalIgnoreCase) != 0)
{
throw new ArgumentException("Invalid extension, expected 'rar': " + currentFileInfo.FullName);
}
int startIndex = currentFileInfo.FullName.LastIndexOf(".part");
if (startIndex < 0)
{
ThrowInvalidFileName(currentFileInfo);
}
StringBuilder buffer = new StringBuilder(currentFileInfo.FullName.Length);
buffer.Append(currentFileInfo.FullName, 0, startIndex);
string numString = currentFileInfo.FullName.Substring(startIndex + 5,
currentFileInfo.FullName.IndexOf('.', startIndex + 5) -
startIndex - 5);
buffer.Append(".part");
if (int.TryParse(numString, out int num))
{
num++;
for (int i = 0; i < numString.Length - num.ToString().Length; i++)
{
buffer.Append('0');
}
buffer.Append(num);
}
else
{
ThrowInvalidFileName(currentFileInfo);
}
buffer.Append(".rar");
return new FileInfo(buffer.ToString());
}
private static void ThrowInvalidFileName(FileInfo fileInfo)
{
throw new ArgumentException("Filename invalid or next archive could not be found:"
+ fileInfo.FullName);
}
}
}
}
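FindNextFileWithNewNumbering walks part1.rar, part2.rar, ... while preserving zero padding. A compact sketch of the same name arithmetic, assuming the name contains a ".partN." segment (validation elided):

using System;

static string NextPartName(string name)
{
    int start = name.LastIndexOf(".part", StringComparison.OrdinalIgnoreCase);
    int dot = name.IndexOf('.', start + 5);
    string num = name.Substring(start + 5, dot - start - 5);
    int next = int.Parse(num) + 1;
    return name.Substring(0, start) + ".part"
        + next.ToString().PadLeft(num.Length, '0') + ".rar";
}

Console.WriteLine(NextPartName("archive.part01.rar")); // archive.part02.rar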

View File

@@ -1,4 +1,4 @@
#nullable disable
using System;
using System.Collections.Generic;
@@ -34,33 +34,8 @@ namespace SharpCompress.Archives.SevenZip
public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
{
fileInfo.CheckNotNull("fileInfo");
return new SevenZipArchive(new SourceStream(fileInfo, i => ArchiveVolumeFactory.GetFilePart(i, fileInfo), readerOptions ?? new ReaderOptions()));
return new SevenZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions readerOptions = null)
{
fileInfos.CheckNotNull(nameof(fileInfos));
FileInfo[] files = fileInfos.ToArray();
return new SevenZipArchive(new SourceStream(files[0], i => i < files.Length ? files[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(IEnumerable<Stream> streams, ReaderOptions readerOptions = null)
{
streams.CheckNotNull(nameof(streams));
Stream[] strms = streams.ToArray();
return new SevenZipArchive(new SourceStream(strms[0], i => i < strms.Length ? strms[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
@@ -69,23 +44,17 @@ namespace SharpCompress.Archives.SevenZip
public static SevenZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
{
stream.CheckNotNull("stream");
return new SevenZipArchive(new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions()));
return new SevenZipArchive(stream, readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal SevenZipArchive(SourceStream srcStream)
: base(ArchiveType.SevenZip, srcStream)
internal SevenZipArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.SevenZip, fileInfo, readerOptions)
{
}
protected override IEnumerable<SevenZipVolume> LoadVolumes(SourceStream srcStream)
protected override IEnumerable<SevenZipVolume> LoadVolumes(FileInfo file)
{
base.SrcStream.LoadAllParts(); //request all streams
return new SevenZipVolume(srcStream, ReaderOptions).AsEnumerable(); //simple single volume or split, multivolume not supported
return new SevenZipVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
}
public static bool IsSevenZipFile(string filePath)
@@ -105,32 +74,38 @@ namespace SharpCompress.Archives.SevenZip
}
}
internal SevenZipArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.SevenZip, stream.AsEnumerable(), readerOptions)
{
}
internal SevenZipArchive()
: base(ArchiveType.SevenZip)
{
}
protected override IEnumerable<SevenZipVolume> LoadVolumes(IEnumerable<Stream> streams)
{
foreach (Stream s in streams)
{
if (!s.CanRead || !s.CanSeek)
{
throw new ArgumentException("Stream must be readable and seekable");
}
SevenZipVolume volume = new SevenZipVolume(s, ReaderOptions);
yield return volume;
}
}
protected override IEnumerable<SevenZipArchiveEntry> LoadEntries(IEnumerable<SevenZipVolume> volumes)
{
var stream = volumes.Single().Stream;
LoadFactory(stream);
var entries = new SevenZipArchiveEntry[database._files.Count];
for (int i = 0; i < database._files.Count; i++)
{
var file = database._files[i];
entries[i] = new SevenZipArchiveEntry(this, new SevenZipFilePart(stream, database, i, file, ReaderOptions.ArchiveEncoding));
yield return new SevenZipArchiveEntry(this, new SevenZipFilePart(stream, database, i, file, ReaderOptions.ArchiveEncoding));
}
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
{
var isSolid = false;
foreach (var entry in group)
{
entry.IsSolid = isSolid;
isSolid = true; //mark others in this group as solid - same as rar behaviour.
}
}
return entries;
}
private void LoadFactory(Stream stream)

View File

@@ -1,4 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -34,31 +34,7 @@ namespace SharpCompress.Archives.Tar
public static TarArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new TarArchive(new SourceStream(fileInfo, i => ArchiveVolumeFactory.GetFilePart(i, fileInfo), readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? readerOptions = null)
{
fileInfos.CheckNotNull(nameof(fileInfos));
FileInfo[] files = fileInfos.ToArray();
return new TarArchive(new SourceStream(files[0], i => i < files.Length ? files[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.CheckNotNull(nameof(streams));
Stream[] strms = streams.ToArray();
return new TarArchive(new SourceStream(strms[0], i => i < strms.Length ? strms[i] : null, readerOptions ?? new ReaderOptions()));
return new TarArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
/// <summary>
@@ -69,7 +45,7 @@ namespace SharpCompress.Archives.Tar
public static TarArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
return new TarArchive(new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions()));
return new TarArchive(stream, readerOptions ?? new ReaderOptions());
}
public static bool IsTarFile(string filePath)
@@ -104,19 +80,28 @@ namespace SharpCompress.Archives.Tar
return false;
}
protected override IEnumerable<TarVolume> LoadVolumes(SourceStream srcStream)
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
internal TarArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.Tar, fileInfo, readerOptions)
{
base.SrcStream.LoadAllParts(); //request all streams
return new TarVolume(srcStream, ReaderOptions).AsEnumerable(); //simple single volume or split, multivolume not supported
}
protected override IEnumerable<TarVolume> LoadVolumes(FileInfo file)
{
return new TarVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
}
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal TarArchive(SourceStream srcStream)
: base(ArchiveType.Tar, srcStream)
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
internal TarArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.Tar, stream, readerOptions)
{
}
@@ -125,6 +110,11 @@ namespace SharpCompress.Archives.Tar
{
}
protected override IEnumerable<TarVolume> LoadVolumes(IEnumerable<Stream> streams)
{
return new TarVolume(streams.First(), ReaderOptions).AsEnumerable();
}
protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
{
Stream stream = volumes.Single().Stream;

View File

@@ -1,4 +1,4 @@
#nullable disable
using System;
using System.Collections.Generic;
@@ -53,7 +53,7 @@ namespace SharpCompress.Archives.Tar
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return NonDisposingStream.Create(stream);
return new NonDisposingStream(stream);
}
internal override void Close()
@@ -64,4 +64,4 @@ namespace SharpCompress.Archives.Tar
}
}
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -6,7 +6,6 @@ using SharpCompress.Common;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Deflate;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Zip;
using SharpCompress.Writers;
@@ -26,17 +25,6 @@ namespace SharpCompress.Archives.Zip
/// </summary>
public CompressionLevel DeflateCompressionLevel { get; set; }
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal ZipArchive(SourceStream srcStream)
: base(ArchiveType.Zip, srcStream)
{
headerFactory = new SeekableZipHeaderFactory(srcStream.ReaderOptions.Password, srcStream.ReaderOptions.ArchiveEncoding);
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
@@ -56,31 +44,7 @@ namespace SharpCompress.Archives.Zip
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new ZipArchive(new SourceStream(fileInfo, i => ZipArchiveVolumeFactory.GetFilePart(i, fileInfo), readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? readerOptions = null)
{
fileInfos.CheckNotNull(nameof(fileInfos));
FileInfo[] files = fileInfos.ToArray();
return new ZipArchive(new SourceStream(files[0], i => i < files.Length ? files[i] : null, readerOptions ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.CheckNotNull(nameof(streams));
Stream[] strms = streams.ToArray();
return new ZipArchive(new SourceStream(strms[0], i => i < strms.Length ? strms[i] : null, readerOptions ?? new ReaderOptions()));
return new ZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
/// <summary>
@@ -91,7 +55,7 @@ namespace SharpCompress.Archives.Zip
public static ZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull(nameof(stream));
return new ZipArchive(new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions()));
return new ZipArchive(stream, readerOptions ?? new ReaderOptions());
}
public static bool IsZipFile(string filePath, string? password = null)
@@ -133,61 +97,20 @@ namespace SharpCompress.Archives.Zip
}
}
public static bool IsZipMulti(Stream stream, string? password = null)
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
internal ZipArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.Zip, fileInfo, readerOptions)
{
StreamingZipHeaderFactory headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding());
try
{
ZipHeader? header = headerFactory.ReadStreamHeader(stream).FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
if (stream.CanSeek) //could be multipart. Test for central directory - might not be z64 safe
{
SeekableZipHeaderFactory z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
var x = z.ReadSeekableHeader(stream).FirstOrDefault();
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
}
else
return false;
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
headerFactory = new SeekableZipHeaderFactory(readerOptions.Password, readerOptions.ArchiveEncoding);
}
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream srcStream)
protected override IEnumerable<ZipVolume> LoadVolumes(FileInfo file)
{
base.SrcStream.LoadAllParts(); //request all streams
base.SrcStream.Position = 0;
List<Stream> streams = base.SrcStream.Streams.ToList();
if (streams.Count > 1) //test part 2 - true = multipart not split
{
streams[1].Position += 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
bool isZip = IsZipFile(streams[1], ReaderOptions.Password);
streams[1].Position -= 4;
if (isZip)
{
base.SrcStream.IsVolumes = true;
var tmp = streams[0]; //arcs as zip, z01 ... swap the zip the end
streams.RemoveAt(0);
streams.Add(tmp);
//streams[0].Position = 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
return streams.Select(a => new ZipVolume(a, ReaderOptions));
}
}
//split mode or single file
return new ZipVolume(base.SrcStream, ReaderOptions).AsEnumerable();
return new ZipVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
}
internal ZipArchive()
@@ -195,35 +118,46 @@ namespace SharpCompress.Archives.Zip
{
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
internal ZipArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.Zip, stream, readerOptions)
{
headerFactory = new SeekableZipHeaderFactory(readerOptions.Password, readerOptions.ArchiveEncoding);
}
protected override IEnumerable<ZipVolume> LoadVolumes(IEnumerable<Stream> streams)
{
return new ZipVolume(streams.First(), ReaderOptions).AsEnumerable();
}
protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
{
var vols = volumes.ToArray();
foreach (ZipHeader h in headerFactory.ReadSeekableHeader(vols.Last().Stream))
var volume = volumes.Single();
Stream stream = volume.Stream;
foreach (ZipHeader h in headerFactory.ReadSeekableHeader(stream))
{
if (h != null)
{
switch (h.ZipHeaderType)
{
case ZipHeaderType.DirectoryEntry:
{
DirectoryEntryHeader deh = (DirectoryEntryHeader)h;
Stream s;
if (deh.RelativeOffsetOfEntryHeader + deh.CompressedSize > vols[deh.DiskNumberStart].Stream.Length)
{
var v = vols.Skip(deh.DiskNumberStart).ToArray();
s = new SourceStream(v[0].Stream, i => i < v.Length ? v[i].Stream : null, new ReaderOptions() { LeaveStreamOpen = true });
yield return new ZipArchiveEntry(this,
new SeekableZipFilePart(headerFactory,
(DirectoryEntryHeader)h,
stream));
}
else
s = vols[deh.DiskNumberStart].Stream;
yield return new ZipArchiveEntry(this, new SeekableZipFilePart(headerFactory, deh, s));
}
break;
case ZipHeaderType.DirectoryEnd:
{
byte[] bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
volumes.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
yield break;
}
{
byte[] bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
volume.Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
yield break;
}
}
}
}

View File

@@ -1,34 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Readers;
using System.Linq;
using System.Text;
using SharpCompress.Common.Rar.Headers;
using System.Text.RegularExpressions;
namespace SharpCompress.Archives.Zip
{
internal static class ZipArchiveVolumeFactory
{
internal static FileInfo? GetFilePart(int index, FileInfo part1) //base the name on the first part
{
FileInfo? item = null;
//load files with zip/zipx first. Swapped to end once loaded in ZipArchive
//new style .zip, z01.. | .zipx, zx01 - if the numbers go beyond 99 then they use 100 ...1000 etc
Match m = Regex.Match(part1.Name, @"^(.*\.)(zipx?|zx?[0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(Path.Combine(part1.DirectoryName!, String.Concat(m.Groups[1].Value, Regex.Replace(m.Groups[2].Value, @"[^xz]", ""), index.ToString().PadLeft(2, '0'))));
else //split - 001, 002 ...
return ArchiveVolumeFactory.GetFilePart(index, part1);
if (item != null && item.Exists)
return item;
return null; //no more items
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
@@ -53,7 +53,7 @@ namespace SharpCompress.Archives.Zip
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return NonDisposingStream.Create(stream);
return new NonDisposingStream(stream);
}
internal override void Close()
@@ -65,4 +65,4 @@ namespace SharpCompress.Archives.Zip
}
}
}
}
}

View File

@@ -36,7 +36,7 @@ namespace SharpCompress.Common
Password = password;
}
#if !NETFRAMEWORK
#if !NET461
static ArchiveEncoding()
{
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
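On targets where the directive compiles in, the static constructor registers the code-pages provider so legacy encodings resolve at runtime. A minimal sketch of the effect, assuming the System.Text.Encoding.CodePages package is referenced; IBM437 (often used for zip entry names) is just an example:

using System;
using System.Text;

Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
Console.WriteLine(Encoding.GetEncoding(437).EncodingName); // resolves only after registration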

View File

@@ -8,5 +8,10 @@ namespace SharpCompress.Common
: base(message)
{
}
public ArchiveException(string message, Exception inner)
: base(message, inner)
{
}
}
}

View File

@@ -6,6 +6,7 @@
Zip,
Tar,
SevenZip,
GZip
GZip,
Dmg
}
}

View File

@@ -0,0 +1,323 @@
using SharpCompress.Common.Dmg.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.ADC;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.Deflate;
using SharpCompress.IO;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
namespace SharpCompress.Common.Dmg
{
internal sealed class DmgBlockDataStream : Stream
{
private readonly Stream _baseStream;
private readonly DmgHeader _header;
private readonly BlkxTable _table;
private long _position;
private bool _isEnded;
private int _chunkIndex;
private Stream? _chunkStream;
private long _chunkPos;
public override bool CanRead => true;
public override bool CanWrite => false;
public override bool CanSeek => true;
public override long Length { get; }
public override long Position
{
get => _position;
set
{
if ((value < 0) || (value > Length)) throw new ArgumentOutOfRangeException(nameof(value));
if (value == Length)
{
// End of the stream
_position = Length;
_isEnded = true;
_chunkIndex = -1;
_chunkStream = null;
}
else if (value != _position)
{
_position = value;
// We can only seek over entire chunks at a time because some chunks may be compressed.
// So we first find the chunk that we are now in, then we read to the exact position inside that chunk.
for (int i = 0; i < _table.Chunks.Count; i++)
{
var chunk = _table.Chunks[i];
if (IsChunkValid(chunk) && (chunk.UncompressedOffset <= (ulong)_position)
&& ((chunk.UncompressedOffset + chunk.UncompressedLength) > (ulong)_position))
{
if (i == _chunkIndex)
{
// We are still in the same chunk, so if the new position is
// behind the previous one we can just read to the new position.
long offset = (long)chunk.UncompressedOffset + _chunkPos;
if (offset <= _position)
{
long skip = _position - offset;
_chunkStream!.Skip(skip);
_chunkPos += skip;
break;
}
}
_chunkIndex = i;
_chunkStream = GetChunkStream();
_chunkPos = 0;
// If the chunk happens to not be compressed this read will still result in a fast seek
if ((ulong)_position != chunk.UncompressedOffset)
{
long skip = _position - (long)chunk.UncompressedOffset;
_chunkStream.Skip(skip);
_chunkPos = skip;
}
break;
}
}
}
}
}
public DmgBlockDataStream(Stream baseStream, DmgHeader header, BlkxTable table)
{
if (!baseStream.CanRead) throw new ArgumentException("Requires a readable stream", nameof(baseStream));
if (!baseStream.CanSeek) throw new ArgumentException("Requires a seekable stream", nameof(baseStream));
_baseStream = baseStream;
_header = header;
_table = table;
Length = 0;
foreach (var chunk in table.Chunks)
{
if (IsChunkValid(chunk))
Length += (long)chunk.UncompressedLength;
}
_position = 0;
_chunkIndex = -1;
_chunkIndex = GetNextChunk();
_isEnded = _chunkIndex < 0;
if (!_isEnded) _chunkStream = GetChunkStream();
_chunkPos = 0;
}
private static bool IsChunkValid(BlkxChunk chunk)
{
return chunk.Type switch
{
BlkxChunkType.Zero => true,
BlkxChunkType.Uncompressed => true,
BlkxChunkType.Ignore => true,
BlkxChunkType.AdcCompressed => true,
BlkxChunkType.ZlibCompressed => true,
BlkxChunkType.Bz2Compressed => true,
_ => false
};
}
private int GetNextChunk()
{
int index = _chunkIndex;
bool isValid = false;
while (!isValid)
{
index++;
if (index >= _table.Chunks.Count) return -1;
var chunk = _table.Chunks[index];
if (chunk.Type == BlkxChunkType.Last) return -1;
isValid = IsChunkValid(chunk);
}
return index;
}
private Stream GetChunkStream()
{
if (_chunkIndex < 0)
throw new InvalidOperationException("Invalid chunk index");
var chunk = _table.Chunks[_chunkIndex];
// For our purposes, ignore behaves the same as zero
if ((chunk.Type == BlkxChunkType.Zero) || (chunk.Type == BlkxChunkType.Ignore))
return new ConstantStream(0, (long)chunk.UncompressedLength);
// We first create a sub-stream on the region of the base stream where the
// (possibly compressed) data is physically located at.
var subStream = new SeekableSubStream(_baseStream,
(long)(_header.DataForkOffset + _table.DataOffset + chunk.CompressedOffset),
(long)chunk.CompressedLength);
// Then we nest that sub-stream into the appropriate compressed stream.
return chunk.Type switch
{
BlkxChunkType.Uncompressed => subStream,
BlkxChunkType.AdcCompressed => new ADCStream(subStream, CompressionMode.Decompress),
BlkxChunkType.ZlibCompressed => new ZlibStream(subStream, CompressionMode.Decompress),
BlkxChunkType.Bz2Compressed => new BZip2Stream(subStream, CompressionMode.Decompress, false),
_ => throw new InvalidOperationException("Invalid chunk type")
};
}
// Decompresses the entire stream in memory for faster extraction.
// This is about two orders of magnitude faster than decompressing
// on-the-fly while extracting, but also eats RAM for breakfast.
public Stream Decompress()
{
// We have to load all the chunks into separate memory streams first
// because otherwise the decompression threads would block each other
// and actually be slower than just a single decompression thread.
var rawStreams = new Stream?[_table.Chunks.Count];
for (int i = 0; i < rawStreams.Length; i++)
{
var chunk = _table.Chunks[i];
if (IsChunkValid(chunk))
{
if ((chunk.Type == BlkxChunkType.Zero) || (chunk.Type == BlkxChunkType.Ignore))
{
rawStreams[i] = new ConstantStream(0, (long)chunk.UncompressedLength);
}
else
{
var subStream = new SeekableSubStream(_baseStream,
(long)(_header.DataForkOffset + _table.DataOffset + chunk.CompressedOffset),
(long)chunk.CompressedLength);
var memStream = new MemoryStream();
subStream.CopyTo(memStream);
memStream.Position = 0;
rawStreams[i] = memStream;
}
}
else
{
rawStreams[i] = null;
}
}
// Now we can decompress the chunks multithreaded
var streams = new Stream?[_table.Chunks.Count];
Parallel.For(0, streams.Length, i =>
{
var rawStream = rawStreams[i];
if (rawStream is not null)
{
var chunk = _table.Chunks[i];
if ((chunk.Type == BlkxChunkType.Zero)
|| (chunk.Type == BlkxChunkType.Ignore)
|| (chunk.Type == BlkxChunkType.Uncompressed))
{
streams[i] = rawStream;
}
else
{
Stream compStream = chunk.Type switch
{
BlkxChunkType.AdcCompressed => new ADCStream(rawStream, CompressionMode.Decompress),
BlkxChunkType.ZlibCompressed => new ZlibStream(rawStream, CompressionMode.Decompress),
BlkxChunkType.Bz2Compressed => new BZip2Stream(rawStream, CompressionMode.Decompress, false),
_ => throw new InvalidOperationException("Invalid chunk type")
};
var memStream = new MemoryStream();
compStream.CopyTo(memStream);
compStream.Dispose();
memStream.Position = 0;
streams[i] = memStream;
}
rawStream.Dispose();
rawStreams[i] = null;
}
else
{
streams[i] = null;
}
});
return new CompositeStream((IEnumerable<Stream>)streams.Where(s => s is not null));
}
public override int Read(byte[] buffer, int offset, int count)
{
if (_isEnded) return 0;
int readCount = _chunkStream!.Read(buffer, offset, count);
_chunkPos += readCount;
while (readCount < count)
{
// Current chunk has ended, so we have to continue reading from the next chunk.
_chunkIndex = GetNextChunk();
if (_chunkIndex < 0)
{
// We have reached the last chunk
_isEnded = true;
_chunkPos = 0;
_position += readCount;
return readCount;
}
_chunkStream = GetChunkStream();
int rc = _chunkStream.Read(buffer, offset + readCount, count - readCount);
_chunkPos = rc;
readCount += rc;
}
_position += readCount;
return readCount;
}
public override void Flush()
{ }
public override long Seek(long offset, SeekOrigin origin)
{
switch (origin)
{
case SeekOrigin.Begin:
Position = offset;
break;
case SeekOrigin.Current:
Position += offset;
break;
case SeekOrigin.End:
Position = Length - offset;
break;
}
return Position;
}
public override void SetLength(long value)
=> throw new NotSupportedException();
public override void Write(byte[] buffer, int offset, int count)
=> throw new NotSupportedException();
protected override void Dispose(bool disposing)
{ }
}
}
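Decompress buffers every compressed chunk before inflating because, as the comment notes, chunks decompressed straight off the shared base stream would block each other. A hedged sketch of that buffer-then-parallel-inflate pattern; inflate stands in for the per-chunk ADC/zlib/bzip2 wrappers:

using System;
using System.IO;
using System.Threading.Tasks;

static MemoryStream DecompressAll(byte[][] chunks, Func<Stream, Stream> inflate)
{
    var parts = new MemoryStream[chunks.Length];
    Parallel.For(0, parts.Length, i =>
    {
        using var src = inflate(new MemoryStream(chunks[i])); // independent buffer per chunk
        var dst = new MemoryStream();
        src.CopyTo(dst);
        dst.Position = 0;
        parts[i] = dst;
    });
    var whole = new MemoryStream(); // concatenate in original chunk order
    foreach (var p in parts)
    {
        p.CopyTo(whole);
        p.Dispose();
    }
    whole.Position = 0;
    return whole;
}

Console.WriteLine(DecompressAll(new[] { new byte[] { 1, 2 } }, s => s).Length); // 2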

View File

@@ -0,0 +1,52 @@
using SharpCompress.Common.Dmg.HFS;
using System;
using System.Collections.Generic;
namespace SharpCompress.Common.Dmg
{
public abstract class DmgEntry : Entry
{
public override string Key { get; }
public override bool IsDirectory { get; }
public override long Size { get; }
public override long CompressedSize { get; }
public override CompressionType CompressionType { get; }
public override DateTime? LastModifiedTime { get; }
public override DateTime? CreatedTime { get; }
public override DateTime? LastAccessedTime { get; }
public override DateTime? ArchivedTime { get; }
public override long Crc { get; } = 0; // Not stored
public override string? LinkTarget { get; } = null;
public override bool IsEncrypted { get; } = false;
public override bool IsSplitAfter { get; } = false;
internal override IEnumerable<FilePart> Parts { get; }
internal DmgEntry(HFSCatalogRecord record, string path, long size, DmgFilePart part)
{
Key = path;
IsDirectory = record.Type == HFSCatalogRecordType.Folder;
Size = CompressedSize = size; // There is no way to get the actual compressed size or the compression type of
CompressionType = CompressionType.Unknown; // a file in a DMG archive since the files are nested inside the HFS partition.
Parts = part.AsEnumerable();
if (IsDirectory)
{
var folder = (HFSCatalogFolder)record;
LastModifiedTime = (folder.AttributeModDate > folder.ContentModDate) ? folder.AttributeModDate : folder.ContentModDate;
CreatedTime = folder.CreateDate;
LastAccessedTime = folder.AccessDate;
ArchivedTime = folder.BackupDate;
}
else
{
var file = (HFSCatalogFile)record;
LastModifiedTime = (file.AttributeModDate > file.ContentModDate) ? file.AttributeModDate : file.ContentModDate;
CreatedTime = file.CreateDate;
LastAccessedTime = file.AccessDate;
ArchivedTime = file.BackupDate;
}
}
}
}

View File

@@ -0,0 +1,21 @@
using System.IO;
namespace SharpCompress.Common.Dmg
{
internal sealed class DmgFilePart : FilePart
{
private readonly Stream _stream;
internal override string FilePartName { get; }
public DmgFilePart(Stream stream, string fileName)
: base(new ArchiveEncoding())
{
_stream = stream;
FilePartName = fileName;
}
internal override Stream GetCompressedStream() => _stream;
internal override Stream? GetRawStream() => null;
}
}

View File

@@ -0,0 +1,183 @@
using SharpCompress.Common.Dmg.Headers;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Text;
using System.Xml.Linq;
namespace SharpCompress.Common.Dmg
{
internal static class DmgUtil
{
private const string MalformedXmlMessage = "Malformed XML block";
private static T[] ParseArray<T>(in XElement parent, in Func<XElement, T> parseElement)
{
var list = new List<T>();
foreach (var node in parent.Elements())
list.Add(parseElement(node));
return list.ToArray();
}
private static Dictionary<string, T> ParseDict<T>(in XElement parent, in Func<XElement, T> parseValue)
{
var dict = new Dictionary<string, T>();
string? key = null;
foreach (var node in parent.Elements())
{
if (string.Equals(node.Name.LocalName, "key", StringComparison.Ordinal))
{
key = node.Value;
}
else if (key is not null)
{
var value = parseValue(node);
dict.Add(key, value);
key = null;
}
}
return dict;
}
private static Dictionary<string, Dictionary<string, Dictionary<string, string>[]>> ParsePList(in XDocument doc)
{
var dictNode = doc.Root?.Element("dict");
if (dictNode is null) throw new InvalidFormatException(MalformedXmlMessage);
static Dictionary<string, string> ParseObject(XElement parent)
=> ParseDict(parent, node => node.Value);
static Dictionary<string, string>[] ParseObjectArray(XElement parent)
=> ParseArray(parent, ParseObject);
static Dictionary<string, Dictionary<string, string>[]> ParseSubDict(XElement parent)
=> ParseDict(parent, ParseObjectArray);
return ParseDict(dictNode, ParseSubDict);
}
private static BlkxData CreateDataFromDict(in Dictionary<string, string> dict)
{
static bool TryParseHex(string? s, out uint value)
{
value = 0;
if (string.IsNullOrEmpty(s)) return false;
if (s!.StartsWith("0x", StringComparison.OrdinalIgnoreCase))
s = s.Substring(2);
return uint.TryParse(s, NumberStyles.HexNumber, CultureInfo.InvariantCulture, out value);
}
if (!dict.TryGetValue("ID", out string? idStr) || !int.TryParse(idStr, out int id))
throw new InvalidFormatException(MalformedXmlMessage);
if (!dict.TryGetValue("Name", out string? name))
throw new InvalidFormatException(MalformedXmlMessage);
if (!dict.TryGetValue("Attributes", out string? attribStr) || !TryParseHex(attribStr, out uint attribs))
throw new InvalidFormatException(MalformedXmlMessage);
if (!dict.TryGetValue("Data", out string? base64Data) || string.IsNullOrEmpty(base64Data))
throw new InvalidFormatException(MalformedXmlMessage);
try
{
var data = Convert.FromBase64String(base64Data);
if (!BlkxTable.TryRead(data, out var table))
throw new InvalidFormatException("Invalid BLKX table");
return new BlkxData(id, name, attribs, table!);
}
catch (FormatException ex)
{
throw new InvalidFormatException(MalformedXmlMessage, ex);
}
}
public static DmgBlockDataStream? LoadHFSPartitionStream(Stream baseStream, DmgHeader header)
{
if ((header.XMLOffset + header.XMLLength) >= (ulong)baseStream.Length)
throw new IncompleteArchiveException("XML block incomplete");
if ((header.DataForkOffset + header.DataForkLength) >= (ulong)baseStream.Length)
throw new IncompleteArchiveException("Data block incomplete");
baseStream.Position = (long)header.XMLOffset;
var xmlBuffer = new byte[header.XMLLength];
if (baseStream.Read(xmlBuffer, 0, xmlBuffer.Length) != xmlBuffer.Length)
throw new IncompleteArchiveException("XML block incomplete");
var xml = Encoding.ASCII.GetString(xmlBuffer);
var doc = XDocument.Parse(xml);
var pList = ParsePList(doc);
if (!pList.TryGetValue("resource-fork", out var resDict) || !resDict.TryGetValue("blkx", out var blkxDicts))
throw new InvalidFormatException(MalformedXmlMessage);
var objs = new BlkxData[blkxDicts.Length];
for (int i = 0; i < objs.Length; i++)
objs[i] = CreateDataFromDict(blkxDicts[i]);
// Index 0 is the protective MBR partition
// Index 1 is the GPT header
// Index 2 is the GPT partition table
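// Index 3 and upwards hold the actual partition images, which are searched below.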
try
{
var headerData = objs[1];
using var headerStream = new DmgBlockDataStream(baseStream, header, headerData.Table);
if (!GptHeader.TryRead(headerStream, out var gptHeader))
throw new InvalidFormatException("Invalid GPT header");
var tableData = objs[2];
using var tableStream = new DmgBlockDataStream(baseStream, header, tableData.Table);
var gptTable = new GptPartitionEntry[gptHeader!.EntriesCount];
for (int i = 0; i < gptHeader.EntriesCount; i++)
gptTable[i] = GptPartitionEntry.Read(tableStream);
foreach (var entry in gptTable)
{
if (entry.TypeGuid == PartitionFormat.AppleHFS)
{
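// blkx entry names typically embed the GPT partition name as a prefix,
// e.g. "disk image (Apple_HFS : 3)", hence the prefix match below.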
BlkxData? partitionData = null;
for (int i = 3; i < objs.Length; i++)
{
if (objs[i].Name.StartsWith(entry.Name, StringComparison.Ordinal))
{
partitionData = objs[i];
break;
}
}
if (partitionData is null)
throw new InvalidFormatException($"Missing partition {entry.Name}");
return new DmgBlockDataStream(baseStream, header, partitionData.Table);
}
}
return null;
}
catch (EndOfStreamException ex)
{
throw new IncompleteArchiveException("Partition incomplete", ex);
}
}
private sealed class BlkxData
{
public int Id { get; }
public string Name { get; }
public uint Attributes { get; }
public BlkxTable Table { get; }
public BlkxData(int id, string name, uint attributes, BlkxTable table)
{
Id = id;
Name = name;
Attributes = attributes;
Table = table;
}
}
}
}

View File

@@ -0,0 +1,38 @@
using SharpCompress.Archives.Dmg;
using SharpCompress.Common.Dmg.Headers;
using SharpCompress.Common.Dmg.HFS;
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Common.Dmg
{
public class DmgVolume : Volume
{
private readonly DmgArchive _archive;
private readonly string _fileName;
internal DmgHeader Header { get; }
public DmgVolume(DmgArchive archive, Stream stream, string fileName, Readers.ReaderOptions readerOptions)
: base(stream, readerOptions)
{
_archive = archive;
_fileName = fileName;
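// The UDIF "koly" header is stored as a 512-byte trailer at the very end of the file.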
long pos = stream.Length - DmgHeader.HeaderSize;
if (pos < 0) throw new InvalidFormatException("Invalid DMG volume");
stream.Position = pos;
if (DmgHeader.TryRead(stream, out var header)) Header = header!;
else throw new InvalidFormatException("Invalid DMG volume");
}
internal IEnumerable<DmgArchiveEntry> LoadEntries()
{
var partitionStream = DmgUtil.LoadHFSPartitionStream(Stream, Header);
if (partitionStream is null) return Array.Empty<DmgArchiveEntry>();
else return HFSUtil.LoadEntriesFromPartition(partitionStream, _fileName, _archive);
}
}
}

View File

@@ -0,0 +1,336 @@
using System;
namespace SharpCompress.Common.Dmg.HFS
{
internal sealed class HFSCatalogKey : HFSStructBase, IEquatable<HFSCatalogKey>, IComparable<HFSCatalogKey>, IComparable
{
private readonly StringComparer _comparer;
public uint ParentId { get; }
public string Name { get; }
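// HFS+ names always compare case-insensitively; HFSX volumes choose between
// case folding (0xCF) and binary comparison (0xBC) via the tree header's key compare type.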
private static StringComparer GetComparer(HFSKeyCompareType compareType, bool isHFSX)
{
if (isHFSX)
{
return compareType switch
{
HFSKeyCompareType.CaseFolding => StringComparer.InvariantCultureIgnoreCase,
HFSKeyCompareType.BinaryCompare => StringComparer.Ordinal,
_ => StringComparer.InvariantCultureIgnoreCase
};
}
else
{
return StringComparer.InvariantCultureIgnoreCase;
}
}
public HFSCatalogKey(uint parentId, string name, HFSKeyCompareType compareType, bool isHFSX)
{
ParentId = parentId;
Name = name;
_comparer = GetComparer(compareType, isHFSX);
}
public HFSCatalogKey(byte[] key, HFSKeyCompareType compareType, bool isHFSX)
{
ReadOnlySpan<byte> data = key.AsSpan();
ParentId = ReadUInt32(ref data);
Name = ReadString(ref data, true);
_comparer = GetComparer(compareType, isHFSX);
}
public bool Equals(HFSCatalogKey? other)
{
if (other is null) return false;
else return (ParentId == other.ParentId) && _comparer.Equals(Name, other.Name);
}
public override bool Equals(object? obj)
{
if (obj is HFSCatalogKey other) return Equals(other);
else return false;
}
public int CompareTo(HFSCatalogKey? other)
{
if (other is null) return 1;
int result = ParentId.CompareTo(other.ParentId);
if (result == 0) result = _comparer.Compare(Name, other.Name);
return result;
}
public int CompareTo(object? obj)
{
if (obj is null) return 1;
else if (obj is HFSCatalogKey other) return CompareTo(other);
else throw new ArgumentException("Object is not of type CatalogKey", nameof(obj));
}
public override int GetHashCode()
=> ParentId.GetHashCode() ^ _comparer.GetHashCode(Name);
public static bool operator ==(HFSCatalogKey? left, HFSCatalogKey? right)
{
if (left is null) return right is null;
else return left.Equals(right);
}
public static bool operator !=(HFSCatalogKey? left, HFSCatalogKey? right)
{
if (left is null) return right is not null;
else return !left.Equals(right);
}
public static bool operator <(HFSCatalogKey? left, HFSCatalogKey? right)
{
if (left is null) return right is not null;
else return left.CompareTo(right) < 0;
}
public static bool operator >(HFSCatalogKey? left, HFSCatalogKey? right)
{
if (left is null) return false;
else return left.CompareTo(right) > 0;
}
public static bool operator <=(HFSCatalogKey? left, HFSCatalogKey? right)
{
if (left is null) return true;
else return left.CompareTo(right) <= 0;
}
public static bool operator >=(HFSCatalogKey? left, HFSCatalogKey? right)
{
if (left is null) return right is null;
else return left.CompareTo(right) >= 0;
}
}
internal enum HFSCatalogRecordType : ushort
{
Folder = 0x0001,
File = 0x0002,
FolderThread = 0x0003,
FileThread = 0x0004
}
internal abstract class HFSCatalogRecord : HFSStructBase
{
public HFSCatalogRecordType Type { get; }
protected HFSCatalogRecord(HFSCatalogRecordType type)
=> Type = type;
public static bool TryRead(ref ReadOnlySpan<byte> data, HFSKeyCompareType compareType, bool isHFSX, out HFSCatalogRecord? record)
{
record = null;
ushort rawType = ReadUInt16(ref data);
if (!Enum.IsDefined(typeof(HFSCatalogRecordType), rawType)) return false;
var type = (HFSCatalogRecordType)rawType;
switch (type)
{
case HFSCatalogRecordType.Folder:
record = HFSCatalogFolder.Read(ref data);
return true;
case HFSCatalogRecordType.File:
record = HFSCatalogFile.Read(ref data);
return true;
case HFSCatalogRecordType.FolderThread:
record = HFSCatalogThread.Read(ref data, false, compareType, isHFSX);
return true;
case HFSCatalogRecordType.FileThread:
record = HFSCatalogThread.Read(ref data, true, compareType, isHFSX);
return true;
}
return false;
}
}
internal sealed class HFSCatalogFolder : HFSCatalogRecord
{
public uint Valence { get; }
public uint FolderId { get; }
public DateTime CreateDate { get; }
public DateTime ContentModDate { get; }
public DateTime AttributeModDate { get; }
public DateTime AccessDate { get; }
public DateTime BackupDate { get; }
public HFSPermissions Permissions { get; }
public HFSFolderInfo Info { get; }
public uint TextEncoding { get; }
private HFSCatalogFolder(
uint valence,
uint folderId,
DateTime createDate,
DateTime contentModDate,
DateTime attributeModDate,
DateTime accessDate,
DateTime backupDate,
HFSPermissions permissions,
HFSFolderInfo info,
uint textEncoding)
: base(HFSCatalogRecordType.Folder)
{
Valence = valence;
FolderId = folderId;
CreateDate = createDate;
ContentModDate = contentModDate;
AttributeModDate = attributeModDate;
AccessDate = accessDate;
BackupDate = backupDate;
Permissions = permissions;
Info = info;
TextEncoding = textEncoding;
}
public static HFSCatalogFolder Read(ref ReadOnlySpan<byte> data)
{
_ = ReadUInt16(ref data); // reserved
uint valence = ReadUInt32(ref data);
uint folderId = ReadUInt32(ref data);
var createDate = ReadDate(ref data);
var contentModDate = ReadDate(ref data);
var attributeModDate = ReadDate(ref data);
var accessDate = ReadDate(ref data);
var backupDate = ReadDate(ref data);
var permissions = HFSPermissions.Read(ref data);
var info = HFSFolderInfo.Read(ref data);
uint textEncoding = ReadUInt32(ref data);
_ = ReadUInt32(ref data); // reserved
return new HFSCatalogFolder(
valence,
folderId,
createDate,
contentModDate,
attributeModDate,
accessDate,
backupDate,
permissions,
info,
textEncoding);
}
}
internal enum HFSFileFlags : ushort
{
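// Values mirror Apple's kHFSFileLocked{Bit,Mask} and kHFSThreadExists{Bit,Mask}
// constants: the *Bit members are bit numbers, the *Mask members are the matching masks.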
LockedBit = 0x0000,
LockedMask = 0x0001,
ThreadExistsBit = 0x0001,
ThreadExistsMask = 0x0002
}
internal sealed class HFSCatalogFile : HFSCatalogRecord
{
public HFSFileFlags Flags { get; }
public uint FileId { get; }
public DateTime CreateDate { get; }
public DateTime ContentModDate { get; }
public DateTime AttributeModDate { get; }
public DateTime AccessDate { get; }
public DateTime BackupDate { get; }
public HFSPermissions Permissions { get; }
public HFSFileInfo Info { get; }
public uint TextEncoding { get; }
public HFSForkData DataFork { get; }
public HFSForkData ResourceFork { get; }
private HFSCatalogFile(
HFSFileFlags flags,
uint fileId,
DateTime createDate,
DateTime contentModDate,
DateTime attributeModDate,
DateTime accessDate,
DateTime backupDate,
HFSPermissions permissions,
HFSFileInfo info,
uint textEncoding,
HFSForkData dataFork,
HFSForkData resourceFork)
: base(HFSCatalogRecordType.File)
{
Flags = flags;
FileId = fileId;
CreateDate = createDate;
ContentModDate = contentModDate;
AttributeModDate = attributeModDate;
AccessDate = accessDate;
BackupDate = backupDate;
Permissions = permissions;
Info = info;
TextEncoding = textEncoding;
DataFork = dataFork;
ResourceFork = resourceFork;
}
public static HFSCatalogFile Read(ref ReadOnlySpan<byte> data)
{
var flags = (HFSFileFlags)ReadUInt16(ref data);
_ = ReadUInt32(ref data); // reserved
uint fileId = ReadUInt32(ref data);
var createDate = ReadDate(ref data);
var contentModDate = ReadDate(ref data);
var attributeModDate = ReadDate(ref data);
var accessDate = ReadDate(ref data);
var backupDate = ReadDate(ref data);
var permissions = HFSPermissions.Read(ref data);
var info = HFSFileInfo.Read(ref data);
uint textEncoding = ReadUInt32(ref data);
_ = ReadUInt32(ref data); // reserved
var dataFork = HFSForkData.Read(ref data);
var resourceFork = HFSForkData.Read(ref data);
return new HFSCatalogFile(
flags,
fileId,
createDate,
contentModDate,
attributeModDate,
accessDate,
backupDate,
permissions,
info,
textEncoding,
dataFork,
resourceFork);
}
}
internal sealed class HFSCatalogThread : HFSCatalogRecord
{
public uint ParentId { get; }
public string NodeName { get; }
public HFSCatalogKey CatalogKey { get; }
private HFSCatalogThread(uint parentId, string nodeName, bool isFile, HFSKeyCompareType compareType, bool isHFSX)
: base(isFile ? HFSCatalogRecordType.FileThread : HFSCatalogRecordType.FolderThread)
{
ParentId = parentId;
NodeName = nodeName;
CatalogKey = new HFSCatalogKey(ParentId, NodeName, compareType, isHFSX);
}
public static HFSCatalogThread Read(ref ReadOnlySpan<byte> data, bool isFile, HFSKeyCompareType compareType, bool isHFSX)
{
_ = ReadInt16(ref data); // reserved
uint parentId = ReadUInt32(ref data);
string nodeName = ReadString(ref data, true);
return new HFSCatalogThread(parentId, nodeName, isFile, compareType, isHFSX);
}
}
}

View File

@@ -0,0 +1,31 @@
using System;
using System.IO;
namespace SharpCompress.Common.Dmg.HFS
{
internal sealed class HFSExtentDescriptor : HFSStructBase
{
public uint StartBlock { get; }
public uint BlockCount { get; }
private HFSExtentDescriptor(uint startBlock, uint blockCount)
{
StartBlock = startBlock;
BlockCount = blockCount;
}
public static HFSExtentDescriptor Read(Stream stream)
{
return new HFSExtentDescriptor(
ReadUInt32(stream),
ReadUInt32(stream));
}
public static HFSExtentDescriptor Read(ref ReadOnlySpan<byte> data)
{
return new HFSExtentDescriptor(
ReadUInt32(ref data),
ReadUInt32(ref data));
}
}
}

View File

@@ -0,0 +1,115 @@
using System;
using System.Collections.Generic;
namespace SharpCompress.Common.Dmg.HFS
{
internal sealed class HFSExtentKey : HFSStructBase, IEquatable<HFSExtentKey>, IComparable<HFSExtentKey>, IComparable
{
public byte ForkType { get; }
public uint FileId { get; }
public uint StartBlock { get; }
public HFSExtentKey(byte forkType, uint fileId, uint startBlock)
{
ForkType = forkType;
FileId = fileId;
StartBlock = startBlock;
}
public HFSExtentKey(byte[] key)
{
ReadOnlySpan<byte> data = key.AsSpan();
ForkType = ReadUInt8(ref data);
_ = ReadUInt8(ref data); // padding
FileId = ReadUInt32(ref data);
StartBlock = ReadUInt32(ref data);
}
public bool Equals(HFSExtentKey? other)
{
if (other is null) return false;
else return (ForkType == other.ForkType) && (FileId == other.FileId) && (StartBlock == other.StartBlock);
}
public override bool Equals(object? obj)
{
if (obj is HFSExtentKey other) return Equals(other);
else return false;
}
public int CompareTo(HFSExtentKey? other)
{
if (other is null) return 1;
int result = FileId.CompareTo(other.FileId);
if (result == 0) result = ForkType.CompareTo(other.ForkType);
if (result == 0) result = StartBlock.CompareTo(other.StartBlock);
return result;
}
public int CompareTo(object? obj)
{
if (obj is null) return 1;
else if (obj is HFSExtentKey other) return CompareTo(other);
else throw new ArgumentException("Object is not of type ExtentKey", nameof(obj));
}
public override int GetHashCode()
=> ForkType.GetHashCode() ^ FileId.GetHashCode() ^ StartBlock.GetHashCode();
public static bool operator ==(HFSExtentKey? left, HFSExtentKey? right)
{
if (left is null) return right is null;
else return left.Equals(right);
}
public static bool operator !=(HFSExtentKey? left, HFSExtentKey? right)
{
if (left is null) return right is not null;
else return !left.Equals(right);
}
public static bool operator <(HFSExtentKey? left, HFSExtentKey? right)
{
if (left is null) return right is not null;
else return left.CompareTo(right) < 0;
}
public static bool operator >(HFSExtentKey? left, HFSExtentKey? right)
{
if (left is null) return false;
else return left.CompareTo(right) > 0;
}
public static bool operator <=(HFSExtentKey? left, HFSExtentKey? right)
{
if (left is null) return true;
else return left.CompareTo(right) <= 0;
}
public static bool operator >=(HFSExtentKey? left, HFSExtentKey? right)
{
if (left is null) return right is null;
else return left.CompareTo(right) >= 0;
}
}
internal sealed class HFSExtentRecord : HFSStructBase
{
private const int ExtentCount = 8;
public IReadOnlyList<HFSExtentDescriptor> Extents { get; }
private HFSExtentRecord(IReadOnlyList<HFSExtentDescriptor> extents)
=> Extents = extents;
public static HFSExtentRecord Read(ref ReadOnlySpan<byte> data)
{
var extents = new HFSExtentDescriptor[ExtentCount];
for (int i = 0; i < ExtentCount; i++)
extents[i] = HFSExtentDescriptor.Read(ref data);
return new HFSExtentRecord(extents);
}
}
}

View File

@@ -0,0 +1,145 @@
using System;
namespace SharpCompress.Common.Dmg.HFS
{
internal struct HFSPoint
{
public short V;
public short H;
}
internal struct HFSRect
{
public short Top;
public short Left;
public short Bottom;
public short Right;
}
[Flags]
internal enum HFSFinderFlags : ushort
{
None = 0x0000,
IsOnDesk = 0x0001, /* Files and folders (System 6) */
Color = 0x000E, /* Files and folders */
IsShared = 0x0040, /* Files only (Applications only) If */
/* clear, the application needs */
/* to write to its resource fork, */
/* and therefore cannot be shared */
/* on a server */
HasNoINITs = 0x0080, /* Files only (Extensions/Control */
/* Panels only) */
/* This file contains no INIT resource */
HasBeenInited = 0x0100, /* Files only. Clear if the file */
/* contains desktop database resources */
/* ('BNDL', 'FREF', 'open', 'kind'...) */
/* that have not been added yet. Set */
/* only by the Finder. */
/* Reserved for folders */
HasCustomIcon = 0x0400, /* Files and folders */
IsStationery = 0x0800, /* Files only */
NameLocked = 0x1000, /* Files and folders */
HasBundle = 0x2000, /* Files only */
IsInvisible = 0x4000, /* Files and folders */
IsAlias = 0x8000 /* Files only */
}
[Flags]
internal enum HFSExtendedFinderFlags : ushort
{
None = 0x0000,
ExtendedFlagsAreInvalid = 0x8000, /* The other extended flags */
/* should be ignored */
HasCustomBadge = 0x0100, /* The file or folder has a */
/* badge resource */
HasRoutingInfo = 0x0004 /* The file contains routing */
/* info resource */
}
internal sealed class HFSFileInfo : HFSStructBase
{
public string FileType { get; } /* The type of the file */
public string FileCreator { get; } /* The file's creator */
public HFSFinderFlags FinderFlags { get; }
public HFSPoint Location { get; } /* File's location in the folder. */
public HFSExtendedFinderFlags ExtendedFinderFlags { get; }
public int PutAwayFolderId { get; }
private HFSFileInfo(
string fileType,
string fileCreator,
HFSFinderFlags finderFlags,
HFSPoint location,
HFSExtendedFinderFlags extendedFinderFlags,
int putAwayFolderId)
{
FileType = fileType;
FileCreator = fileCreator;
FinderFlags = finderFlags;
Location = location;
ExtendedFinderFlags = extendedFinderFlags;
PutAwayFolderId = putAwayFolderId;
}
public static HFSFileInfo Read(ref ReadOnlySpan<byte> data)
{
string fileType = ReadOSType(ref data);
string fileCreator = ReadOSType(ref data);
var finderFlags = (HFSFinderFlags)ReadUInt16(ref data);
var location = ReadPoint(ref data);
_ = ReadUInt16(ref data); // reserved
data = data.Slice(4 * sizeof(short)); // reserved
var extendedFinderFlags = (HFSExtendedFinderFlags)ReadUInt16(ref data);
_ = ReadInt16(ref data); // reserved
int putAwayFolderId = ReadInt32(ref data);
return new HFSFileInfo(fileType, fileCreator, finderFlags, location, extendedFinderFlags, putAwayFolderId);
}
}
internal sealed class HFSFolderInfo : HFSStructBase
{
public HFSRect WindowBounds { get; } /* The position and dimension of the */
/* folder's window */
public HFSFinderFlags FinderFlags { get; }
public HFSPoint Location { get; } /* Folder's location in the parent */
/* folder. If set to {0, 0}, the Finder */
/* will place the item automatically */
public HFSPoint ScrollPosition { get; } /* Scroll position (for icon views) */
public HFSExtendedFinderFlags ExtendedFinderFlags { get; }
public int PutAwayFolderId { get; }
private HFSFolderInfo(
HFSRect windowBounds,
HFSFinderFlags finderFlags,
HFSPoint location,
HFSPoint scrollPosition,
HFSExtendedFinderFlags extendedFinderFlags,
int putAwayFolderId)
{
WindowBounds = windowBounds;
FinderFlags = finderFlags;
Location = location;
ScrollPosition = scrollPosition;
ExtendedFinderFlags = extendedFinderFlags;
PutAwayFolderId = putAwayFolderId;
}
public static HFSFolderInfo Read(ref ReadOnlySpan<byte> data)
{
var windowBounds = ReadRect(ref data);
var finderFlags = (HFSFinderFlags)ReadUInt16(ref data);
var location = ReadPoint(ref data);
_ = ReadUInt16(ref data); // reserved
var scrollPosition = ReadPoint(ref data);
_ = ReadInt32(ref data); // reserved
var extendedFinderFlags = (HFSExtendedFinderFlags)ReadUInt16(ref data);
_ = ReadInt16(ref data); // reserved
int putAwayFolderId = ReadInt32(ref data);
return new HFSFolderInfo(windowBounds, finderFlags, location, scrollPosition, extendedFinderFlags, putAwayFolderId);
}
}
}

View File

@@ -0,0 +1,50 @@
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Common.Dmg.HFS
{
internal sealed class HFSForkData : HFSStructBase
{
private const int ExtentCount = 8;
public ulong LogicalSize { get; }
public uint ClumpSize { get; }
public uint TotalBlocks { get; }
public IReadOnlyList<HFSExtentDescriptor> Extents { get; }
private HFSForkData(ulong logicalSize, uint clumpSize, uint totalBlocks, IReadOnlyList<HFSExtentDescriptor> extents)
{
LogicalSize = logicalSize;
ClumpSize = clumpSize;
TotalBlocks = totalBlocks;
Extents = extents;
}
public static HFSForkData Read(Stream stream)
{
ulong logicalSize = ReadUInt64(stream);
uint clumpSize = ReadUInt32(stream);
uint totalBlocks = ReadUInt32(stream);
var extents = new HFSExtentDescriptor[ExtentCount];
for (int i = 0; i < ExtentCount; i++)
extents[i] = HFSExtentDescriptor.Read(stream);
return new HFSForkData(logicalSize, clumpSize, totalBlocks, extents);
}
public static HFSForkData Read(ref ReadOnlySpan<byte> data)
{
ulong logicalSize = ReadUInt64(ref data);
uint clumpSize = ReadUInt32(ref data);
uint totalBlocks = ReadUInt32(ref data);
var extents = new HFSExtentDescriptor[ExtentCount];
for (int i = 0; i < ExtentCount; i++)
extents[i] = HFSExtentDescriptor.Read(ref data);
return new HFSForkData(logicalSize, clumpSize, totalBlocks, extents);
}
}
}

View File

@@ -0,0 +1,196 @@
using SharpCompress.IO;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace SharpCompress.Common.Dmg.HFS
{
internal sealed class HFSForkStream : Stream
{
private readonly Stream _baseStream;
private readonly HFSVolumeHeader _volumeHeader;
private readonly IReadOnlyList<HFSExtentDescriptor> _extents;
private long _position;
private bool _isEnded;
private int _extentIndex;
private Stream? _extentStream;
public override bool CanRead => true;
public override bool CanWrite => false;
public override bool CanSeek => true;
public override long Length { get; }
public override long Position
{
get => _position;
set
{
if ((value < 0) || (value > Length)) throw new ArgumentOutOfRangeException(nameof(value));
if (value == Length)
{
// End of the stream
_position = Length;
_isEnded = true;
_extentIndex = -1;
_extentStream = null;
}
else if (value != _position)
{
_position = value;
_isEnded = false;
// First determine which extent the new position falls into, then seek to the exact offset within that extent.
long offsetInExtent = _position;
for (int i = 0; i < _extents.Count; i++)
{
var extent = _extents[i];
long extentSize = (long)extent.BlockCount * _volumeHeader.BlockSize;
if (offsetInExtent < extentSize)
{
if (i == _extentIndex)
{
// We are in the same extent so just seek to the correct position
_extentStream!.Position = offsetInExtent;
}
else
{
_extentIndex = i;
_extentStream = GetExtentStream();
_extentStream.Position = offsetInExtent;
}
break;
}
else
{
offsetInExtent -= extentSize;
}
}
}
}
}
public HFSForkStream(Stream baseStream, HFSVolumeHeader volumeHeader, HFSForkData forkData)
{
_baseStream = baseStream;
_volumeHeader = volumeHeader;
_extents = forkData.Extents;
Length = (long)forkData.LogicalSize;
_position = 0;
_extentIndex = -1;
_extentIndex = GetNextExtent();
_isEnded = _extentIndex < 0;
if (!_isEnded) _extentStream = GetExtentStream();
}
public HFSForkStream(
Stream baseStream, HFSVolumeHeader volumeHeader, HFSForkData forkData, uint fileId,
IReadOnlyDictionary<HFSExtentKey, HFSExtentRecord> extents)
{
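// A catalog record stores only the first eight extents of a fork; any further
// extents live in the extents-overflow B-tree, keyed by (forkType, fileId,
// startBlock) with forkType 0 meaning the data fork, so walk that tree here.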
_baseStream = baseStream;
_volumeHeader = volumeHeader;
Length = (long)forkData.LogicalSize;
uint blocks = (uint)forkData.Extents.Sum(e => e.BlockCount);
var totalExtents = new List<HFSExtentDescriptor>(forkData.Extents);
_extents = totalExtents;
var nextKey = new HFSExtentKey(0, fileId, blocks);
while (extents.TryGetValue(nextKey, out var record))
{
blocks += (uint)record.Extents.Sum(e => e.BlockCount);
totalExtents.AddRange(record.Extents);
nextKey = new HFSExtentKey(0, fileId, blocks);
}
_position = 0;
_extentIndex = -1;
_extentIndex = GetNextExtent();
_isEnded = _extentIndex < 0;
if (!_isEnded) _extentStream = GetExtentStream();
}
private int GetNextExtent()
{
int index = _extentIndex + 1;
if (index >= _extents.Count) return -1;
var extent = _extents[index];
if ((extent.StartBlock == 0) && (extent.BlockCount == 0)) return -1;
return index;
}
private Stream GetExtentStream()
{
if (_extentIndex < 0)
throw new InvalidOperationException("Invalid extent index");
var extent = _extents[_extentIndex];
return new HFSExtentStream(_baseStream, _volumeHeader, extent);
}
public override void Flush()
{ }
public override int Read(byte[] buffer, int offset, int count)
{
if (_isEnded) return 0;
count = (int)Math.Min(count, Length - Position);
int readCount = _extentStream!.Read(buffer, offset, count);
while (readCount < count)
{
_extentIndex = GetNextExtent();
if (_extentIndex < 0)
{
_isEnded = true;
_position += readCount;
return readCount;
}
_extentStream = GetExtentStream();
readCount += _extentStream.Read(buffer, offset + readCount, count - readCount);
}
_position += readCount;
return readCount;
}
public override long Seek(long offset, SeekOrigin origin)
{
switch (origin)
{
case SeekOrigin.Begin:
Position = offset;
break;
case SeekOrigin.Current:
Position += offset;
break;
case SeekOrigin.End:
Position = Length - offset;
break;
}
return Position;
}
public override void SetLength(long value)
=> throw new NotSupportedException();
public override void Write(byte[] buffer, int offset, int count)
=> throw new NotSupportedException();
private sealed class HFSExtentStream : SeekableSubStream
{
public HFSExtentStream(Stream stream, HFSVolumeHeader volumeHeader, HFSExtentDescriptor extent)
: base(stream, (long)extent.StartBlock * volumeHeader.BlockSize, (long)extent.BlockCount * volumeHeader.BlockSize)
{ }
}
}
}

View File

@@ -0,0 +1,91 @@
using System;
namespace SharpCompress.Common.Dmg.HFS
{
internal abstract class HFSKeyedRecord : HFSStructBase
{
private readonly HFSKeyCompareType _compareType;
private readonly bool _isHFSX;
private HFSCatalogKey? _catalogKey;
private HFSExtentKey? _extentKey;
public byte[] Key { get; }
public HFSCatalogKey GetCatalogKey() => _catalogKey ??= new HFSCatalogKey(Key, _compareType, _isHFSX);
public HFSExtentKey GetExtentKey() => _extentKey ??= new HFSExtentKey(Key);
protected HFSKeyedRecord(byte[] key, HFSKeyCompareType compareType, bool isHFSX)
{
Key = key;
_compareType = compareType;
_isHFSX = isHFSX;
}
}
internal sealed class HFSPointerRecord : HFSKeyedRecord
{
public uint NodeNumber { get; }
private HFSPointerRecord(byte[] key, uint nodeNumber, HFSKeyCompareType compareType, bool isHFSX)
: base(key, compareType, isHFSX)
{
NodeNumber = nodeNumber;
}
public static HFSPointerRecord Read(ref ReadOnlySpan<byte> data, HFSTreeHeaderRecord headerRecord, bool isHFSX)
{
bool isBigKey = headerRecord.Attributes.HasFlag(HFSTreeAttributes.BigKeys);
ushort keyLength = isBigKey ? ReadUInt16(ref data) : ReadUInt8(ref data);
if (!headerRecord.Attributes.HasFlag(HFSTreeAttributes.VariableIndexKeys)) keyLength = headerRecord.MaxKeyLength;
int keySize = (isBigKey ? 2 : 1) + keyLength;
var key = new byte[keyLength];
data.Slice(0, keyLength).CopyTo(key);
data = data.Slice(keyLength);
// data is always aligned to 2 bytes
if (keySize % 2 == 1) data = data.Slice(1);
uint nodeNumber = ReadUInt32(ref data);
return new HFSPointerRecord(key, nodeNumber, headerRecord.KeyCompareType, isHFSX);
}
}
internal sealed class HFSDataRecord : HFSKeyedRecord
{
public byte[] Data { get; }
private HFSDataRecord(byte[] key, byte[] data, HFSKeyCompareType compareType, bool isHFSX)
: base(key, compareType, isHFSX)
{
Data = data;
}
public static HFSDataRecord Read(ref ReadOnlySpan<byte> data, int size, HFSTreeHeaderRecord headerRecord, bool isHFSX)
{
bool isBigKey = headerRecord.Attributes.HasFlag(HFSTreeAttributes.BigKeys);
ushort keyLength = isBigKey ? ReadUInt16(ref data) : ReadUInt8(ref data);
int keySize = (isBigKey ? 2 : 1) + keyLength;
size -= keySize;
var key = new byte[keyLength];
data.Slice(0, keyLength).CopyTo(key);
data = data.Slice(keyLength);
// data is always aligned to 2 bytes
if (keySize % 2 == 1)
{
data = data.Slice(1);
size--;
}
var structData = new byte[size];
data.Slice(0, size).CopyTo(structData);
data = data.Slice(size);
return new HFSDataRecord(key, structData, headerRecord.KeyCompareType, isHFSX);
}
}
}

View File

@@ -0,0 +1,35 @@
using System;
namespace SharpCompress.Common.Dmg.HFS
{
internal sealed class HFSPermissions : HFSStructBase
{
public uint OwnerID { get; }
public uint GroupID { get; }
public byte AdminFlags { get; }
public byte OwnerFlags { get; }
public ushort FileMode { get; }
public uint Special { get; }
private HFSPermissions(uint ownerID, uint groupID, byte adminFlags, byte ownerFlags, ushort fileMode, uint special)
{
OwnerID = ownerID;
GroupID = groupID;
AdminFlags = adminFlags;
OwnerFlags = ownerFlags;
FileMode = fileMode;
Special = special;
}
public static HFSPermissions Read(ref ReadOnlySpan<byte> data)
{
return new HFSPermissions(
ReadUInt32(ref data),
ReadUInt32(ref data),
ReadUInt8(ref data),
ReadUInt8(ref data),
ReadUInt16(ref data),
ReadUInt32(ref data));
}
}
}

View File

@@ -0,0 +1,187 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Text;
namespace SharpCompress.Common.Dmg.HFS
{
internal abstract class HFSStructBase
{
private const int StringSize = 510;
private const int OSTypeSize = 4;
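// HFS+ timestamps are unsigned seconds since midnight, January 1, 1904 (UTC).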
private static readonly DateTime Epoch = new DateTime(1904, 1, 1, 0, 0, 0, DateTimeKind.Utc);
private static readonly byte[] _buffer = new byte[StringSize];
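// NOTE: this scratch buffer is shared by every reader, so parsing is not thread-safe.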
protected static byte ReadUInt8(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(byte)) != sizeof(byte))
throw new EndOfStreamException();
return _buffer[0];
}
protected static ushort ReadUInt16(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(ushort)) != sizeof(ushort))
throw new EndOfStreamException();
return BinaryPrimitives.ReadUInt16BigEndian(_buffer);
}
protected static short ReadInt16(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(short)) != sizeof(short))
throw new EndOfStreamException();
return BinaryPrimitives.ReadInt16BigEndian(_buffer);
}
protected static uint ReadUInt32(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(uint)) != sizeof(uint))
throw new EndOfStreamException();
return BinaryPrimitives.ReadUInt32BigEndian(_buffer);
}
protected static int ReadInt32(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(int)) != sizeof(int))
throw new EndOfStreamException();
return BinaryPrimitives.ReadInt32BigEndian(_buffer);
}
protected static ulong ReadUInt64(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(ulong)) != sizeof(ulong))
throw new EndOfStreamException();
return BinaryPrimitives.ReadUInt64BigEndian(_buffer);
}
protected static long ReadInt64(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(long)) != sizeof(long))
throw new EndOfStreamException();
return BinaryPrimitives.ReadInt64BigEndian(_buffer);
}
protected static string ReadString(Stream stream)
{
ushort length = ReadUInt16(stream);
if (stream.Read(_buffer, 0, StringSize) != StringSize)
throw new EndOfStreamException();
return Encoding.BigEndianUnicode.GetString(_buffer, 0, Math.Min(length * 2, StringSize)); // HFS+ strings are big-endian UTF-16
}
protected static DateTime ReadDate(Stream stream)
{
uint seconds = ReadUInt32(stream);
var span = TimeSpan.FromSeconds(seconds);
return Epoch + span;
}
protected static byte ReadUInt8(ref ReadOnlySpan<byte> data)
{
byte val = data[0];
data = data.Slice(sizeof(byte));
return val;
}
protected static ushort ReadUInt16(ref ReadOnlySpan<byte> data)
{
ushort val = BinaryPrimitives.ReadUInt16BigEndian(data);
data = data.Slice(sizeof(ushort));
return val;
}
protected static short ReadInt16(ref ReadOnlySpan<byte> data)
{
short val = BinaryPrimitives.ReadInt16BigEndian(data);
data = data.Slice(sizeof(short));
return val;
}
protected static uint ReadUInt32(ref ReadOnlySpan<byte> data)
{
uint val = BinaryPrimitives.ReadUInt32BigEndian(data);
data = data.Slice(sizeof(uint));
return val;
}
protected static int ReadInt32(ref ReadOnlySpan<byte> data)
{
int val = BinaryPrimitives.ReadInt32BigEndian(data);
data = data.Slice(sizeof(int));
return val;
}
protected static ulong ReadUInt64(ref ReadOnlySpan<byte> data)
{
ulong val = BinaryPrimitives.ReadUInt64BigEndian(data);
data = data.Slice(sizeof(ulong));
return val;
}
protected static long ReadInt64(ref ReadOnlySpan<byte> data)
{
long val = BinaryPrimitives.ReadInt64BigEndian(data);
data = data.Slice(sizeof(long));
return val;
}
protected static string ReadString(ref ReadOnlySpan<byte> data, bool truncate)
{
int length = ReadUInt16(ref data);
if (truncate)
{
length = Math.Min(length * 2, StringSize);
data.Slice(0, length).CopyTo(_buffer);
data = data.Slice(length);
return Encoding.BigEndianUnicode.GetString(_buffer, 0, length);
}
else
{
data.Slice(0, StringSize).CopyTo(_buffer);
data = data.Slice(StringSize);
return Encoding.BigEndianUnicode.GetString(_buffer, 0, Math.Min(length * 2, StringSize));
}
}
protected static DateTime ReadDate(ref ReadOnlySpan<byte> data)
{
uint seconds = ReadUInt32(ref data);
var span = TimeSpan.FromSeconds(seconds);
return Epoch + span;
}
protected static string ReadOSType(ref ReadOnlySpan<byte> data)
{
data.Slice(0, OSTypeSize).CopyTo(_buffer);
data = data.Slice(OSTypeSize);
return Encoding.ASCII.GetString(_buffer, 0, OSTypeSize).NullTerminate();
}
protected static HFSPoint ReadPoint(ref ReadOnlySpan<byte> data)
{
return new HFSPoint()
{
V = ReadInt16(ref data),
H = ReadInt16(ref data)
};
}
protected static HFSRect ReadRect(ref ReadOnlySpan<byte> data)
{
return new HFSRect()
{
Top = ReadInt16(ref data),
Left = ReadInt16(ref data),
Bottom = ReadInt16(ref data),
Right = ReadInt16(ref data)
};
}
}
}

View File

@@ -0,0 +1,108 @@
using System;
using System.IO;
namespace SharpCompress.Common.Dmg.HFS
{
internal enum HFSTreeType : byte
{
HFS = 0, // control file
User = 128, // user btree type starts from 128
Reserved = 255
}
internal enum HFSKeyCompareType : byte
{
CaseFolding = 0xCF, // case-insensitive
BinaryCompare = 0xBC // case-sensitive
}
[Flags]
internal enum HFSTreeAttributes : uint
{
None = 0x00000000,
BadClose = 0x00000001,
BigKeys = 0x00000002,
VariableIndexKeys = 0x00000004
}
internal sealed class HFSTreeHeaderRecord : HFSStructBase
{
public ushort TreeDepth;
public uint RootNode;
public uint LeafRecords;
public uint FirstLeafNode;
public uint LastLeafNode;
public ushort NodeSize;
public ushort MaxKeyLength;
public uint TotalNodes;
public uint FreeNodes;
public uint ClumpSize;
public HFSTreeType TreeType;
public HFSKeyCompareType KeyCompareType;
public HFSTreeAttributes Attributes;
private HFSTreeHeaderRecord(
ushort treeDepth,
uint rootNode,
uint leafRecords,
uint firstLeafNode,
uint lastLeafNode,
ushort nodeSize,
ushort maxKeyLength,
uint totalNodes,
uint freeNodes,
uint clumpSize,
HFSTreeType treeType,
HFSKeyCompareType keyCompareType,
HFSTreeAttributes attributes)
{
TreeDepth = treeDepth;
RootNode = rootNode;
LeafRecords = leafRecords;
FirstLeafNode = firstLeafNode;
LastLeafNode = lastLeafNode;
NodeSize = nodeSize;
MaxKeyLength = maxKeyLength;
TotalNodes = totalNodes;
FreeNodes = freeNodes;
ClumpSize = clumpSize;
TreeType = treeType;
KeyCompareType = keyCompareType;
Attributes = attributes;
}
public static HFSTreeHeaderRecord Read(Stream stream)
{
ushort treeDepth = ReadUInt16(stream);
uint rootNode = ReadUInt32(stream);
uint leafRecords = ReadUInt32(stream);
uint firstLeafNode = ReadUInt32(stream);
uint lastLeafNode = ReadUInt32(stream);
ushort nodeSize = ReadUInt16(stream);
ushort maxKeyLength = ReadUInt16(stream);
uint totalNodes = ReadUInt32(stream);
uint freeNodes = ReadUInt32(stream);
_ = ReadUInt16(stream); // reserved
uint clumpSize = ReadUInt32(stream);
var treeType = (HFSTreeType)ReadUInt8(stream);
var keyCompareType = (HFSKeyCompareType)ReadUInt8(stream);
var attributes = (HFSTreeAttributes)ReadUInt32(stream);
for (int i = 0; i < 16; i++) _ = ReadUInt32(stream); // reserved
return new HFSTreeHeaderRecord(
treeDepth,
rootNode,
leafRecords,
firstLeafNode,
lastLeafNode,
nodeSize,
maxKeyLength,
totalNodes,
freeNodes,
clumpSize,
treeType,
keyCompareType,
attributes);
}
}
}

View File

@@ -0,0 +1,167 @@
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Common.Dmg.HFS
{
internal abstract class HFSTreeNode : HFSStructBase
{
private static byte[]? _buffer = null;
public HFSTreeNodeDescriptor Descriptor { get; }
protected HFSTreeNode(HFSTreeNodeDescriptor descriptor)
=> Descriptor = descriptor;
public static bool TryRead(Stream stream, HFSTreeHeaderRecord headerRecord, bool isHFSX, out HFSTreeNode? node)
{
node = null;
if (!HFSTreeNodeDescriptor.TryRead(stream, out var descriptor)) return false;
int size = (int)headerRecord.NodeSize - HFSTreeNodeDescriptor.Size;
if ((_buffer is null) || (_buffer.Length < size))
_buffer = new byte[size * 2];
if (stream.Read(_buffer, 0, size) != size)
throw new EndOfStreamException();
ReadOnlySpan<byte> data = _buffer.AsSpan(0, size);
switch (descriptor!.Kind)
{
case HFSTreeNodeKind.Leaf:
node = HFSLeafTreeNode.Read(descriptor, data, headerRecord, isHFSX);
return true;
case HFSTreeNodeKind.Index:
node = HFSIndexTreeNode.Read(descriptor, data, headerRecord, isHFSX);
return true;
case HFSTreeNodeKind.Map:
node = HFSMapTreeNode.Read(descriptor, data);
return true;
}
return false;
}
}
internal sealed class HFSHeaderTreeNode : HFSTreeNode
{
private const int UserDataSize = 128;
public HFSTreeHeaderRecord HeaderRecord { get; }
public IReadOnlyList<byte> UserData { get; }
public IReadOnlyList<byte> Map { get; }
private HFSHeaderTreeNode(
HFSTreeNodeDescriptor descriptor,
HFSTreeHeaderRecord headerRecord,
IReadOnlyList<byte> userData,
IReadOnlyList<byte> map)
: base(descriptor)
{
HeaderRecord = headerRecord;
UserData = userData;
Map = map;
}
public static HFSHeaderTreeNode Read(HFSTreeNodeDescriptor descriptor, Stream stream)
{
if (descriptor.Kind != HFSTreeNodeKind.Header)
throw new ArgumentException("Descriptor does not define a header node");
var headerRecord = HFSTreeHeaderRecord.Read(stream);
var userData = new byte[UserDataSize];
if (stream.Read(userData, 0, UserDataSize) != UserDataSize)
throw new EndOfStreamException();
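// 256 = 14 (node descriptor) + 106 (header record) + 128 (user data) + 8 (offset table)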
int mapSize = (int)(headerRecord.NodeSize - 256);
var map = new byte[mapSize];
if (stream.Read(map, 0, mapSize) != mapSize)
throw new EndOfStreamException();
// offset values (not required for header node)
_ = ReadUInt16(stream);
_ = ReadUInt16(stream);
_ = ReadUInt16(stream);
_ = ReadUInt16(stream);
return new HFSHeaderTreeNode(descriptor, headerRecord, userData, map);
}
}
internal sealed class HFSMapTreeNode : HFSTreeNode
{
public IReadOnlyList<byte> Map { get; }
private HFSMapTreeNode(HFSTreeNodeDescriptor descriptor, IReadOnlyList<byte> map)
: base(descriptor)
{
Map = map;
}
public static HFSMapTreeNode Read(HFSTreeNodeDescriptor descriptor, ReadOnlySpan<byte> data)
{
int mapSize = data.Length - 6;
var map = new byte[mapSize];
data.Slice(0, mapSize).CopyTo(map);
return new HFSMapTreeNode(descriptor, map);
}
}
internal sealed class HFSIndexTreeNode : HFSTreeNode
{
public IReadOnlyList<HFSPointerRecord> Records { get; }
private HFSIndexTreeNode(HFSTreeNodeDescriptor descriptor, IReadOnlyList<HFSPointerRecord> records)
: base(descriptor)
{
Records = records;
}
public static HFSIndexTreeNode Read(HFSTreeNodeDescriptor descriptor, ReadOnlySpan<byte> data, HFSTreeHeaderRecord headerRecord, bool isHFSX)
{
int recordCount = descriptor.NumRecords;
var records = new HFSPointerRecord[recordCount];
for (int i = 0; i < recordCount; i++)
records[i] = HFSPointerRecord.Read(ref data, headerRecord, isHFSX);
return new HFSIndexTreeNode(descriptor, records);
}
}
internal sealed class HFSLeafTreeNode : HFSTreeNode
{
public IReadOnlyList<HFSDataRecord> Records { get; }
private HFSLeafTreeNode(HFSTreeNodeDescriptor descriptor, IReadOnlyList<HFSDataRecord> records)
: base(descriptor)
{
Records = records;
}
public static HFSLeafTreeNode Read(HFSTreeNodeDescriptor descriptor, ReadOnlySpan<byte> data, HFSTreeHeaderRecord headerRecord, bool isHFSX)
{
int recordCount = descriptor.NumRecords;
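// Record offsets are stored as big-endian UInt16s at the end of the node in
// reverse order; the extra entry [recordCount] marks the start of free space,
// so consecutive differences yield each record's size.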
var recordOffsets = new int[recordCount + 1];
for (int i = 0; i < recordOffsets.Length; i++)
{
var offsetData = data.Slice(data.Length - (2 * i) - 2);
ushort offset = ReadUInt16(ref offsetData);
recordOffsets[i] = offset;
}
var records = new HFSDataRecord[recordCount];
for (int i = 0; i < recordCount; i++)
{
int size = recordOffsets[i + 1] - recordOffsets[i];
records[i] = HFSDataRecord.Read(ref data, size, headerRecord, isHFSX);
}
return new HFSLeafTreeNode(descriptor, records);
}
}
}

View File

@@ -0,0 +1,55 @@
using System;
using System.IO;
namespace SharpCompress.Common.Dmg.HFS
{
internal enum HFSTreeNodeKind : sbyte
{
Leaf = -1,
Index = 0,
Header = 1,
Map = 2
}
internal sealed class HFSTreeNodeDescriptor : HFSStructBase
{
public const int Size = 14;
public uint FLink { get; }
public uint BLink { get; }
public HFSTreeNodeKind Kind { get; }
public byte Height { get; }
public ushort NumRecords { get; }
private HFSTreeNodeDescriptor(uint fLink, uint bLink, HFSTreeNodeKind kind, byte height, ushort numRecords)
{
FLink = fLink;
BLink = bLink;
Kind = kind;
Height = height;
NumRecords = numRecords;
}
public static bool TryRead(Stream stream, out HFSTreeNodeDescriptor? descriptor)
{
descriptor = null;
uint fLink = ReadUInt32(stream);
uint bLink = ReadUInt32(stream);
sbyte rawKind = (sbyte)ReadUInt8(stream);
if (!Enum.IsDefined(typeof(HFSTreeNodeKind), rawKind)) return false;
var kind = (HFSTreeNodeKind)rawKind;
byte height = ReadUInt8(stream);
if (((kind == HFSTreeNodeKind.Header) || (kind == HFSTreeNodeKind.Map)) && (height != 0)) return false;
if ((kind == HFSTreeNodeKind.Leaf) && (height != 1)) return false;
ushort numRecords = ReadUInt16(stream);
_ = ReadUInt16(stream); // reserved
descriptor = new HFSTreeNodeDescriptor(fLink, bLink, kind, height, numRecords);
return true;
}
}
}

View File

@@ -0,0 +1,206 @@
using SharpCompress.Archives.Dmg;
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace SharpCompress.Common.Dmg.HFS
{
internal static class HFSUtil
{
private const string CorruptHFSMessage = "Corrupt HFS volume";
private static (HFSHeaderTreeNode, IReadOnlyList<HFSTreeNode>) ReadTree(Stream stream, bool isHFSX)
{
if (!HFSTreeNodeDescriptor.TryRead(stream, out var headerDesc))
throw new InvalidFormatException(CorruptHFSMessage);
var header = HFSHeaderTreeNode.Read(headerDesc!, stream);
var nodes = new HFSTreeNode[header.HeaderRecord.TotalNodes];
nodes[0] = header;
for (int i = 1; i < nodes.Length; i++)
{
if (!HFSTreeNode.TryRead(stream, header.HeaderRecord, isHFSX, out var node))
throw new InvalidFormatException(CorruptHFSMessage);
nodes[i] = node!;
}
return (header, nodes);
}
private static void EnumerateExtentsTree(
IReadOnlyList<HFSTreeNode> extentsTree,
IDictionary<HFSExtentKey, HFSExtentRecord> records,
int parentIndex)
{
var parent = extentsTree[parentIndex];
if (parent is HFSLeafTreeNode leafNode)
{
foreach (var record in leafNode.Records)
{
ReadOnlySpan<byte> data = record.Data.AsSpan();
var recordData = HFSExtentRecord.Read(ref data);
var key = record.GetExtentKey();
records.Add(key, recordData);
}
}
else if (parent is HFSIndexTreeNode indexNode)
{
foreach (var record in indexNode.Records)
EnumerateExtentsTree(extentsTree, records, (int)record.NodeNumber);
}
else
{
throw new InvalidFormatException(CorruptHFSMessage);
}
}
private static IReadOnlyDictionary<HFSExtentKey, HFSExtentRecord> LoadExtents(IReadOnlyList<HFSTreeNode> extentsTree, int rootIndex)
{
var records = new Dictionary<HFSExtentKey, HFSExtentRecord>();
if (rootIndex == 0) return records;
EnumerateExtentsTree(extentsTree, records, rootIndex);
return records;
}
private static void EnumerateCatalogTree(
HFSHeaderTreeNode catalogHeader,
IReadOnlyList<HFSTreeNode> catalogTree,
IDictionary<HFSCatalogKey, HFSCatalogRecord> records,
IDictionary<uint, HFSCatalogThread> threads,
int parentIndex,
bool isHFSX)
{
var parent = catalogTree[parentIndex];
if (parent is HFSLeafTreeNode leafNode)
{
foreach (var record in leafNode.Records)
{
ReadOnlySpan<byte> data = record.Data.AsSpan();
if (HFSCatalogRecord.TryRead(ref data, catalogHeader.HeaderRecord.KeyCompareType, isHFSX, out var recordData))
{
var key = record.GetCatalogKey();
if ((recordData!.Type == HFSCatalogRecordType.FileThread) || (recordData!.Type == HFSCatalogRecordType.FolderThread))
{
threads.Add(key.ParentId, (HFSCatalogThread)recordData);
}
else
{
records.Add(key, recordData);
}
}
else
{
throw new InvalidFormatException(CorruptHFSMessage);
}
}
}
else if (parent is HFSIndexTreeNode indexNode)
{
foreach (var record in indexNode.Records)
EnumerateCatalogTree(catalogHeader, catalogTree, records, threads, (int)record.NodeNumber, isHFSX);
}
else
{
throw new InvalidFormatException(CorruptHFSMessage);
}
}
private static (HFSCatalogKey, HFSCatalogRecord) GetRecord(uint id, IDictionary<HFSCatalogKey, HFSCatalogRecord> records, IDictionary<uint, HFSCatalogThread> threads)
{
if (threads.TryGetValue(id, out var thread))
{
if (records.TryGetValue(thread.CatalogKey, out var record))
return (thread.CatalogKey, record!);
}
throw new InvalidFormatException(CorruptHFSMessage);
}
private static string SanitizePath(string path)
{
var sb = new StringBuilder(path.Length);
foreach (char c in path)
{
if (!char.IsControl(c))
sb.Append(c);
}
return sb.ToString();
}
private static string GetPath(HFSCatalogKey key, IDictionary<HFSCatalogKey, HFSCatalogRecord> records, IDictionary<uint, HFSCatalogThread> threads)
{
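// CNID 1 is the parent of the root folder, so the recursion bottoms out here.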
if (key.ParentId == 1)
{
return key.Name;
}
else
{
var (parentKey, _) = GetRecord(key.ParentId, records, threads);
var path = Path.Combine(GetPath(parentKey, records, threads), key.Name);
return SanitizePath(path);
}
}
private static IEnumerable<DmgArchiveEntry> LoadEntriesFromCatalogTree(
Stream partitionStream,
DmgFilePart filePart,
HFSVolumeHeader volumeHeader,
HFSHeaderTreeNode catalogHeader,
IReadOnlyList<HFSTreeNode> catalogTree,
IReadOnlyDictionary<HFSExtentKey, HFSExtentRecord> extents,
DmgArchive archive,
int rootIndex)
{
if (rootIndex == 0) return Array.Empty<DmgArchiveEntry>();
var records = new Dictionary<HFSCatalogKey, HFSCatalogRecord>();
var threads = new Dictionary<uint, HFSCatalogThread>();
EnumerateCatalogTree(catalogHeader, catalogTree, records, threads, rootIndex, volumeHeader.IsHFSX);
var entries = new List<DmgArchiveEntry>();
foreach (var kvp in records)
{
var key = kvp.Key;
var record = kvp.Value;
string path = GetPath(key, records, threads);
var stream = (record is HFSCatalogFile file) ? new HFSForkStream(partitionStream, volumeHeader, file.DataFork, file.FileId, extents) : null;
var entry = new DmgArchiveEntry(stream, archive, record, path, filePart);
entries.Add(entry);
}
return entries;
}
public static IEnumerable<DmgArchiveEntry> LoadEntriesFromPartition(Stream partitionStream, string fileName, DmgArchive archive)
{
if (!HFSVolumeHeader.TryRead(partitionStream, out var volumeHeader))
throw new InvalidFormatException(CorruptHFSMessage);
var filePart = new DmgFilePart(partitionStream, fileName);
var extentsFile = volumeHeader!.ExtentsFile;
var extentsStream = new HFSForkStream(partitionStream, volumeHeader, extentsFile);
var (extentsHeader, extentsTree) = ReadTree(extentsStream, volumeHeader.IsHFSX);
var extents = LoadExtents(extentsTree, (int)extentsHeader.HeaderRecord.RootNode);
var catalogFile = volumeHeader!.CatalogFile;
var catalogStream = new HFSForkStream(partitionStream, volumeHeader, catalogFile);
var (catalogHeader, catalogTree) = ReadTree(catalogStream, volumeHeader.IsHFSX);
return LoadEntriesFromCatalogTree(
partitionStream,
filePart,
volumeHeader,
catalogHeader,
catalogTree,
extents,
archive,
(int)catalogHeader.HeaderRecord.RootNode);
}
}
}

View File

@@ -0,0 +1,179 @@
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Common.Dmg.HFS
{
internal sealed class HFSVolumeHeader : HFSStructBase
{
private const ushort SignaturePlus = 0x482B;
private const ushort SignatureX = 0x4858;
private const int FinderInfoCount = 8;
public bool IsHFSX { get; }
public ushort Version { get; }
public uint Attributes { get; }
public uint LastMountedVersion { get; }
public uint JournalInfoBlock { get; }
public DateTime CreateDate { get; }
public DateTime ModifyDate { get; }
public DateTime BackupDate { get; }
public DateTime CheckedDate { get; }
public uint FileCount { get; }
public uint FolderCount { get; }
public uint BlockSize { get; }
public uint TotalBlocks { get; }
public uint FreeBlocks { get; }
public uint NextAllocation { get; }
public uint RsrcClumpSize { get; }
public uint DataClumpSize { get; }
public uint NextCatalogID { get; }
public uint WriteCount { get; }
public ulong EncodingsBitmap { get; }
public IReadOnlyList<uint> FinderInfo { get; }
public HFSForkData AllocationFile { get; }
public HFSForkData ExtentsFile { get; }
public HFSForkData CatalogFile { get; }
public HFSForkData AttributesFile { get; }
public HFSForkData StartupFile { get; }
public HFSVolumeHeader(
bool isHFSX,
ushort version,
uint attributes,
uint lastMountedVersion,
uint journalInfoBlock,
DateTime createDate,
DateTime modifyDate,
DateTime backupDate,
DateTime checkedDate,
uint fileCount,
uint folderCount,
uint blockSize,
uint totalBlocks,
uint freeBlocks,
uint nextAllocation,
uint rsrcClumpSize,
uint dataClumpSize,
uint nextCatalogID,
uint writeCount,
ulong encodingsBitmap,
IReadOnlyList<uint> finderInfo,
HFSForkData allocationFile,
HFSForkData extentsFile,
HFSForkData catalogFile,
HFSForkData attributesFile,
HFSForkData startupFile)
{
IsHFSX = isHFSX;
Version = version;
Attributes = attributes;
LastMountedVersion = lastMountedVersion;
JournalInfoBlock = journalInfoBlock;
CreateDate = createDate;
ModifyDate = modifyDate;
BackupDate = backupDate;
CheckedDate = checkedDate;
FileCount = fileCount;
FolderCount = folderCount;
BlockSize = blockSize;
TotalBlocks = totalBlocks;
FreeBlocks = freeBlocks;
NextAllocation = nextAllocation;
RsrcClumpSize = rsrcClumpSize;
DataClumpSize = dataClumpSize;
NextCatalogID = nextCatalogID;
WriteCount = writeCount;
EncodingsBitmap = encodingsBitmap;
FinderInfo = finderInfo;
AllocationFile = allocationFile;
ExtentsFile = extentsFile;
CatalogFile = catalogFile;
AttributesFile = attributesFile;
StartupFile = startupFile;
}
private static IReadOnlyList<uint> ReadFinderInfo(Stream stream)
{
var finderInfo = new uint[FinderInfoCount];
for (int i = 0; i < FinderInfoCount; i++)
finderInfo[i] = ReadUInt32(stream);
return finderInfo;
}
public static bool TryRead(Stream stream, out HFSVolumeHeader? header)
{
header = null;
stream.Skip(1024); // reserved bytes
bool isHFSX;
ushort sig = ReadUInt16(stream);
if (sig == SignaturePlus) isHFSX = false;
else if (sig == SignatureX) isHFSX = true;
else return false;
ushort version = ReadUInt16(stream);
uint attributes = ReadUInt32(stream);
uint lastMountedVersion = ReadUInt32(stream);
uint journalInfoBlock = ReadUInt32(stream);
DateTime createDate = ReadDate(stream);
DateTime modifyDate = ReadDate(stream);
DateTime backupDate = ReadDate(stream);
DateTime checkedDate = ReadDate(stream);
uint fileCount = ReadUInt32(stream);
uint folderCount = ReadUInt32(stream);
uint blockSize = ReadUInt32(stream);
uint totalBlocks = ReadUInt32(stream);
uint freeBlocks = ReadUInt32(stream);
uint nextAllocation = ReadUInt32(stream);
uint rsrcClumpSize = ReadUInt32(stream);
uint dataClumpSize = ReadUInt32(stream);
uint nextCatalogID = ReadUInt32(stream);
uint writeCount = ReadUInt32(stream);
ulong encodingsBitmap = ReadUInt64(stream);
IReadOnlyList<uint> finderInfo = ReadFinderInfo(stream);
HFSForkData allocationFile = HFSForkData.Read(stream);
HFSForkData extentsFile = HFSForkData.Read(stream);
HFSForkData catalogFile = HFSForkData.Read(stream);
HFSForkData attributesFile = HFSForkData.Read(stream);
HFSForkData startupFile = HFSForkData.Read(stream);
header = new HFSVolumeHeader(
isHFSX,
version,
attributes,
lastMountedVersion,
journalInfoBlock,
createDate,
modifyDate,
backupDate,
checkedDate,
fileCount,
folderCount,
blockSize,
totalBlocks,
freeBlocks,
nextAllocation,
rsrcClumpSize,
dataClumpSize,
nextCatalogID,
writeCount,
encodingsBitmap,
finderInfo,
allocationFile,
extentsFile,
catalogFile,
attributesFile,
startupFile);
return true;
}
}
}

View File

@@ -0,0 +1,49 @@
using System;
namespace SharpCompress.Common.Dmg.Headers
{
internal enum BlkxChunkType : uint
{
Zero = 0x00000000u,
Uncompressed = 0x00000001u,
Ignore = 0x00000002u,
AdcCompressed = 0x80000004u,
ZlibCompressed = 0x80000005u,
Bz2Compressed = 0x80000006u,
Comment = 0x7FFFFFFEu,
Last = 0xFFFFFFFFu,
}
internal sealed class BlkxChunk : DmgStructBase
{
private const int SectorSize = 512;
public BlkxChunkType Type { get; } // Compression type used or chunk type
public uint Comment { get; } // "+beg" or "+end", if EntryType is comment (0x7FFFFFFE). Else reserved.
public ulong UncompressedOffset { get; } // Start of this chunk in the expanded image, in bytes (stored on disk as a sector number)
public ulong UncompressedLength { get; } // Length of this chunk in the expanded image, in bytes (stored on disk as a sector count)
public ulong CompressedOffset { get; } // Start of chunk in data fork
public ulong CompressedLength { get; } // Count of bytes of chunk, in data fork
private BlkxChunk(BlkxChunkType type, uint comment, ulong sectorNumber, ulong sectorCount, ulong compressedOffset, ulong compressedLength)
{
Type = type;
Comment = comment;
UncompressedOffset = sectorNumber * SectorSize;
UncompressedLength = sectorCount * SectorSize;
CompressedOffset = compressedOffset;
CompressedLength = compressedLength;
}
public static bool TryRead(ref ReadOnlySpan<byte> data, out BlkxChunk? chunk)
{
chunk = null;
var type = (BlkxChunkType)ReadUInt32(ref data);
if (!Enum.IsDefined(typeof(BlkxChunkType), type)) return false;
chunk = new BlkxChunk(type, ReadUInt32(ref data), ReadUInt64(ref data), ReadUInt64(ref data), ReadUInt64(ref data), ReadUInt64(ref data));
return true;
}
}
}

View File

@@ -0,0 +1,75 @@
using System;
using System.Collections.Generic;
namespace SharpCompress.Common.Dmg.Headers
{
internal sealed class BlkxTable : DmgStructBase
{
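// 0x6d697368 is "mish" in ASCII, the signature of a blkx table.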
private const uint Signature = 0x6d697368u;
public uint Version { get; } // Current version is 1
public ulong SectorNumber { get; } // Starting disk sector in this blkx descriptor
public ulong SectorCount { get; } // Number of disk sectors in this blkx descriptor
public ulong DataOffset { get; }
public uint BuffersNeeded { get; }
public uint BlockDescriptors { get; } // Number of descriptors
public UdifChecksum Checksum { get; }
public IReadOnlyList<BlkxChunk> Chunks { get; }
private BlkxTable(
uint version,
ulong sectorNumber,
ulong sectorCount,
ulong dataOffset,
uint buffersNeeded,
uint blockDescriptors,
UdifChecksum checksum,
IReadOnlyList<BlkxChunk> chunks)
{
Version = version;
SectorNumber = sectorNumber;
SectorCount = sectorCount;
DataOffset = dataOffset;
BuffersNeeded = buffersNeeded;
BlockDescriptors = blockDescriptors;
Checksum = checksum;
Chunks = chunks;
}
public static bool TryRead(in byte[] buffer, out BlkxTable? header)
{
header = null;
ReadOnlySpan<byte> data = buffer.AsSpan();
uint sig = ReadUInt32(ref data);
if (sig != Signature) return false;
uint version = ReadUInt32(ref data);
ulong sectorNumber = ReadUInt64(ref data);
ulong sectorCount = ReadUInt64(ref data);
ulong dataOffset = ReadUInt64(ref data);
uint buffersNeeded = ReadUInt32(ref data);
uint blockDescriptors = ReadUInt32(ref data);
data = data.Slice(6 * sizeof(uint)); // reserved
var checksum = UdifChecksum.Read(ref data);
uint chunkCount = ReadUInt32(ref data);
var chunks = new BlkxChunk[chunkCount];
for (int i = 0; i < chunkCount; i++)
{
if (!BlkxChunk.TryRead(ref data, out var chunk)) return false;
chunks[i] = chunk!;
}
header = new BlkxTable(version, sectorNumber, sectorCount, dataOffset, buffersNeeded, blockDescriptors, checksum, chunks);
return true;
}
}
}

View File

@@ -0,0 +1,138 @@
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Common.Dmg.Headers
{
internal sealed class DmgHeader : DmgStructBase
{
public const int HeaderSize = 512;
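// 0x6B6F6C79 is "koly" in ASCII, the signature of the UDIF trailer.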
private const uint Signature = 0x6B6F6C79u;
private const int UuidSize = 16; // 128 bit
public uint Version { get; } // Current version is 4
public uint Flags { get; } // Flags
public ulong RunningDataForkOffset { get; }
public ulong DataForkOffset { get; } // Data fork offset (usually 0, beginning of file)
public ulong DataForkLength { get; } // Size of data fork (usually up to the XMLOffset, below)
public ulong RsrcForkOffset { get; } // Resource fork offset, if any
public ulong RsrcForkLength { get; } // Resource fork length, if any
public uint SegmentNumber { get; } // Usually 1, may be 0
public uint SegmentCount { get; } // Usually 1, may be 0
public IReadOnlyList<byte> SegmentID { get; } // 128-bit GUID identifier of segment (if SegmentNumber !=0)
public UdifChecksum DataChecksum { get; }
public ulong XMLOffset { get; } // Offset of property list in DMG, from beginning
public ulong XMLLength { get; } // Length of property list
public UdifChecksum Checksum { get; }
public uint ImageVariant { get; } // Commonly 1
public ulong SectorCount { get; } // Size of DMG when expanded, in sectors
private DmgHeader(
uint version,
uint flags,
ulong runningDataForkOffset,
ulong dataForkOffset,
ulong dataForkLength,
ulong rsrcForkOffset,
ulong rsrcForkLength,
uint segmentNumber,
uint segmentCount,
IReadOnlyList<byte> segmentID,
UdifChecksum dataChecksum,
ulong xMLOffset,
ulong xMLLength,
UdifChecksum checksum,
uint imageVariant,
ulong sectorCount)
{
Version = version;
Flags = flags;
RunningDataForkOffset = runningDataForkOffset;
DataForkOffset = dataForkOffset;
DataForkLength = dataForkLength;
RsrcForkOffset = rsrcForkOffset;
RsrcForkLength = rsrcForkLength;
SegmentNumber = segmentNumber;
SegmentCount = segmentCount;
SegmentID = segmentID;
DataChecksum = dataChecksum;
XMLOffset = xMLOffset;
XMLLength = xMLLength;
Checksum = checksum;
ImageVariant = imageVariant;
SectorCount = sectorCount;
}
private static void ReadUuid(ref ReadOnlySpan<byte> data, byte[] buffer)
{
data.Slice(0, UuidSize).CopyTo(buffer);
data = data.Slice(UuidSize);
}
internal static bool TryRead(Stream input, out DmgHeader? header)
{
header = null;
var buffer = new byte[HeaderSize];
int count = input.Read(buffer, 0, HeaderSize);
if (count != HeaderSize) return false;
ReadOnlySpan<byte> data = buffer.AsSpan();
uint sig = ReadUInt32(ref data);
if (sig != Signature) return false;
uint version = ReadUInt32(ref data);
uint size = ReadUInt32(ref data);
if (size != (uint)HeaderSize) return false;
uint flags = ReadUInt32(ref data);
ulong runningDataForkOffset = ReadUInt64(ref data);
ulong dataForkOffset = ReadUInt64(ref data);
ulong dataForkLength = ReadUInt64(ref data);
ulong rsrcForkOffset = ReadUInt64(ref data);
ulong rsrcForkLength = ReadUInt64(ref data);
uint segmentNumber = ReadUInt32(ref data);
uint segmentCount = ReadUInt32(ref data);
var segmentID = new byte[UuidSize];
ReadUuid(ref data, segmentID);
var dataChecksum = UdifChecksum.Read(ref data);
ulong xmlOffset = ReadUInt64(ref data);
ulong xmlLength = ReadUInt64(ref data);
data = data.Slice(120); // Reserved bytes
var checksum = UdifChecksum.Read(ref data);
uint imageVariant = ReadUInt32(ref data);
ulong sectorCount = ReadUInt64(ref data);
header = new DmgHeader(
version,
flags,
runningDataForkOffset,
dataForkOffset,
dataForkLength,
rsrcForkOffset,
rsrcForkLength,
segmentNumber,
segmentCount,
segmentID,
dataChecksum,
xmlOffset,
xmlLength,
checksum,
imageVariant,
sectorCount);
return true;
}
}
}
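A hypothetical caller sketch, assuming the usual UDIF layout in which this 512-byte "koly" block (Signature 0x6B6F6C79 is ASCII "koly") sits at the very end of the image; the file name is illustrative:
using System;
using System.IO;

using var dmg = File.OpenRead("image.dmg");      // hypothetical path
dmg.Seek(-DmgHeader.HeaderSize, SeekOrigin.End); // trailer at EOF
if (DmgHeader.TryRead(dmg, out var koly))
{
    // XMLOffset/XMLLength locate the plist holding the blkx tables
    Console.WriteLine($"{koly!.SectorCount} sectors, plist at {koly.XMLOffset}");
}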

View File

@@ -0,0 +1,22 @@
using System;
using System.Buffers.Binary;
namespace SharpCompress.Common.Dmg.Headers
{
internal abstract class DmgStructBase
{
protected static uint ReadUInt32(ref ReadOnlySpan<byte> data)
{
uint val = BinaryPrimitives.ReadUInt32BigEndian(data);
data = data.Slice(sizeof(uint));
return val;
}
protected static ulong ReadUInt64(ref ReadOnlySpan<byte> data)
{
ulong val = BinaryPrimitives.ReadUInt64BigEndian(data);
data = data.Slice(sizeof(ulong));
return val;
}
}
}
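The ref-span convention above is what makes the helpers composable: each read consumes its bytes by re-slicing the caller's span. A sketch of the semantics, callable only from a derived type since the helpers are protected:
ReadOnlySpan<byte> data = new byte[] { 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 2 };
uint version = ReadUInt32(ref data);  // 1; span now starts at offset 4
ulong sectors = ReadUInt64(ref data); // 2; span is now empty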

View File

@@ -0,0 +1,90 @@
using System;
using System.Buffers.Binary;
using System.IO;
namespace SharpCompress.Common.Dmg.Headers
{
internal sealed class GptHeader : GptStructBase
{
private const int HeaderSize = 92;
private static readonly ulong Signature = BinaryPrimitives.ReadUInt64LittleEndian(new byte[] { 69, 70, 73, 32, 80, 65, 82, 84 });
public uint Revision { get; }
public uint Crc32Header { get; }
public ulong CurrentLba { get; }
public ulong BackupLba { get; }
public ulong FirstUsableLba { get; }
public ulong LastUsableLba { get; }
public Guid DiskGuid { get; }
public ulong EntriesStart { get; }
public uint EntriesCount { get; }
public uint EntriesSize { get; }
public uint Crc32Array { get; }
private GptHeader(
uint revision,
uint crc32Header,
ulong currentLba,
ulong backupLba,
ulong firstUsableLba,
ulong lastUsableLba,
Guid diskGuid,
ulong entriesStart,
uint entriesCount,
uint entriesSize,
uint crc32Array)
{
Revision = revision;
Crc32Header = crc32Header;
CurrentLba = currentLba;
BackupLba = backupLba;
FirstUsableLba = firstUsableLba;
LastUsableLba = lastUsableLba;
DiskGuid = diskGuid;
EntriesStart = entriesStart;
EntriesCount = entriesCount;
EntriesSize = entriesSize;
Crc32Array = crc32Array;
}
public static bool TryRead(Stream stream, out GptHeader? header)
{
header = null;
ulong sig = ReadUInt64(stream);
if (sig != Signature) return false;
uint revision = ReadUInt32(stream);
uint headerSize = ReadUInt32(stream);
if (headerSize != HeaderSize) return false;
uint crc32Header = ReadUInt32(stream);
_ = ReadUInt32(stream); // reserved
ulong currentLba = ReadUInt64(stream);
ulong backupLba = ReadUInt64(stream);
ulong firstUsableLba = ReadUInt64(stream);
ulong lastUsableLba = ReadUInt64(stream);
Guid diskGuid = ReadGuid(stream);
ulong entriesStart = ReadUInt64(stream);
uint entriesCount = ReadUInt32(stream);
uint entriesSize = ReadUInt32(stream);
uint crc32Array = ReadUInt32(stream);
header = new GptHeader(
revision,
crc32Header,
currentLba,
backupLba,
firstUsableLba,
lastUsableLba,
diskGuid,
entriesStart,
entriesCount,
entriesSize,
crc32Array);
return true;
}
}
}
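The Signature bytes above are plain ASCII; a small sketch:
using System.Text;

// 69 70 73 32 80 65 82 84 spells the GPT magic, little-endian on disk.
string magic = Encoding.ASCII.GetString(new byte[] { 69, 70, 73, 32, 80, 65, 82, 84 });
// magic == "EFI PART"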

View File

@@ -0,0 +1,36 @@
using System;
using System.IO;
namespace SharpCompress.Common.Dmg.Headers
{
internal sealed class GptPartitionEntry : GptStructBase
{
public Guid TypeGuid { get; }
public Guid Guid { get; }
public ulong FirstLba { get; }
public ulong LastLba { get; }
public ulong Attributes { get; }
public string Name { get; }
private GptPartitionEntry(Guid typeGuid, Guid guid, ulong firstLba, ulong lastLba, ulong attributes, string name)
{
TypeGuid = typeGuid;
Guid = guid;
FirstLba = firstLba;
LastLba = lastLba;
Attributes = attributes;
Name = name;
}
public static GptPartitionEntry Read(Stream stream)
{
return new GptPartitionEntry(
ReadGuid(stream),
ReadGuid(stream),
ReadUInt64(stream),
ReadUInt64(stream),
ReadUInt64(stream),
ReadString(stream, 72));
}
}
}

View File

@@ -0,0 +1,56 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Text;
namespace SharpCompress.Common.Dmg.Headers
{
internal abstract class GptStructBase
{
private static readonly byte[] _buffer = new byte[8];
protected static ushort ReadUInt16(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(ushort)) != sizeof(ushort))
throw new EndOfStreamException();
return BinaryPrimitives.ReadUInt16LittleEndian(_buffer);
}
protected static uint ReadUInt32(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(uint)) != sizeof(uint))
throw new EndOfStreamException();
return BinaryPrimitives.ReadUInt32LittleEndian(_buffer);
}
protected static ulong ReadUInt64(Stream stream)
{
if (stream.Read(_buffer, 0, sizeof(ulong)) != sizeof(ulong))
throw new EndOfStreamException();
return BinaryPrimitives.ReadUInt64LittleEndian(_buffer);
}
protected static Guid ReadGuid(Stream stream)
{
int a = (int)ReadUInt32(stream);
short b = (short)ReadUInt16(stream);
short c = (short)ReadUInt16(stream);
if (stream.Read(_buffer, 0, 8) != 8)
throw new EndOfStreamException();
return new Guid(a, b, c, _buffer);
}
protected static string ReadString(Stream stream, int byteSize)
{
var buffer = new byte[byteSize];
if (stream.Read(buffer, 0, byteSize) != byteSize)
throw new EndOfStreamException();
return Encoding.Unicode.GetString(buffer).NullTerminate();
}
}
}
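ReadGuid follows GPT's mixed-endian GUID layout: three little-endian leading fields, final eight bytes verbatim, which matches the Guid(int, short, short, byte[]) constructor exactly. A sketch using the AppleHFS type GUID that appears in PartitionFormat later in this diff:
using System;

byte[] onDisk =
{
    0x00, 0x53, 0x46, 0x48, // first field, little-endian
    0x00, 0x00,             // second field, little-endian
    0xAA, 0x11,             // third field, little-endian
    0xAA, 0x11, 0x00, 0x30, 0x65, 0x43, 0xEC, 0xAC // final 8 bytes, verbatim
};
var guid = new Guid(onDisk); // Guid(byte[]) uses the same mixed-endian layout
// guid.ToString() == "48465300-0000-11aa-aa11-00306543ecac"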

View File

@@ -0,0 +1,33 @@
using System;
using System.Collections.Generic;
namespace SharpCompress.Common.Dmg.Headers
{
internal sealed class UdifChecksum : DmgStructBase
{
private const int MaxSize = 32; // * 4 to get byte size
public uint Type { get; }
public uint Size { get; } // in bits
public IReadOnlyList<uint> Bits { get; }
private UdifChecksum(uint type, uint size, IReadOnlyList<uint> bits)
{
Type = type;
Size = size;
Bits = bits;
}
public static UdifChecksum Read(ref ReadOnlySpan<byte> data)
{
uint type = ReadUInt32(ref data);
uint size = ReadUInt32(ref data);
var bits = new uint[MaxSize];
for (int i = 0; i < MaxSize; i++)
bits[i] = ReadUInt32(ref data);
return new UdifChecksum(type, size, bits);
}
}
}
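One layout note: Bits is always 32 uints (128 bytes) on disk regardless of Size, which is given in bits. A hedged sketch of slicing out the live portion, where checksum stands for a parsed UdifChecksum:
// checksum: a parsed UdifChecksum (hypothetical); CRC32 reports Size == 32,
// so only the first of the 32 fixed slots carries data.
uint sizeInBits = checksum.Size;
int liveWords = (int)((sizeInBits + 31) / 32); // 1 for CRC32
uint crc = checksum.Bits[0];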

View File

@@ -0,0 +1,14 @@
using System;
namespace SharpCompress.Common.Dmg
{
internal static class PartitionFormat
{
public static readonly Guid AppleHFS = new Guid("48465300-0000-11AA-AA11-00306543ECAC");
public static readonly Guid AppleUFS = new Guid("55465300-0000-11AA-AA11-00306543ECAC");
public static readonly Guid AppleBoot = new Guid("426F6F74-0000-11AA-AA11-00306543ECAC");
public static readonly Guid AppleRaid = new Guid("52414944-0000-11AA-AA11-00306543ECAC");
public static readonly Guid AppleRaidOffline = new Guid("52414944-5F4F-11AA-AA11-00306543ECAC");
public static readonly Guid AppleLabel = new Guid("4C616265-6C00-11AA-AA11-00306543ECAC");
}
}
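A hypothetical classification sketch tying this table to the GptPartitionEntry type earlier in this diff, where entry stands for a parsed entry:
// entry: a parsed GptPartitionEntry (hypothetical).
bool mountable = entry.TypeGuid == PartitionFormat.AppleHFS
              || entry.TypeGuid == PartitionFormat.AppleUFS;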

View File

@@ -75,7 +75,7 @@ namespace SharpCompress.Common
internal abstract IEnumerable<FilePart> Parts { get; }
public bool IsSolid { get; set; }
internal bool IsSolid { get; set; }
internal virtual void Close()
{

View File

@@ -14,25 +14,14 @@ namespace SharpCompress.Common
Action<string, ExtractionOptions?> write)
{
string destinationFileName;
string file = Path.GetFileName(entry.Key);
string fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
//check for trailing slash.
if (fullDestinationDirectoryPath[fullDestinationDirectoryPath.Length - 1] != Path.DirectorySeparatorChar)
{
fullDestinationDirectoryPath += Path.DirectorySeparatorChar;
}
if (!Directory.Exists(fullDestinationDirectoryPath))
{
throw new ExtractionException($"Directory does not exist to extract to: {fullDestinationDirectoryPath}");
}
options ??= new ExtractionOptions()
{
Overwrite = true
};
string file = Path.GetFileName(entry.Key);
if (options.ExtractFullPath)
{
string folder = Path.GetDirectoryName(entry.Key)!;

View File

@@ -14,7 +14,6 @@ namespace SharpCompress.Common
bool IsDirectory { get; }
bool IsEncrypted { get; }
bool IsSplitAfter { get; }
bool IsSolid { get; }
DateTime? LastAccessedTime { get; }
DateTime? LastModifiedTime { get; }
long Size { get; }

View File

@@ -1,4 +1,6 @@
namespace SharpCompress.Common
using System;
namespace SharpCompress.Common
{
public class IncompleteArchiveException : ArchiveException
{
@@ -6,5 +8,10 @@
: base(message)
{
}
public IncompleteArchiveException(string message, Exception inner)
: base(message, inner)
{
}
}
}

View File

@@ -10,7 +10,7 @@ namespace SharpCompress.Common.Rar
/// <summary>
/// As the V2017 port isn't complete, add this check to use the legacy Rar code.
/// </summary>
internal bool IsRarV3 => FileHeader.CompressionAlgorithm == 20 || FileHeader.CompressionAlgorithm == 26 || FileHeader.CompressionAlgorithm == 29 || FileHeader.CompressionAlgorithm == 36; //Nanook - Added 20+26 as Test arc from WinRar2.8 (algo 20) was failing with 2017 code
internal bool IsRarV3 => FileHeader.CompressionAlgorithm == 29 || FileHeader.CompressionAlgorithm == 36;
/// <summary>
/// The File's 32 bit CRC Hash

View File

@@ -1248,7 +1248,7 @@ namespace SharpCompress.Common.SevenZip
if (nextHeaderOffset > _streamEnding - db._startPositionAfterHeader)
{
throw new InvalidOperationException("nextHeaderOffset is invalid");
throw new IndexOutOfRangeException();
}
_stream.Seek(nextHeaderOffset, SeekOrigin.Current);

View File

@@ -30,7 +30,7 @@ namespace SharpCompress.Common.SevenZip
}
}
throw new InvalidOperationException();
throw new Exception();
}
public int GetNumOutStreams()
@@ -185,4 +185,4 @@ namespace SharpCompress.Common.SevenZip
return true;
}
}
}

View File

@@ -19,9 +19,11 @@ namespace SharpCompress.Common.Tar.Headers
internal string Name { get; set; }
internal string LinkName { get; set; }
internal long Mode { get; set; }
internal long UserId { get; set; }
internal long GroupId { get; set; }
//internal int Mode { get; set; }
//internal int UserId { get; set; }
//internal string UserName { get; set; }
//internal int GroupId { get; set; }
//internal string GroupName { get; set; }
internal long Size { get; set; }
internal DateTime LastModifiedTime { get; set; }
internal EntryType EntryType { get; set; }
@@ -125,12 +127,9 @@ namespace SharpCompress.Common.Tar.Headers
EntryType = ReadEntryType(buffer);
Size = ReadSize(buffer);
Mode = ReadAsciiInt64Base8(buffer, 100, 7);
if(EntryType == EntryType.Directory)
Mode |= 0b1_000_000_000;
UserId = ReadAsciiInt64Base8(buffer, 108, 7);
GroupId = ReadAsciiInt64Base8(buffer, 116, 7);
//Mode = ReadASCIIInt32Base8(buffer, 100, 7);
//UserId = ReadASCIIInt32Base8(buffer, 108, 7);
//GroupId = ReadASCIIInt32Base8(buffer, 116, 7);
long unixTimeStamp = ReadAsciiInt64Base8(buffer, 136, 11);
LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime();
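Context for the hunk above: tar numeric fields are NUL- or space-terminated ASCII octal, which is why the reader widens them to long before ORing in the extra directory bit. A minimal sketch of the base-8 decode:
using System;

string field = "0000755";                                 // ASCII octal
long mode = Convert.ToInt64(field.TrimEnd('\0', ' '), 8); // 493 = rwxr-xr-x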

View File

@@ -44,12 +44,6 @@ namespace SharpCompress.Common.Tar
public override bool IsSplitAfter => false;
public long Mode => _filePart.Header.Mode;
public long UserID => _filePart.Header.UserId;
public long GroupId => _filePart.Header.GroupId;
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
internal static IEnumerable<TarEntry> GetEntries(StreamingMode mode, Stream stream,

View File

@@ -1,4 +1,4 @@
using System;
using System.IO;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -14,7 +14,7 @@ namespace SharpCompress.Common
ReaderOptions = readerOptions;
if (readerOptions.LeaveStreamOpen)
{
stream = NonDisposingStream.Create(stream);
stream = new NonDisposingStream(stream);
}
_actualStream = stream;
}
@@ -48,4 +48,4 @@ namespace SharpCompress.Common
GC.SuppressFinalize(this);
}
}
}

View File

@@ -36,7 +36,7 @@ namespace SharpCompress.Common.Zip
// ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR should be before the EOCD
stream.Seek(eocd_location - ZIP64_EOCD_LENGTH - 4, SeekOrigin.Begin);
uint zip64_locator = reader.ReadUInt32();
if (zip64_locator != ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR)
if( zip64_locator != ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR )
{
throw new ArchiveException("Failed to locate the Zip64 Directory Locator");
}
@@ -86,11 +86,11 @@ namespace SharpCompress.Common.Zip
}
}
private static bool IsMatch(byte[] haystack, int position, byte[] needle)
private static bool IsMatch( byte[] haystack, int position, byte[] needle)
{
for (int i = 0; i < needle.Length; i++)
for( int i = 0; i < needle.Length; i++ )
{
if (haystack[position + i] != needle[i])
if( haystack[ position + i ] != needle[ i ] )
{
return false;
}
@@ -117,12 +117,11 @@ namespace SharpCompress.Common.Zip
// Search in reverse
Array.Reverse(seek);
// don't exclude the minimum eocd region, otherwise you fail to locate the header in empty zip files
var max_search_area = len; // - MINIMUM_EOCD_LENGTH;
var max_search_area = len - MINIMUM_EOCD_LENGTH;
for (int pos_from_end = 0; pos_from_end < max_search_area; ++pos_from_end)
for( int pos_from_end = 0; pos_from_end < max_search_area; ++pos_from_end)
{
if (IsMatch(seek, pos_from_end, needle))
if( IsMatch(seek, pos_from_end, needle) )
{
stream.Seek(-pos_from_end, SeekOrigin.End);
return;
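The reverse-scan idea in this hunk, sketched: reverse the signature once, scan the reversed tail forward, and a match at pos_from_end maps to a seek of -pos_from_end from the archive's end.
using System;

byte[] needle = { 0x50, 0x4B, 0x05, 0x06 }; // EOCD magic "PK\x05\x06"
Array.Reverse(needle); // now 06 05 4B 50, for the back-to-front scan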

View File

@@ -1,4 +1,4 @@
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Deflate;
using SharpCompress.IO;
@@ -28,7 +28,7 @@ namespace SharpCompress.Common.Zip
_decompressionStream = CreateDecompressionStream(GetCryptoStream(CreateBaseStream()), Header.CompressionMethod);
if (LeaveStreamOpen)
{
return NonDisposingStream.Create(_decompressionStream);
return new NonDisposingStream(_decompressionStream);
}
return _decompressionStream;
}
@@ -56,4 +56,4 @@ namespace SharpCompress.Common.Zip
return reader;
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Linq;
@@ -37,7 +37,7 @@ namespace SharpCompress.Common.Zip
Stream decompressionStream = CreateDecompressionStream(GetCryptoStream(CreateBaseStream()), Header.CompressionMethod);
if (LeaveStreamOpen)
{
return NonDisposingStream.Create(decompressionStream);
return new NonDisposingStream(decompressionStream);
}
return decompressionStream;
}
@@ -142,7 +142,7 @@ namespace SharpCompress.Common.Zip
&& FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor))
|| Header.IsZip64)
{
plainStream = NonDisposingStream.Create(plainStream); //make sure AES doesn't close
plainStream = new NonDisposingStream(plainStream); //make sure AES doesn't close
}
else
{
@@ -174,7 +174,7 @@ namespace SharpCompress.Common.Zip
default:
{
throw new InvalidOperationException("Header.CompressionMethod is invalid");
throw new ArgumentOutOfRangeException();
}
}

View File

@@ -83,7 +83,7 @@ namespace SharpCompress.Compressors.BZip2
stream.SetLength(value);
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !NET461 && !NETSTANDARD2_0
public override int Read(Span<byte> buffer)
{
@@ -123,4 +123,4 @@ namespace SharpCompress.Compressors.BZip2
return true;
}
}
}

View File

@@ -1,20 +1,20 @@
// Zlib.cs
// ------------------------------------------------------------------
//
// Copyright (c) 2009 Dino Chiesa and Microsoft Corporation.
// All rights reserved.
//
// This code module is part of DotNetZip, a zipfile class library.
//
// ------------------------------------------------------------------
//
// This code is licensed under the Microsoft Public License.
// See the file License.txt for the license details.
// More info on: http://dotnetzip.codeplex.com
//
// ------------------------------------------------------------------
//
// last saved (in emacs):
// Time-stamp: <2009-November-07 05:26:55>
//
// ------------------------------------------------------------------
@@ -27,22 +27,22 @@
// included below.
//
// ------------------------------------------------------------------
//
// Copyright (c) 2000,2001,2002,2003 ymnk, JCraft,Inc. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in
// the documentation and/or other materials provided with the distribution.
//
// 3. The names of the authors may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
// FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JCRAFT,
@@ -53,7 +53,7 @@
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// -----------------------------------------------------------------------
//
// This program is based on zlib-1.1.3; credit to authors
@@ -82,7 +82,7 @@ namespace SharpCompress.Compressors.Deflate
/// <summary>
/// Same as None.
/// </summary>
Level0 = None,
Level0 = 0,
/// <summary>
/// The fastest but least effective compression.
@@ -92,7 +92,7 @@ namespace SharpCompress.Compressors.Deflate
/// <summary>
/// A synonym for BestSpeed.
/// </summary>
Level1 = BestSpeed,
Level1 = 1,
/// <summary>
/// A little slower, but better, than level 1.
@@ -115,14 +115,14 @@ namespace SharpCompress.Compressors.Deflate
Level5 = 5,
/// <summary>
/// The default compression level, with a good balance of speed and compression efficiency.
/// </summary>
Default = 6,
/// <summary>
/// A synonym for Default.
/// </summary>
Level6 = Default,
Level6 = 6,
/// <summary>
/// Pretty good compression!
@@ -135,7 +135,7 @@ namespace SharpCompress.Compressors.Deflate
Level8 = 8,
/// <summary>
/// The "best" compression, where best means greatest reduction in size of the input data stream.
/// This is also the slowest compression.
/// </summary>
BestCompression = 9,
@@ -143,7 +143,7 @@ namespace SharpCompress.Compressors.Deflate
/// <summary>
/// A synonym for BestCompression.
/// </summary>
Level9 = BestCompression
Level9 = 9
}
/// <summary>
@@ -154,7 +154,7 @@ namespace SharpCompress.Compressors.Deflate
public enum CompressionStrategy
{
/// <summary>
/// The default strategy is probably the best for normal data.
/// </summary>
Default = 0,
@@ -181,7 +181,7 @@ namespace SharpCompress.Compressors.Deflate
{
/// <summary>
/// The ZlibException class captures exception information generated
/// by the Zlib library.
/// </summary>
public ZlibException()
{
@@ -233,7 +233,7 @@ namespace SharpCompress.Compressors.Deflate
/// <param name="target">Contains the array of characteres read from the source TextReader.</param>
/// <param name="start">The starting index of the target array.</param>
/// <param name="count">The maximum number of characters to read from the source TextReader.</param>
///
/// <returns>
/// The number of characters read. The number will be less than or equal to
/// count depending on the data available in the source TextReader. Returns -1
@@ -405,4 +405,4 @@ namespace SharpCompress.Compressors.Deflate
BitLengths = new StaticTree(null, extra_blbits, 0, InternalConstants.BL_CODES, InternalConstants.MAX_BL_BITS);
}
}
}

View File

@@ -502,37 +502,13 @@ namespace SharpCompress.Compressors.Deflate
throw new ZlibException("Cannot Read after Writing.");
}
int rc = 0;
// set up the output of the deflate/inflate codec:
_z.OutputBuffer = buffer;
_z.NextOut = offset;
_z.AvailableBytesOut = count;
if (count == 0)
{
return 0;
}
if (nomoreinput && _wantCompress)
{
// no more input data available; therefore we flush to
// try to complete the read
rc = _z.Deflate(FlushType.Finish);
if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
{
throw new ZlibException(String.Format("Deflating: rc={0} msg={1}", rc, _z.Message));
}
rc = (count - _z.AvailableBytesOut);
// calculate CRC after reading
if (crc != null)
{
crc.SlurpBlock(buffer, offset, rc);
}
return rc;
return 0; // workitem 8557
}
if (buffer is null)
{
@@ -551,6 +527,13 @@ namespace SharpCompress.Compressors.Deflate
throw new ArgumentOutOfRangeException(nameof(count));
}
int rc = 0;
// set up the output of the deflate/inflate codec:
_z.OutputBuffer = buffer;
_z.NextOut = offset;
_z.AvailableBytesOut = count;
// This is necessary in case _workingBuffer has been resized. (new byte[])
// (The first reference to _workingBuffer goes through the private accessor which
// may initialize it.)

View File

@@ -1,297 +0,0 @@
/*
* BranchExecFilter.cs -- Converters for executable
* <Contribution by Louis-Michel Bergeron, on behalf of aDolus Technology Inc., 2022>
* @TODO Encoding
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.Filters
{
[CLSCompliant(false)]
public sealed class BranchExecFilter
{
public enum Alignment : int
{
ARCH_x86_ALIGNMENT = 1,
ARCH_PowerPC_ALIGNMENT = 4,
ARCH_IA64_ALIGNMENT = 16,
ARCH_ARM_ALIGNMENT = 4,
ARCH_ARMTHUMB_ALIGNMENT = 2,
ARCH_SPARC_ALIGNMENT = 4,
}
public static void X86Converter(byte[] data, UInt32 ip, ref UInt32 state)
{
long i = 0;
long size = data.Length;
UInt32 pos = 0;
UInt32 mask = state & 7;
if (size < 5)
return;
size -= 4;
ip += 5;
for (;;)
{
i = pos;
for (; i < size; i++)
{
if ((data[i] & 0xFE) == 0xE8)
{
break;
}
}
UInt32 d = (UInt32)(i) - pos;
pos = (UInt32)i;
if (i >= size)
{
state = (d > 2 ? 0 : mask >> (int)d);
return;
}
if (d > 2)
{
mask = 0;
}
else
{
mask >>= (int)d;
if (mask != 0 && (mask > 4 || mask == 3 || (((((data[(UInt32)(mask >> 1) + 1])) + 1) & 0xFE) == 0) ))
{
mask = (mask >> 1) | 4;
pos++;
continue;
}
}
if ((((data[i + 4]) + 1) & 0xFE) == 0)
{
UInt32 inst = ((UInt32)data[i + 4] << 24) | ((UInt32)data[i + 3] << 16) | ((UInt32)data[i + 2] << 8) | ((UInt32)data[i + 1]);
UInt32 cur = ip + (UInt32)pos;
pos += 5;
inst -= cur;
if (mask != 0)
{
UInt32 sh = (mask & 6) << 2;
if (((((((Byte)(inst >> (int)sh))) + 1) & 0xFE) == 0))
{
inst ^= (((UInt32)0x100 << (int)sh) - 1);
inst -= cur;
}
mask = 0;
}
data[i + 1] = (Byte)inst;
data[i + 2] = (Byte)(inst >> 8);
data[i + 3] = (Byte)(inst >> 16);
data[i + 4] = (Byte)(0 - ((inst >> 24) & 1));
}
else
{
mask = (mask >> 1) | 4;
pos++;
}
}
}
public static void PowerPCConverter(byte[] data, UInt32 ip)
{
long i = 0;
long size = data.Length;
size &= ~(UInt32)3;
ip -= 4;
for (;; ) // infinite loop
{
for (;; ) // infinite loop
{
if (i >= size)
return;
i += 4;
if ((data[i - 4] & 0xFC) == 0x48 && (data[i - 1] & 3) == 1)
break;
}
{
UInt32 inst = BitConverter.ToUInt32(data, (int)i - 4);
if (BitConverter.IsLittleEndian)
{
inst = Utility.SwapUINT32(inst);
}
inst -= (UInt32)(ip + i);
inst &= 0x03FFFFFF;
inst |= 0x48000000;
Utility.SetBigUInt32(ref data, inst, (i - 4));
}
}
}
public static void ARMConverter(byte[] data, UInt32 ip)
{
long i = 0;
long size = data.Length;
size &= ~(UInt32)3;
ip += 4;
for (;;) // infinite loop
{
for (;;) // infinite loop
{
if (i >= size)
{
return;
}
i += 4;
if (data[i - 1] == 0xEB)
break;
}
UInt32 inst = BitConverter.ToUInt32(data, (int)i - 4);
inst <<= 2;
inst -= (UInt32)(ip + i);
inst >>= 2;
inst &= 0x00FFFFFF;
inst |= 0xEB000000;
Utility.SetLittleUInt32(ref data, inst, i - 4);
}
}
public static void ARMTConverter(byte[] data, UInt32 ip)
{
long i = 0;
long size = data.Length;
size &= ~(UInt32)1;
long lim = size - 4;
for (;;)
{
UInt32 b1;
for (;;)
{
UInt32 b3;
if (i > lim)
return;
b1 = data[i + 1];
b3 = data[i + 3];
i += 2;
b1 ^= 8;
if ((b3 & b1) >= 0xF8)
break;
}
UInt32 inst = ((UInt32)b1 << 19)
+ (((UInt32)data[i + 1] & 0x7) << 8)
+ (((UInt32)data[i - 2] << 11))
+ (data[i]);
i += 2;
UInt32 cur = ((UInt32)(ip + i)) >> 1;
inst -= cur;
data[i - 4] = (Byte)(inst >> 11);
data[i - 3] = (Byte)(0xF0 | ((inst >> 19) & 0x7));
data[i - 2] = (Byte)inst;
data[i - 1] = (Byte)(0xF8 | (inst >> 8));
}
}
public static void IA64Converter(byte[] data, UInt32 ip)
{
UInt32 i = 0;
long size = data.Length;
if (size < 16)
throw new InvalidDataException("Unexpected data size");
size -= 16;
do
{
UInt32 m = ((UInt32)0x334B0000 >> (data[i] & 0x1E)) & 3;
if (m != 0)
{
m++;
do
{
UInt32 iterator = (UInt32)( (i + (m * 5) - 8));
if (((data[iterator + 3] >> (int)m) & 15) == 5
&& (((data[iterator - 1] | ((UInt32)data[iterator] << 8)) >> (int)m) & 0x70) == 0)
{
UInt32 raw = BitConverter.ToUInt32(data, (int)iterator);
UInt32 inst = raw >> (int)m;
inst = (inst & 0xFFFFF) | ((inst & (1 << 23)) >> 3);
inst <<= 4;
inst -= (ip + (UInt32)i);
inst >>= 4;
inst &= 0x1FFFFF;
inst += 0x700000;
inst &= 0x8FFFFF;
raw &= ~((UInt32)0x8FFFFF << (int)m);
raw |= (inst << (int)m);
Utility.SetLittleUInt32(ref data, raw, iterator);
}
}
while (++m <= 4);
}
i += 16;
}
while (i <= size);
return;
}
public static void SPARCConverter(byte[] data, UInt32 ip)
{
long i = 0;
long size = data.Length;
size &= ~(UInt32)3;
ip -= 4;
for (;;) // infinite loop
{
for (;;) // infinite loop
{
if (i >= size)
return;
i += 4;
if ((data[i - 4] == 0x40 && (data[i - 3] & 0xC0) == 0) ||
(data[i - 4] == 0x7F && (data[i - 3] >= 0xC0)))
break;
}
UInt32 inst = BitConverter.ToUInt32(data, (int)i - 4);
if (BitConverter.IsLittleEndian)
{
inst = Utility.SwapUINT32(inst);
}
inst <<= 2;
inst -= (UInt32)(ip + i);
inst &= 0x01FFFFFF;
inst -= (UInt32)1 << 24;
inst ^= 0xFF000000;
inst >>= 2;
inst |= 0x40000000;
Utility.SetBigUInt32(ref data, inst, (i - 4));
}
}
}
}
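The common thread of these converters, as a hedged sketch: each rewrites a branch/call operand between relative and absolute form so that repeated jump targets become identical byte patterns and compress better. For x86 the operand follows an 0xE8 opcode and is relative to the end of the 5-byte instruction:
uint ip = 0x1000;        // hypothetical address of the 0xE8 opcode
uint rel = 0x0200;       // rel32 operand as stored in the instruction
uint abs = ip + 5 + rel; // 5 = length of the E8 xx xx xx xx encoding
// Storing abs instead of rel makes every call to one target byte-identical.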

View File

@@ -1,4 +1,4 @@
using System;
using System.IO;
using System.Security.Cryptography;
using System.Text;
@@ -20,11 +20,6 @@ namespace SharpCompress.Compressors.LZMA
public AesDecoderStream(Stream input, byte[] info, IPasswordProvider pass, long limit)
{
if (pass.CryptoGetTextPassword() == null)
{
throw new SharpCompress.Common.CryptographicException("Encrypted 7Zip archive has no password specified.");
}
mStream = input;
mLimit = limit;
@@ -148,8 +143,8 @@ namespace SharpCompress.Compressors.LZMA
if ((bt & 0xC0) == 0)
{
salt = Array.Empty<byte>();
iv = Array.Empty<byte>();
salt = new byte[0];
iv = new byte[0];
return;
}

View File

@@ -103,7 +103,7 @@ namespace SharpCompress.Compressors.LZMA.LZ
{
if (historySize > K_MAX_VAL_FOR_NORMALIZE - 256)
{
throw new ArgumentOutOfRangeException(nameof(historySize));
throw new Exception();
}
_cutValue = 16 + (matchMaxLen >> 1);
@@ -423,4 +423,4 @@ namespace SharpCompress.Compressors.LZMA.LZ
_cutValue = cutValue;
}
}
}

View File

@@ -118,7 +118,7 @@ namespace SharpCompress.Compressors.LZMA
public override void SetLength(long value) => throw new NotImplementedException();
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !NET461 && !NETSTANDARD2_0
public override int Read(Span<byte> buffer)
{

View File

@@ -1649,7 +1649,7 @@ namespace SharpCompress.Compressors.LZMA
{
for (int m = 0; m < K_MATCH_FINDER_I_DS.Length; m++)
{
if (string.Equals(s, K_MATCH_FINDER_I_DS[m], StringComparison.OrdinalIgnoreCase))
if (s == K_MATCH_FINDER_I_DS[m])
{
return m;
}
@@ -1696,7 +1696,7 @@ namespace SharpCompress.Compressors.LZMA
throw new InvalidParamException();
}
EMatchFinderType matchFinderIndexPrev = _matchFinderType;
int m = FindMatchFinder(((string)prop));
int m = FindMatchFinder(((string)prop).ToUpper());
if (m < 0)
{
throw new InvalidParamException();

View File

@@ -1,4 +1,4 @@
using System;
using System.Diagnostics;
using System.IO;
@@ -21,17 +21,16 @@ namespace SharpCompress.Compressors.LZMA.Utilites
protected override void Dispose(bool disposing)
{
//Nanook - is not equal here - _mCurrentCrc is yet to be negated
//if (_mCurrentCrc != _mExpectedCrc)
//{
// throw new InvalidOperationException();
//}
if (_mCurrentCrc != _mExpectedCrc)
{
throw new InvalidOperationException();
}
try
{
if (disposing && !_mClosed)
{
_mClosed = true;
_mCurrentCrc = Crc.Finish(_mCurrentCrc); //now becomes equal
_mCurrentCrc = Crc.Finish(_mCurrentCrc);
#if DEBUG
if (_mCurrentCrc == _mExpectedCrc)
{
@@ -55,10 +54,6 @@ namespace SharpCompress.Compressors.LZMA.Utilites
}
Debug.WriteLine("entropy: " + (int)(entropy * 100) + "%");
#endif
if (_mCurrentCrc != _mExpectedCrc) //moved test to here
{
throw new InvalidOperationException();
}
}
}
finally
@@ -107,4 +102,4 @@ namespace SharpCompress.Compressors.LZMA.Utilites
_mCurrentCrc = Crc.Update(_mCurrentCrc, buffer, offset, count);
}
}
}

View File

@@ -50,7 +50,7 @@ namespace SharpCompress.Compressors.LZMA.Utilites
Debugger.Break();
}
throw new InvalidOperationException("Assertion failed.");
throw new Exception("Assertion failed.");
}
}
@@ -89,4 +89,4 @@ namespace SharpCompress.Compressors.LZMA.Utilites
}
}
}
}

View File

@@ -83,7 +83,7 @@ namespace SharpCompress.Compressors.PPMd.H
internal void AriDecNormalize()
{
// while ((low ^ (low + range)) < TOP || range < BOT && ((range = -low & (BOT - 1)) != 0 ? true : true))
// {
// code = ((code << 8) | unpackRead.getChar()&0xff)&uintMask;
// range = (range << 8)&uintMask;
@@ -118,7 +118,7 @@ namespace SharpCompress.Compressors.PPMd.H
buffer.Append(_range);
buffer.Append("\n subrange=");
buffer.Append(SubRange);
buffer.Append(']');
buffer.Append("]");
return buffer.ToString();
}
}
@@ -150,8 +150,8 @@ namespace SharpCompress.Compressors.PPMd.H
buffer.Append(_highCount);
buffer.Append("\n scale=");
buffer.Append(_scale);
buffer.Append(']');
buffer.Append("]");
return buffer.ToString();
}
}
}

View File

@@ -1,4 +1,4 @@
#nullable disable
using System;
using System.IO;
@@ -23,7 +23,6 @@ namespace SharpCompress.Compressors.Rar
private int outCount;
private int outTotal;
private bool isDisposed;
private long _position;
public RarStream(IRarUnpack unpack, FileHeader fileHeader, Stream readStream)
{
@@ -33,7 +32,6 @@ namespace SharpCompress.Compressors.Rar
fetch = true;
unpack.DoUnpack(fileHeader, readStream, this);
fetch = false;
_position = 0;
}
protected override void Dispose(bool disposing)
@@ -58,8 +56,7 @@ namespace SharpCompress.Compressors.Rar
public override long Length => fileHeader.UncompressedSize;
//commented out code always returned the length of the file
public override long Position { get => _position; /* fileHeader.UncompressedSize - unpack.DestSize;*/ set => throw new NotSupportedException(); }
public override long Position { get => fileHeader.UncompressedSize - unpack.DestSize; set => throw new NotSupportedException(); }
public override int Read(byte[] buffer, int offset, int count)
{
@@ -83,7 +80,6 @@ namespace SharpCompress.Compressors.Rar
unpack.DoUnpack();
fetch = false;
}
_position += (long)outTotal;
return outTotal;
}
@@ -133,4 +129,4 @@ namespace SharpCompress.Compressors.Rar
}
}
}
}

View File

@@ -1,68 +0,0 @@
/*
* ArmFilter.cs -- XZ converter ARM executable
* <Contribution by Louis-Michel Bergeron, on behalf of aDolus Technology Inc., 2022>
* @TODO Properties offset
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Compressors.Filters;
namespace SharpCompress.Compressors.Xz.Filters
{
internal class ArmFilter : BlockFilter
{
public override bool AllowAsLast => false;
public override bool AllowAsNonLast => true;
public override bool ChangesDataSize => false;
private UInt32 _ip = 0;
//private UInt32 _offset = 0;
public override void Init(byte[] properties)
{
if (properties.Length != 0 && properties.Length != 4)
{
throw new InvalidDataException("ARM properties unexpected length");
}
if (properties.Length == 4)
{
// Even XZ doesn't support it.
throw new InvalidDataException("ARM properties offset is not supported");
//_offset = BitConverter.ToUInt32(properties, 0);
//
//if (_offset % (UInt32)BranchExec.Alignment.ARCH_ARM_ALIGNMENT != 0)
//{
// throw new InvalidDataException("Filter offset does not match alignment");
//}
}
}
public override void ValidateFilter()
{
}
public override int Read(byte[] buffer, int offset, int count)
{
int bytesRead = BaseStream.Read(buffer, offset, count);
BranchExecFilter.ARMConverter(buffer, _ip);
_ip += (UInt32)bytesRead;
return bytesRead;
}
public override void SetBaseStream(Stream stream)
{
BaseStream = stream;
}
}
}

View File

@@ -1,69 +0,0 @@
/*
* ArmThumbFilter.cs -- XZ converter ARMThumb executable
* <Contribution by Louis-Michel Bergeron, on behalf of aDolus Technology Inc., 2022>
* @TODO Properties offset
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Compressors.Filters;
namespace SharpCompress.Compressors.Xz.Filters
{
internal class ArmThumbFilter : BlockFilter
{
public override bool AllowAsLast => false;
public override bool AllowAsNonLast => true;
public override bool ChangesDataSize => false;
private UInt32 _ip = 0;
//private UInt32 _offset = 0;
public override void Init(byte[] properties)
{
if (properties.Length != 0 && properties.Length != 4)
{
throw new InvalidDataException("ARM Thumb properties unexpected length");
}
if (properties.Length == 4)
{
// Even XZ doesn't support it.
throw new InvalidDataException("ARM Thumb properties offset is not supported");
//_offset = BitConverter.ToUInt32(properties, 0);
//
//if (_offset % (UInt32)BranchExec.Alignment.ARCH_ARMTHUMB_ALIGNMENT != 0)
//{
// throw new InvalidDataException("Filter offset does not match alignment");
//}
}
}
public override void ValidateFilter()
{
}
public override int Read(byte[] buffer, int offset, int count)
{
int bytesRead = BaseStream.Read(buffer, offset, count);
BranchExecFilter.ARMTConverter(buffer, _ip);
_ip += (UInt32)bytesRead;
return bytesRead;
}
public override void SetBaseStream(Stream stream)
{
BaseStream = stream;
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
@@ -20,12 +20,6 @@ namespace SharpCompress.Compressors.Xz.Filters
private static readonly Dictionary<FilterTypes, Type> FilterMap = new Dictionary<FilterTypes, Type>
{
{FilterTypes.ARCH_x86_FILTER, typeof(X86Filter) },
{FilterTypes.ARCH_PowerPC_FILTER, typeof(PowerPCFilter) },
{FilterTypes.ARCH_IA64_FILTER, typeof(IA64Filter) },
{FilterTypes.ARCH_ARM_FILTER, typeof(ArmFilter) },
{FilterTypes.ARCH_ARMTHUMB_FILTER, typeof(ArmThumbFilter) },
{FilterTypes.ARCH_SPARC_FILTER, typeof(SparcFilter) },
{FilterTypes.LZMA2, typeof(Lzma2Filter) }
};

View File

@@ -1,67 +0,0 @@
/*
* IA64Filter.cs -- XZ converter IA64 executable
* <Contribution by Louis-Michel Bergeron, on behalf of aDolus Technology Inc., 2022>
* @TODO Properties offset
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Compressors.Filters;
namespace SharpCompress.Compressors.Xz.Filters
{
internal class IA64Filter : BlockFilter
{
public override bool AllowAsLast => false;
public override bool AllowAsNonLast => true;
public override bool ChangesDataSize => false;
private UInt32 _ip = 0;
//private UInt32 _offset = 0;
public override void Init(byte[] properties)
{
if (properties.Length != 0 && properties.Length != 4)
{
throw new InvalidDataException("IA64 properties unexpected length");
}
if (properties.Length == 4)
{
// Even XZ doesn't support it.
throw new InvalidDataException("IA64 properties offset is not supported");
//_offset = BitConverter.ToUInt32(properties, 0);
//
//if (_offset % (UInt32)BranchExec.Alignment.ARCH_IA64_ALIGNMENT != 0)
//{
// throw new InvalidDataException("Filter offset does not match alignment");
//}
}
}
public override void ValidateFilter()
{
}
public override int Read(byte[] buffer, int offset, int count)
{
int bytesRead = BaseStream.Read(buffer, offset, count);
BranchExecFilter.IA64Converter(buffer, _ip);
_ip += (UInt32)bytesRead;
return bytesRead;
}
public override void SetBaseStream(Stream stream)
{
BaseStream = stream;
}
}
}

View File

@@ -1,66 +0,0 @@
/*
* PowerPCFilter.cs -- XZ converter PowerPC executable
* <Contribution by Louis-Michel Bergeron, on behalf of aDolus Technology Inc., 2022>
* @TODO Properties offset
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Compressors.Filters;
namespace SharpCompress.Compressors.Xz.Filters
{
internal class PowerPCFilter : BlockFilter
{
public override bool AllowAsLast => false;
public override bool AllowAsNonLast => true;
public override bool ChangesDataSize => false;
private UInt32 _ip = 0;
//private UInt32 _offset = 0;
public override void Init(byte[] properties)
{
if (properties.Length != 0 && properties.Length != 4)
{
throw new InvalidDataException("PPC properties unexpected length");
}
if (properties.Length == 4)
{
// Even XZ doesn't support it.
throw new InvalidDataException("PPC properties offset is not supported");
//_offset = BitConverter.ToUInt32(properties, 0);
//
//if (_offset % (UInt32)BranchExec.Alignment.ARCH_PowerPC_ALIGNMENT != 0)
//{
// throw new InvalidDataException("Filter offset does not match alignment");
//}
}
}
public override void ValidateFilter() { }
public override int Read(byte[] buffer, int offset, int count)
{
int bytesRead = BaseStream.Read(buffer, offset, count);
BranchExecFilter.PowerPCConverter(buffer, _ip);
_ip += (UInt32)bytesRead;
return bytesRead;
}
public override void SetBaseStream(Stream stream)
{
BaseStream = stream;
}
}
}

View File

@@ -1,65 +0,0 @@
/*
* SparcFilter.cs -- XZ converter SPARC executable
* <Contribution by Louis-Michel Bergeron, on behalf of aDolus Technology Inc., 2022>
* @TODO Properties offset
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Compressors.Filters;
namespace SharpCompress.Compressors.Xz.Filters
{
internal class SparcFilter : BlockFilter
{
public override bool AllowAsLast => false;
public override bool AllowAsNonLast => true;
public override bool ChangesDataSize => false;
private UInt32 _ip = 0;
//private UInt32 _offset = 0;
public override void Init(byte[] properties)
{
if (properties.Length != 0 && properties.Length != 4)
{
throw new InvalidDataException("SPARC properties unexpected length");
}
if (properties.Length == 4)
{
// Even XZ doesn't support it.
throw new InvalidDataException("SPARC properties offset is not supported");
//_offset = BitConverter.ToUInt32(properties, 0);
//
//if (_offset % (UInt32)BranchExec.Alignment.ARCH_SPARC_ALIGNMENT != 0)
//{
// throw new InvalidDataException("Filter offset does not match alignment");
//}
}
}
public override void ValidateFilter()
{
}
public override int Read(byte[] buffer, int offset, int count)
{
int bytesRead = BaseStream.Read(buffer, offset, count);
BranchExecFilter.SPARCConverter(buffer, _ip);
_ip += (UInt32)bytesRead;
return bytesRead;
}
public override void SetBaseStream(Stream stream)
{
BaseStream = stream;
}
}
}

View File

@@ -1,70 +0,0 @@
/*
* X86Filter.cs -- XZ converter x86 executable
* <Contribution by Louis-Michel Bergeron, on behalf of aDolus Technology Inc., 2022>
* @TODO Properties offset
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Compressors.Filters;
namespace SharpCompress.Compressors.Xz.Filters
{
internal class X86Filter : BlockFilter
{
public override bool AllowAsLast => false;
public override bool AllowAsNonLast => true;
public override bool ChangesDataSize => false;
private UInt32 _ip = 0;
private UInt32 _state = 0;
//private UInt32 _offset = 0;
public override void Init(byte[] properties)
{
if (properties.Length != 0 && properties.Length != 4)
{
throw new InvalidDataException("X86 properties unexpected length");
}
if (properties.Length == 4)
{
// Even XZ doesn't support it.
throw new InvalidDataException("X86 properties offset is not supported");
//_offset = BitConverter.ToUInt32(properties, 0);
//
//if (_offset % (UInt32)BranchExec.Alignment.ARCH_x86_ALIGNMENT != 0)
//{
// throw new InvalidDataException("Filter offset does not match alignment");
//}
}
}
public override void ValidateFilter()
{
}
public override int Read(byte[] buffer, int offset, int count)
{
int bytesRead = BaseStream.Read(buffer, offset, count);
BranchExecFilter.X86Converter(buffer, _ip, ref _state);
_ip += (UInt32)bytesRead;
return bytesRead;
}
public override void SetBaseStream(Stream stream)
{
BaseStream = stream;
}
}
}

View File

@@ -9,7 +9,7 @@ namespace SharpCompress.Compressors.Xz
{
if (MaxBytes <= 0)
{
throw new ArgumentOutOfRangeException(nameof(MaxBytes));
throw new ArgumentOutOfRangeException();
}
if (MaxBytes > 9)

View File

@@ -1,4 +1,4 @@
using System;
using System.IO;
using System.Linq;
using System.Text;
@@ -22,7 +22,7 @@ namespace SharpCompress.Compressors.Xz
public static XZFooter FromStream(Stream stream)
{
var footer = new XZFooter(new BinaryReader(NonDisposingStream.Create(stream), Encoding.UTF8));
var footer = new XZFooter(new BinaryReader(new NonDisposingStream(stream), Encoding.UTF8));
footer.Process();
return footer;
}

View File

@@ -1,4 +1,4 @@
using System;
using System.IO;
using System.Linq;
using System.Text;
@@ -21,7 +21,7 @@ namespace SharpCompress.Compressors.Xz
public static XZHeader FromStream(Stream stream)
{
var header = new XZHeader(new BinaryReader(NonDisposingStream.Create(stream), Encoding.UTF8));
var header = new XZHeader(new BinaryReader(new NonDisposingStream(stream), Encoding.UTF8));
header.Process();
return header;
}

View File

@@ -1,4 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -30,7 +30,7 @@ namespace SharpCompress.Compressors.Xz
public static XZIndex FromStream(Stream stream, bool indexMarkerAlreadyVerified)
{
var index = new XZIndex(new BinaryReader(NonDisposingStream.Create(stream), Encoding.UTF8), indexMarkerAlreadyVerified);
var index = new XZIndex(new BinaryReader(new NonDisposingStream(stream), Encoding.UTF8), indexMarkerAlreadyVerified);
index.Process();
return index;
}

View File

@@ -42,7 +42,7 @@ namespace SharpCompress.Crypto
public override void SetLength(long value) => throw new NotSupportedException();
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !NET461 && !NETSTANDARD2_0
public override void Write(ReadOnlySpan<byte> buffer)
{

View File

@@ -0,0 +1,172 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace SharpCompress.IO
{
internal class CompositeStream : Stream
{
private readonly IReadOnlyList<Stream> _streams;
private long _pos;
private int _streamIndex;
private long _streamPos;
public override bool CanRead { get; }
public override bool CanWrite { get; }
public override bool CanSeek { get; }
public override long Length { get; }
public override long Position
{
get => _pos;
set
{
if (!CanSeek) throw new NotSupportedException();
if ((value < 0) || (value > Length)) throw new ArgumentOutOfRangeException(nameof(value));
_pos = value;
_streamIndex = -1;
long offset = _pos;
for (int i = 0; i < _streams.Count; i++)
{
var stream = _streams[i];
if (offset < stream.Length)
{
_streamIndex = i;
_streamPos = offset;
break;
}
else
{
offset -= stream.Length;
}
}
}
}
public CompositeStream(IReadOnlyList<Stream> streams)
{
CanRead = true;
CanWrite = false;
CanSeek = true;
Length = 0;
_pos = 0;
_streamIndex = 0;
_streamPos = 0;
_streams = streams;
foreach (var stream in _streams)
{
if (!stream.CanRead) throw new ArgumentException("All streams must be readable");
if (!stream.CanSeek) CanSeek = false;
Length += stream.Length;
}
}
public CompositeStream(IEnumerable<Stream> streams)
: this((IReadOnlyList<Stream>)streams.ToArray())
{ }
public CompositeStream(params Stream[] streams)
: this((IReadOnlyList<Stream>)streams)
{ }
public override int Read(byte[] buffer, int offset, int count)
{
Stream? GetCurrent()
{
if ((_streamIndex < 0) || (_streamIndex >= _streams.Count)) return null;
else return _streams[_streamIndex];
}
if (CanSeek)
{
var stream = GetCurrent();
if (stream is null) return 0;
stream.Position = _streamPos;
int readCount = stream.Read(buffer, offset, count);
_pos += readCount;
_streamPos += readCount;
while (readCount < count)
{
_streamIndex++;
stream = GetCurrent();
if (stream is null) return readCount;
stream.Position = _streamPos = 0;
int rc = stream.Read(buffer, offset + readCount, count - readCount);
readCount += rc;
_pos += rc;
_streamPos += rc;
}
return readCount;
}
else
{
var stream = GetCurrent();
if (stream is null) return 0;
int readCount = stream.Read(buffer, offset, count);
_pos += readCount;
while (readCount < count)
{
_streamIndex++;
stream = GetCurrent();
if (stream is null) return readCount;
int rc = stream.Read(buffer, offset + readCount, count - readCount);
readCount += rc;
_pos += rc;
}
return readCount;
}
}
public override long Seek(long offset, SeekOrigin origin)
{
if (CanSeek)
{
long ClampPos(long value) => Math.Min(Math.Max(value, 0), Length);
switch (origin)
{
case SeekOrigin.Begin:
Position = ClampPos(offset);
break;
case SeekOrigin.Current:
Position = ClampPos(Position + offset);
break;
case SeekOrigin.End:
Position = ClampPos(Length - offset);
break;
}
}
return Position;
}
public override void Flush()
{ }
public override void Write(byte[] buffer, int offset, int count)
=> throw new NotSupportedException();
public override void SetLength(long value)
=> throw new NotSupportedException();
protected override void Dispose(bool disposing)
{
foreach (var stream in _streams)
stream.Dispose();
}
}
}
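A hypothetical in-assembly usage sketch (the class is internal), showing the concatenation semantics:
using System.IO;

var joined = new CompositeStream(
    new MemoryStream(new byte[] { 1, 2 }),
    new MemoryStream(new byte[] { 3 }),
    new MemoryStream(new byte[] { 4, 5 }));
var all = new byte[5];
int n = joined.Read(all, 0, all.Length); // n == 5, all == { 1, 2, 3, 4, 5 }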

View File

@@ -0,0 +1,81 @@
using System;
using System.IO;
namespace SharpCompress.IO
{
internal class ConstantStream : Stream
{
private long _length;
private long _pos;
public byte Value { get; set; }
public override bool CanRead => true;
public override bool CanWrite => false;
public override bool CanSeek => true;
public override long Length => _length;
public override long Position
{
get => _pos;
set
{
if ((value < 0) || (value > Length)) throw new ArgumentOutOfRangeException(nameof(value));
_pos = value;
}
}
public ConstantStream(byte value, long length)
{
Value = value;
_length = length;
_pos = 0;
}
private long ClampPos(long value) => Math.Min(Math.Max(value, 0), Length);
public override void Flush()
{ }
public override int Read(byte[] buffer, int offset, int count)
{
count = (int)Math.Min(count, Length - Position);
for (int i = 0; i < count; i++)
buffer[i + offset] = Value;
Position += count;
return count;
}
public override long Seek(long offset, SeekOrigin origin)
{
switch (origin)
{
case SeekOrigin.Begin:
Position = ClampPos(offset);
break;
case SeekOrigin.Current:
Position = ClampPos(Position + offset);
break;
case SeekOrigin.End:
Position = ClampPos(Length - offset);
break;
}
return Position;
}
public override void SetLength(long value)
{
_length = value;
Position = ClampPos(Position);
}
public override void Write(byte[] buffer, int offset, int count)
=> throw new NotSupportedException();
protected override void Dispose(bool disposing)
{ }
}
}
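A hypothetical usage sketch: DMG images describe zero-filled runs without storing a payload, and ConstantStream can serve such a run without allocating it:
var zeros = new ConstantStream(0x00, length: 4096); // 4 KiB of zeros, no buffer
var buf = new byte[16];
int n = zeros.Read(buf, 0, buf.Length);             // n == 16, all bytes 0x00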

View File

@@ -1,20 +1,11 @@
using System;
using System.IO;
namespace SharpCompress.IO
{
public class NonDisposingStream : Stream
{
public static NonDisposingStream Create(Stream stream, bool throwOnDispose = false)
{
if (stream is NonDisposingStream nonDisposingStream && nonDisposingStream.ThrowOnDispose == throwOnDispose)
{
return nonDisposingStream;
}
return new NonDisposingStream(stream, throwOnDispose);
}
protected NonDisposingStream(Stream stream, bool throwOnDispose = false)
public NonDisposingStream(Stream stream, bool throwOnDispose = false)
{
Stream = stream;
ThrowOnDispose = throwOnDispose;
@@ -67,7 +58,7 @@ namespace SharpCompress.IO
Stream.Write(buffer, offset, count);
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !NET461 && !NETSTANDARD2_0
public override int Read(Span<byte> buffer)
{
@@ -81,4 +72,4 @@ namespace SharpCompress.IO
#endif
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System.IO;
namespace SharpCompress.IO
@@ -119,10 +119,8 @@ namespace SharpCompress.IO
int read;
if (isRewound && bufferStream.Position != bufferStream.Length)
{
// don't read more than left
int readCount = Math.Min(count, (int)(bufferStream.Length - bufferStream.Position));
read = bufferStream.Read(buffer, offset, readCount);
if (read < readCount)
read = bufferStream.Read(buffer, offset, count);
if (read < count)
{
int tempRead = stream.Read(buffer, offset + read, count - read);
if (IsRecording)
@@ -162,4 +160,4 @@ namespace SharpCompress.IO
throw new NotSupportedException();
}
}
}

View File

@@ -0,0 +1,79 @@
using System;
using System.IO;
namespace SharpCompress.IO
{
internal class SeekableSubStream : NonDisposingStream
{
private readonly long _origin;
private long _pos;
public override bool CanRead => true;
public override bool CanWrite => false;
public override bool CanSeek => true;
public override long Length { get; }
public override long Position
{
get => _pos;
set
{
if ((value < 0) || (value > Length)) throw new ArgumentOutOfRangeException(nameof(value));
_pos = value;
}
}
public SeekableSubStream(Stream stream, long origin, long length)
: base(stream, false)
{
if (!stream.CanRead) throw new ArgumentException("Requires a readable stream", nameof(stream));
if (!stream.CanSeek) throw new ArgumentException("Requires a seekable stream", nameof(stream));
_origin = origin;
Length = length;
_pos = 0;
}
public override void Flush()
{ }
public override int Read(byte[] buffer, int offset, int count)
{
count = (int)Math.Min(count, Length - Position);
Stream.Position = Position + _origin;
count = Stream.Read(buffer, offset, count);
Position += count;
return count;
}
public override long Seek(long offset, SeekOrigin origin)
{
long ClampPos(long value) => Math.Min(Math.Max(value, 0), Length);
switch (origin)
{
case SeekOrigin.Begin:
Position = ClampPos(offset);
break;
case SeekOrigin.Current:
Position = ClampPos(Position + offset);
break;
case SeekOrigin.End:
Position = ClampPos(Length - offset);
break;
}
return Position;
}
public override void SetLength(long value)
=> throw new NotSupportedException();
public override void Write(byte[] buffer, int offset, int count)
=> throw new NotSupportedException();
}
}
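And a hypothetical window sketch, where baseStream stands for any readable, seekable stream; disposing the window leaves the base stream open because the type derives from NonDisposingStream:
using System.IO;

using var window = new SeekableSubStream(baseStream, origin: 512, length: 100);
window.Seek(0, SeekOrigin.End);             // clamped to window.Length == 100
window.Position = 0;
int n = window.Read(new byte[100], 0, 100); // reads base bytes 512..611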

Some files were not shown because too many files have changed in this diff.