Compare commits


33 Commits

Author SHA1 Message Date
Adam Hathcock
e08e4e5d9f Enabling Bzip2 but something else is broken 2021-02-21 18:01:56 +00:00
Adam Hathcock
dd710ec308 fixed read only sub stream 2021-02-21 13:37:58 +00:00
Adam Hathcock
5cfc608010 More fixes? 2021-02-21 13:21:33 +00:00
Adam Hathcock
997c11ef25 Bug fix on counting 2021-02-21 12:14:07 +00:00
Adam Hathcock
249f11f543 Rework some zip writing 2021-02-21 12:10:17 +00:00
Adam Hathcock
eeb6761a9f Reuse gzip header reading 2021-02-20 13:11:40 +00:00
Adam Hathcock
0c35abdebe Explicit exception for read shortcut 2021-02-20 12:52:29 +00:00
Adam Hathcock
30da0b91ed Fixed Gzip by reverting EmitHeaderAsync 2021-02-20 11:47:38 +00:00
Adam Hathcock
d9c53e1c82 ZLIbStreamfile fixes? 2021-02-20 11:41:46 +00:00
Adam Hathcock
14e6d95559 More clean up doesn’t help 2021-02-14 18:09:22 +00:00
Adam Hathcock
8cdc49cb85 ReadByteAsync 2021-02-14 14:09:25 +00:00
Adam Hathcock
5c11075d36 Updates for merge 2021-02-14 13:52:55 +00:00
Adam Hathcock
be34fe2056 Merge branch 'master' into async
# Conflicts:
#	src/SharpCompress/Archives/GZip/GZipArchive.cs
#	src/SharpCompress/Common/GZip/GZipFilePart.cs
#	src/SharpCompress/Common/Tar/Headers/TarHeader.cs
#	src/SharpCompress/Common/Zip/SeekableZipHeaderFactory.cs
#	src/SharpCompress/Common/Zip/ZipFilePart.cs
#	src/SharpCompress/Compressors/Deflate/ZlibBaseStream.cs
#	src/SharpCompress/Compressors/LZMA/LZipStream.cs
#	src/SharpCompress/Compressors/Xz/BinaryUtils.cs
#	src/SharpCompress/Compressors/Xz/Crc32.cs
#	src/SharpCompress/Writers/Tar/TarWriter.cs
#	src/SharpCompress/Writers/Zip/ZipCentralDirectoryEntry.cs
#	src/SharpCompress/Writers/Zip/ZipWriter.cs
2021-02-14 13:38:58 +00:00
Adam Hathcock
7e9fb645cb Minor changes 2021-02-14 12:55:05 +00:00
Adam Hathcock
15209178ce AsyncStream for BZip2 2021-02-13 18:09:58 +00:00
Adam Hathcock
ea688e1f4c Writer problems still :( 2021-02-13 17:52:31 +00:00
Adam Hathcock
fe4cc8e6cb Zip LZMA write will roundtrip 2021-02-13 16:44:53 +00:00
Adam Hathcock
1f37ced35a AsyncStream everything 2021-02-13 16:16:03 +00:00
Adam Hathcock
949e90351f More LZMA conversion going, BZip2 not for now 2021-02-13 10:45:57 +00:00
Adam Hathcock
db02e8b634 Minor fixes 2021-02-08 18:25:14 +00:00
Adam Hathcock
d6fe729068 create async 2021-02-08 12:07:45 +00:00
Adam Hathcock
ef3d4da286 Fix test and some zip writing 2021-02-08 11:18:57 +00:00
Adam Hathcock
813bd5ae80 Async open entry 2021-02-08 10:17:34 +00:00
Adam Hathcock
f40d3342c8 Tar and Xz mostly work 2021-02-07 18:58:24 +00:00
Adam Hathcock
9738b812c4 Fix rewindable stream and encoding tests 2021-02-07 18:08:29 +00:00
Adam Hathcock
c6a011df17 Fixed reader issue 2021-02-07 18:03:17 +00:00
Adam Hathcock
7d2dc58766 More API fixes 2021-02-07 13:38:41 +00:00
Adam Hathcock
d234f2d509 First pass of trying tar 2021-02-07 11:30:44 +00:00
Adam Hathcock
cdba5ec419 AsyncEnumerable usage in entries 2021-02-07 09:08:15 +00:00
Adam Hathcock
9cf8a3dbbe more awaits 2021-02-06 09:08:09 +00:00
Adam Hathcock
2b4f02997e more async await 2021-02-01 08:34:12 +00:00
Adam Hathcock
bcdfd992a3 async dispose and fix tests? 2021-01-24 09:18:38 +00:00
Adam Hathcock
3a820c52bd async Deflate. Start of writer 2021-01-24 09:06:02 +00:00
972 changed files with 49817 additions and 167633 deletions

View File

@@ -2,12 +2,11 @@
   "version": 1,
   "isRoot": true,
   "tools": {
-    "csharpier": {
-      "version": "1.2.5",
+    "dotnet-format": {
+      "version": "4.1.131201",
       "commands": [
-        "csharpier"
-      ],
-      "rollForward": false
+        "dotnet-format"
+      ]
     }
   }
 }

View File

@@ -1,7 +0,0 @@
enabled: true
agent:
  name: copilot-coding-agent
  allow:
    - paths: ["src/**/*", "tests/**/*", "README.md", "AGENTS.md"]
      actions: ["create", "modify"]
  require_review_before_merge: true

View File

@@ -1,570 +0,0 @@
# Version: 2.0.1 (Using https://semver.org/)
# Updated: 2020-12-11
# See https://github.com/RehanSaeed/EditorConfig/releases for release notes.
# See https://github.com/RehanSaeed/EditorConfig for updates to this file.
# See http://EditorConfig.org for more information about .editorconfig files.
##########################################
# Common Settings
##########################################
# This file is the top-most EditorConfig file
root = true
# All Files
[*]
charset = utf-8
indent_style = space
indent_size = 4
insert_final_newline = true
trim_trailing_whitespace = true
##########################################
# File Extension Settings
##########################################
# Visual Studio Solution Files
[*.sln]
indent_style = tab
# Visual Studio XML Project Files
[*.{csproj,vbproj,vcxproj.filters,proj,projitems,shproj}]
indent_size = 2
# XML Configuration Files
[*.{xml,config,props,targets,nuspec,resx,ruleset,vsixmanifest,vsct}]
indent_size = 2
# JSON Files
[*.{json,json5,webmanifest}]
indent_size = 2
# YAML Files
[*.{yml,yaml}]
indent_size = 2
# Markdown Files
[*.md]
trim_trailing_whitespace = false
# Web Files
[*.{htm,html,js,jsm,ts,tsx,css,sass,scss,less,svg,vue}]
indent_size = 2
# Batch Files
[*.{cmd,bat}]
end_of_line = crlf
# Bash Files
[*.sh]
end_of_line = lf
# Makefiles
[Makefile]
indent_style = tab
##########################################
# Default .NET Code Style Severities
# https://docs.microsoft.com/dotnet/fundamentals/code-analysis/configuration-options#scope
##########################################
[*.{cs,csx,cake,vb,vbx}]
# Default Severity for all .NET Code Style rules below
dotnet_analyzer_diagnostic.severity = silent
##########################################
# File Header (Uncomment to support file headers)
# https://docs.microsoft.com/visualstudio/ide/reference/add-file-header
##########################################
# [*.{cs,csx,cake,vb,vbx}]
# file_header_template = <copyright file="{fileName}" company="PROJECT-AUTHOR">\n© PROJECT-AUTHOR\n</copyright>
# SA1636: File header copyright text should match
# Justification: .editorconfig supports file headers. If this is changed to a value other than "none", a stylecop.json file will need to be added to the project.
# dotnet_diagnostic.SA1636.severity = none
##########################################
# .NET Language Conventions
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions
##########################################
# .NET Code Style Settings
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#net-code-style-settings
[*.{cs,csx,cake,vb,vbx}]
# "this." and "Me." qualifiers
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#this-and-me
#dotnet_style_qualification_for_field = true:warning
#dotnet_style_qualification_for_property = true:warning
#dotnet_style_qualification_for_method = true:warning
#dotnet_style_qualification_for_event = true:warning
# Language keywords instead of framework type names for type references
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#language-keywords
dotnet_style_predefined_type_for_locals_parameters_members = true:warning
dotnet_style_predefined_type_for_member_access = true:warning
# Modifier preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#normalize-modifiers
dotnet_style_require_accessibility_modifiers = always:warning
csharp_preferred_modifier_order = public,private,protected,internal,static,extern,new,virtual,abstract,sealed,override,readonly,unsafe,volatile,async:warning
visual_basic_preferred_modifier_order = Partial,Default,Private,Protected,Public,Friend,NotOverridable,Overridable,MustOverride,Overloads,Overrides,MustInherit,NotInheritable,Static,Shared,Shadows,ReadOnly,WriteOnly,Dim,Const,WithEvents,Widening,Narrowing,Custom,Async:warning
dotnet_style_readonly_field = true:warning
# Parentheses preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#parentheses-preferences
dotnet_style_parentheses_in_arithmetic_binary_operators = always_for_clarity:warning
dotnet_style_parentheses_in_relational_binary_operators = always_for_clarity:warning
dotnet_style_parentheses_in_other_binary_operators = always_for_clarity:warning
dotnet_style_parentheses_in_other_operators = always_for_clarity:suggestion
# Expression-level preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#expression-level-preferences
dotnet_style_object_initializer = true:warning
dotnet_style_collection_initializer = true:warning
dotnet_style_explicit_tuple_names = true:warning
dotnet_style_prefer_inferred_tuple_names = true:warning
dotnet_style_prefer_inferred_anonymous_type_member_names = true:warning
dotnet_style_prefer_auto_properties = true:warning
dotnet_style_prefer_is_null_check_over_reference_equality_method = true:warning
dotnet_style_prefer_conditional_expression_over_assignment = false:suggestion
dotnet_diagnostic.IDE0045.severity = suggestion
dotnet_style_prefer_conditional_expression_over_return = false:suggestion
dotnet_diagnostic.IDE0046.severity = suggestion
dotnet_style_prefer_compound_assignment = true:warning
# Null-checking preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#null-checking-preferences
dotnet_style_coalesce_expression = true:warning
dotnet_style_null_propagation = true:warning
# Parameter preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#parameter-preferences
dotnet_code_quality_unused_parameters = all:warning
# More style options (Undocumented)
# https://github.com/MicrosoftDocs/visualstudio-docs/issues/3641
dotnet_style_operator_placement_when_wrapping = end_of_line
# https://github.com/dotnet/roslyn/pull/40070
dotnet_style_prefer_simplified_interpolation = true:warning
# C# Code Style Settings
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#c-code-style-settings
[*.{cs,csx,cake}]
# Implicit and explicit types
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#implicit-and-explicit-types
csharp_style_var_for_built_in_types = true:warning
csharp_style_var_when_type_is_apparent = true:warning
csharp_style_var_elsewhere = true:warning
# Expression-bodied members
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#expression-bodied-members
csharp_style_expression_bodied_methods = true:warning
csharp_style_expression_bodied_constructors = true:warning
csharp_style_expression_bodied_operators = true:warning
csharp_style_expression_bodied_properties = true:warning
csharp_style_expression_bodied_indexers = true:warning
csharp_style_expression_bodied_accessors = true:warning
csharp_style_expression_bodied_lambdas = true:warning
csharp_style_expression_bodied_local_functions = true:warning
# Pattern matching
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#pattern-matching
csharp_style_pattern_matching_over_is_with_cast_check = true:warning
csharp_style_pattern_matching_over_as_with_null_check = true:warning
# Inlined variable declarations
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#inlined-variable-declarations
csharp_style_inlined_variable_declaration = true:warning
# Expression-level preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#expression-level-preferences
csharp_prefer_simple_default_expression = true:warning
# "Null" checking preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#c-null-checking-preferences
csharp_style_throw_expression = true:warning
csharp_style_conditional_delegate_call = true:warning
# Code block preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#code-block-preferences
csharp_prefer_braces = true:warning
# Unused value preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#unused-value-preferences
csharp_style_unused_value_expression_statement_preference = discard_variable:suggestion
dotnet_diagnostic.IDE0058.severity = suggestion
csharp_style_unused_value_assignment_preference = discard_variable:suggestion
dotnet_diagnostic.IDE0059.severity = suggestion
# Index and range preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#index-and-range-preferences
csharp_style_prefer_index_operator = true:warning
csharp_style_prefer_range_operator = true:warning
# Miscellaneous preferences
# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#miscellaneous-preferences
csharp_style_deconstructed_variable_declaration = true:warning
csharp_style_pattern_local_over_anonymous_function = true:warning
csharp_using_directive_placement = outside_namespace:warning
csharp_prefer_static_local_function = true:warning
csharp_prefer_simple_using_statement = true:suggestion
dotnet_diagnostic.IDE0063.severity = suggestion
##########################################
# .NET Formatting Conventions
# https://docs.microsoft.com/visualstudio/ide/editorconfig-code-style-settings-reference#formatting-conventions
##########################################
# Organize usings
# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#organize-using-directives
dotnet_sort_system_directives_first = true
# Newline options
# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#new-line-options
csharp_new_line_before_open_brace = all
csharp_new_line_before_else = true
csharp_new_line_before_catch = true
csharp_new_line_before_finally = true
csharp_new_line_before_members_in_object_initializers = true
csharp_new_line_before_members_in_anonymous_types = true
csharp_new_line_between_query_expression_clauses = true
# Indentation options
# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#indentation-options
csharp_indent_case_contents = true
csharp_indent_switch_labels = true
csharp_indent_labels = no_change
csharp_indent_block_contents = true
csharp_indent_braces = false
csharp_indent_case_contents_when_block = false
# Spacing options
# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#spacing-options
csharp_space_after_cast = false
csharp_space_after_keywords_in_control_flow_statements = true
csharp_space_between_parentheses = false
csharp_space_before_colon_in_inheritance_clause = true
csharp_space_after_colon_in_inheritance_clause = true
csharp_space_around_binary_operators = before_and_after
csharp_space_between_method_declaration_parameter_list_parentheses = false
csharp_space_between_method_declaration_empty_parameter_list_parentheses = false
csharp_space_between_method_declaration_name_and_open_parenthesis = false
csharp_space_between_method_call_parameter_list_parentheses = false
csharp_space_between_method_call_empty_parameter_list_parentheses = false
csharp_space_between_method_call_name_and_opening_parenthesis = false
csharp_space_after_comma = true
csharp_space_before_comma = false
csharp_space_after_dot = false
csharp_space_before_dot = false
csharp_space_after_semicolon_in_for_statement = true
csharp_space_before_semicolon_in_for_statement = false
csharp_space_around_declaration_statements = false
csharp_space_before_open_square_brackets = false
csharp_space_between_empty_square_brackets = false
csharp_space_between_square_brackets = false
# Wrapping options
# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#wrap-options
csharp_preserve_single_line_statements = false
csharp_preserve_single_line_blocks = true
csharp_style_namespace_declarations = file_scoped
##########################################
# .NET Naming Conventions
# https://docs.microsoft.com/visualstudio/ide/editorconfig-naming-conventions
##########################################
[*.{cs,csx,cake,vb,vbx}]
dotnet_diagnostic.CA1000.severity = suggestion
dotnet_diagnostic.CA1001.severity = error
dotnet_diagnostic.CA1018.severity = error
dotnet_diagnostic.CA1036.severity = silent
dotnet_diagnostic.CA1051.severity = suggestion
dotnet_diagnostic.CA1068.severity = error
dotnet_diagnostic.CA1069.severity = error
dotnet_diagnostic.CA1304.severity = error
dotnet_diagnostic.CA1305.severity = suggestion
dotnet_diagnostic.CA1307.severity = suggestion
dotnet_diagnostic.CA1309.severity = suggestion
dotnet_diagnostic.CA1310.severity = error
dotnet_diagnostic.CA1507.severity = suggestion
dotnet_diagnostic.CA1513.severity = suggestion
dotnet_diagnostic.CA1707.severity = suggestion
dotnet_diagnostic.CA1708.severity = suggestion
dotnet_diagnostic.CA1711.severity = suggestion
dotnet_diagnostic.CA1716.severity = suggestion
dotnet_diagnostic.CA1720.severity = suggestion
dotnet_diagnostic.CA1725.severity = suggestion
dotnet_diagnostic.CA1805.severity = suggestion
dotnet_diagnostic.CA1816.severity = suggestion
dotnet_diagnostic.CA1822.severity = suggestion
dotnet_diagnostic.CA1825.severity = error
dotnet_diagnostic.CA1826.severity = silent
dotnet_diagnostic.CA1827.severity = error
dotnet_diagnostic.CA1829.severity = suggestion
dotnet_diagnostic.CA1834.severity = error
dotnet_diagnostic.CA1845.severity = suggestion
dotnet_diagnostic.CA1848.severity = suggestion
dotnet_diagnostic.CA1852.severity = suggestion
dotnet_diagnostic.CA1860.severity = silent
dotnet_diagnostic.CA2016.severity = suggestion
dotnet_diagnostic.CA2201.severity = error
dotnet_diagnostic.CA2206.severity = error
dotnet_diagnostic.CA2208.severity = error
dotnet_diagnostic.CA2211.severity = error
dotnet_diagnostic.CA2249.severity = error
dotnet_diagnostic.CA2251.severity = error
dotnet_diagnostic.CA2252.severity = none
dotnet_diagnostic.CA2254.severity = suggestion
dotnet_diagnostic.CS0169.severity = error
dotnet_diagnostic.CS0219.severity = error
dotnet_diagnostic.CS0649.severity = suggestion
dotnet_diagnostic.CS1998.severity = error
dotnet_diagnostic.CS8602.severity = error
dotnet_diagnostic.CS8604.severity = error
dotnet_diagnostic.CS8618.severity = error
dotnet_diagnostic.CS0618.severity = suggestion
dotnet_diagnostic.CS4014.severity = error
dotnet_diagnostic.CS8600.severity = error
dotnet_diagnostic.CS8603.severity = error
dotnet_diagnostic.CS8625.severity = error
dotnet_diagnostic.BL0005.severity = suggestion
dotnet_diagnostic.MVC1000.severity = suggestion
dotnet_diagnostic.RZ10012.severity = error
dotnet_diagnostic.IDE0004.severity = error # redundant cast
dotnet_diagnostic.IDE0005.severity = suggestion
dotnet_diagnostic.IDE0007.severity = error # Use var
dotnet_diagnostic.IDE0011.severity = error # Use braces on if statements
dotnet_diagnostic.IDE0010.severity = silent # populate switch
dotnet_diagnostic.IDE0017.severity = suggestion # initialization can be simplified
dotnet_diagnostic.IDE0021.severity = silent # expression body for constructors
dotnet_diagnostic.IDE0022.severity = silent # expression body for methods
dotnet_diagnostic.IDE0023.severity = suggestion # use expression body for operators
dotnet_diagnostic.IDE0024.severity = silent # expression body for operators
dotnet_diagnostic.IDE0025.severity = suggestion # use expression body for properties
dotnet_diagnostic.IDE0027.severity = suggestion # Use expression body for accessors
dotnet_diagnostic.IDE0028.severity = silent # expression body for accessors
dotnet_diagnostic.IDE0032.severity = suggestion # Use auto property
dotnet_diagnostic.IDE0033.severity = error # prefer tuple name
dotnet_diagnostic.IDE0037.severity = suggestion # simplify anonymous type
dotnet_diagnostic.IDE0040.severity = error # modifiers required
dotnet_diagnostic.IDE0041.severity = error # simplify null
dotnet_diagnostic.IDE0042.severity = error # deconstruct variable
dotnet_diagnostic.IDE0044.severity = suggestion # make field only when possible
dotnet_diagnostic.IDE0047.severity = suggestion # parameter name
dotnet_diagnostic.IDE0051.severity = error # unused field
dotnet_diagnostic.IDE0052.severity = error # unused member
dotnet_diagnostic.IDE0053.severity = suggestion # lambda not needed
dotnet_diagnostic.IDE0055.severity = suggestion # Fix formatting
dotnet_diagnostic.IDE0057.severity = suggestion # substring can be simplified
dotnet_diagnostic.IDE0060.severity = suggestion # unused parameters
dotnet_diagnostic.IDE0061.severity = suggestion # local expression body
dotnet_diagnostic.IDE0062.severity = suggestion # local to static
dotnet_diagnostic.IDE0063.severity = error # simplify using
dotnet_diagnostic.IDE0066.severity = suggestion # switch expression
dotnet_diagnostic.IDE0072.severity = suggestion # Populate switch - forces population of all cases even when default specified
dotnet_diagnostic.IDE0078.severity = suggestion # use pattern matching
dotnet_diagnostic.IDE0090.severity = suggestion # new can be simplified
dotnet_diagnostic.IDE0130.severity = suggestion # namespace folder structure
dotnet_diagnostic.IDE0160.severity = silent # Use block namespaces ARE NOT required
dotnet_diagnostic.IDE0161.severity = error # Please use file namespaces
dotnet_diagnostic.IDE0200.severity = suggestion # lambda not needed
dotnet_diagnostic.IDE1006.severity = suggestion # Naming rule violation: These words cannot contain lower case characters
dotnet_diagnostic.IDE0260.severity = suggestion # Use pattern matching
dotnet_diagnostic.IDE0270.severity = suggestion # Null check simplification
dotnet_diagnostic.IDE0290.severity = error # Primary Constructor
dotnet_diagnostic.IDE0300.severity = suggestion # Collection
dotnet_diagnostic.IDE0305.severity = suggestion # Collection ToList
dotnet_diagnostic.NX0001.severity = error
dotnet_diagnostic.NX0002.severity = silent
dotnet_diagnostic.NX0003.severity = silent
dotnet_diagnostic.VSTHRD110.severity = error
dotnet_diagnostic.VSTHRD107.severity = error
##########################################
# Styles
##########################################
# camel_case_style - Define the camelCase style
dotnet_naming_style.camel_case_style.capitalization = camel_case
# pascal_case_style - Define the PascalCase style
dotnet_naming_style.pascal_case_style.capitalization = pascal_case
# constant_case - Define the CONSTANT_CASE style
dotnet_naming_style.constant_case.capitalization = all_upper
dotnet_naming_style.constant_case.word_separator = _
# first_upper_style - The first character must start with an upper-case character
dotnet_naming_style.first_upper_style.capitalization = first_word_upper
# prefix_interface_with_i_style - Interfaces must be PascalCase and the first character of an interface must be an 'I'
dotnet_naming_style.prefix_interface_with_i_style.capitalization = pascal_case
dotnet_naming_style.prefix_interface_with_i_style.required_prefix = I
# prefix_type_parameters_with_t_style - Generic Type Parameters must be PascalCase and the first character must be a 'T'
dotnet_naming_style.prefix_type_parameters_with_t_style.capitalization = pascal_case
dotnet_naming_style.prefix_type_parameters_with_t_style.required_prefix = T
# disallowed_style - Anything that has this style applied is marked as disallowed
dotnet_naming_style.disallowed_style.capitalization = pascal_case
dotnet_naming_style.disallowed_style.required_prefix = ____RULE_VIOLATION____
dotnet_naming_style.disallowed_style.required_suffix = ____RULE_VIOLATION____
# internal_error_style - This style should never occur... if it does, it indicates a bug in file or in the parser using the file
dotnet_naming_style.internal_error_style.capitalization = pascal_case
dotnet_naming_style.internal_error_style.required_prefix = ____INTERNAL_ERROR____
dotnet_naming_style.internal_error_style.required_suffix = ____INTERNAL_ERROR____
# underscore_camel_case_style - Private fields must be camelCase and prefixed with an underscore
dotnet_naming_style.underscore_camel_case_style.capitalization = camel_case
dotnet_naming_style.underscore_camel_case_style.required_prefix = _
##########################################
# .NET Design Guideline Field Naming Rules
# Naming rules for fields follow the .NET Framework design guidelines
# https://docs.microsoft.com/dotnet/standard/design-guidelines/index
##########################################
# All public/protected/protected_internal constant fields must be constant_case
# https://docs.microsoft.com/dotnet/standard/design-guidelines/field
dotnet_naming_symbols.public_protected_constant_fields_group.applicable_accessibilities = public, protected, protected_internal
dotnet_naming_symbols.public_protected_constant_fields_group.required_modifiers = const
dotnet_naming_symbols.public_protected_constant_fields_group.applicable_kinds = field
dotnet_naming_rule.public_protected_constant_fields_must_be_pascal_case_rule.symbols = public_protected_constant_fields_group
dotnet_naming_rule.public_protected_constant_fields_must_be_pascal_case_rule.style = constant_case
dotnet_naming_rule.public_protected_constant_fields_must_be_pascal_case_rule.severity = warning
# All public/protected/protected_internal static readonly fields must be constant_case
# https://docs.microsoft.com/dotnet/standard/design-guidelines/field
dotnet_naming_symbols.public_protected_static_readonly_fields_group.applicable_accessibilities = public, protected, protected_internal
dotnet_naming_symbols.public_protected_static_readonly_fields_group.required_modifiers = static, readonly
dotnet_naming_symbols.public_protected_static_readonly_fields_group.applicable_kinds = field
dotnet_naming_rule.public_protected_static_readonly_fields_must_be_pascal_case_rule.symbols = public_protected_static_readonly_fields_group
dotnet_naming_rule.public_protected_static_readonly_fields_must_be_pascal_case_rule.style = constant_case
dotnet_naming_rule.public_protected_static_readonly_fields_must_be_pascal_case_rule.severity = warning
# No other public/protected/protected_internal fields are allowed
# https://docs.microsoft.com/dotnet/standard/design-guidelines/field
dotnet_naming_symbols.other_public_protected_fields_group.applicable_accessibilities = public, protected, protected_internal
dotnet_naming_symbols.other_public_protected_fields_group.applicable_kinds = field
dotnet_naming_rule.other_public_protected_fields_disallowed_rule.symbols = other_public_protected_fields_group
dotnet_naming_rule.other_public_protected_fields_disallowed_rule.style = disallowed_style
dotnet_naming_rule.other_public_protected_fields_disallowed_rule.severity = error
##########################################
# StyleCop Field Naming Rules
# Naming rules for fields follow the StyleCop analyzers
# This does not override any rules using disallowed_style above
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers
##########################################
# All constant fields must be constant_case
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1303.md
dotnet_naming_symbols.stylecop_constant_fields_group.applicable_accessibilities = public, internal, protected_internal, protected, private_protected, private
dotnet_naming_symbols.stylecop_constant_fields_group.required_modifiers = const
dotnet_naming_symbols.stylecop_constant_fields_group.applicable_kinds = field
dotnet_naming_rule.stylecop_constant_fields_must_be_pascal_case_rule.symbols = stylecop_constant_fields_group
dotnet_naming_rule.stylecop_constant_fields_must_be_pascal_case_rule.style = constant_case
dotnet_naming_rule.stylecop_constant_fields_must_be_pascal_case_rule.severity = warning
# All static readonly fields must be constant_case
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1311.md
dotnet_naming_symbols.stylecop_static_readonly_fields_group.applicable_accessibilities = public, internal, protected_internal, protected, private_protected, private
dotnet_naming_symbols.stylecop_static_readonly_fields_group.required_modifiers = static, readonly
dotnet_naming_symbols.stylecop_static_readonly_fields_group.applicable_kinds = field
dotnet_naming_rule.stylecop_static_readonly_fields_must_be_pascal_case_rule.symbols = stylecop_static_readonly_fields_group
dotnet_naming_rule.stylecop_static_readonly_fields_must_be_pascal_case_rule.style = constant_case
dotnet_naming_rule.stylecop_static_readonly_fields_must_be_pascal_case_rule.severity = warning
# No non-private instance fields are allowed
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1401.md
dotnet_naming_symbols.stylecop_fields_must_be_private_group.applicable_accessibilities = public, internal, protected_internal, protected, private_protected
dotnet_naming_symbols.stylecop_fields_must_be_private_group.applicable_kinds = field
dotnet_naming_rule.stylecop_instance_fields_must_be_private_rule.symbols = stylecop_fields_must_be_private_group
dotnet_naming_rule.stylecop_instance_fields_must_be_private_rule.style = disallowed_style
dotnet_naming_rule.stylecop_instance_fields_must_be_private_rule.severity = error
# Private fields must be camelCase
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1306.md
dotnet_naming_symbols.stylecop_private_fields_group.applicable_accessibilities = private
dotnet_naming_symbols.stylecop_private_fields_group.applicable_kinds = field
dotnet_naming_rule.stylecop_private_fields_must_be_camel_case_rule.symbols = stylecop_private_fields_group
dotnet_naming_rule.stylecop_private_fields_must_be_camel_case_rule.style = underscore_camel_case_style
dotnet_naming_rule.stylecop_private_fields_must_be_camel_case_rule.severity = warning
# Local variables must be camelCase
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1312.md
dotnet_naming_symbols.stylecop_local_fields_group.applicable_accessibilities = local
dotnet_naming_symbols.stylecop_local_fields_group.applicable_kinds = local
dotnet_naming_rule.stylecop_local_fields_must_be_camel_case_rule.symbols = stylecop_local_fields_group
dotnet_naming_rule.stylecop_local_fields_must_be_camel_case_rule.style = camel_case_style
dotnet_naming_rule.stylecop_local_fields_must_be_camel_case_rule.severity = warning
# This rule should never fire. However, it's included for at least two purposes:
# First, it helps to understand, reason about, and root-cause certain types of issues, such as bugs in .editorconfig parsers.
# Second, it helps to raise immediate awareness if a new field type is added (as occurred recently in C#).
dotnet_naming_symbols.sanity_check_uncovered_field_case_group.applicable_accessibilities = *
dotnet_naming_symbols.sanity_check_uncovered_field_case_group.applicable_kinds = field
dotnet_naming_rule.sanity_check_uncovered_field_case_rule.symbols = sanity_check_uncovered_field_case_group
dotnet_naming_rule.sanity_check_uncovered_field_case_rule.style = internal_error_style
dotnet_naming_rule.sanity_check_uncovered_field_case_rule.severity = error
##########################################
# Other Naming Rules
##########################################
# All of the following must be PascalCase:
# - Namespaces
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-namespaces
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1300.md
# - Classes and Enumerations
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-classes-structs-and-interfaces
# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1300.md
# - Delegates
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-classes-structs-and-interfaces#names-of-common-types
# - Constructors, Properties, Events, Methods
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-type-members
dotnet_naming_symbols.element_group.applicable_kinds = namespace, class, enum, struct, delegate, event, method, property
dotnet_naming_rule.element_rule.symbols = element_group
dotnet_naming_rule.element_rule.style = pascal_case_style
dotnet_naming_rule.element_rule.severity = warning
# Interfaces use PascalCase and are prefixed with uppercase 'I'
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-classes-structs-and-interfaces
dotnet_naming_symbols.interface_group.applicable_kinds = interface
dotnet_naming_rule.interface_rule.symbols = interface_group
dotnet_naming_rule.interface_rule.style = prefix_interface_with_i_style
dotnet_naming_rule.interface_rule.severity = warning
# Generics Type Parameters use PascalCase and are prefixed with uppercase 'T'
# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-classes-structs-and-interfaces
dotnet_naming_symbols.type_parameter_group.applicable_kinds = type_parameter
dotnet_naming_rule.type_parameter_rule.symbols = type_parameter_group
dotnet_naming_rule.type_parameter_rule.style = prefix_type_parameters_with_t_style
dotnet_naming_rule.type_parameter_rule.severity = warning
# Function parameters use camelCase
# https://docs.microsoft.com/dotnet/standard/design-guidelines/naming-parameters
dotnet_naming_symbols.parameters_group.applicable_kinds = parameter
dotnet_naming_rule.parameters_rule.symbols = parameters_group
dotnet_naming_rule.parameters_rule.style = camel_case_style
dotnet_naming_rule.parameters_rule.severity = warning
##########################################
# License
##########################################
# The following applies as to the .editorconfig file ONLY, and is
# included below for reference, per the requirements of the license
# corresponding to this .editorconfig file.
# See: https://github.com/RehanSaeed/EditorConfig
#
# MIT License
#
# Copyright (c) 2017-2019 Muhammad Rehan Saeed
# Copyright (c) 2019 Henry Gabryjelski
#
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the
# Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject
# to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
##########################################
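
To make the naming rules above concrete, here is a small illustrative snippet (not from the repository) whose identifiers satisfy them: PascalCase types and members, an `I`-prefixed interface, a `T`-prefixed type parameter, CONSTANT_CASE constants, camelCase parameters, and `_camelCase` private fields.

```csharp
// Illustrative names only; each identifier follows a rule defined in the .editorconfig above.
public interface ICompressor { }                      // interface_rule: PascalCase with 'I' prefix

public class StreamCompressor<TEntry> : ICompressor   // element_rule + type_parameter_rule
{
    public const int DEFAULT_BUFFER_SIZE = 81920;     // constant fields: CONSTANT_CASE
    private readonly int _bufferSize;                 // private fields: '_' + camelCase

    public StreamCompressor(int bufferSize) =>        // parameters: camelCase
        _bufferSize = bufferSize;

    public int BufferSize => _bufferSize;             // properties: PascalCase
}
```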

View File

@@ -1,17 +0,0 @@
enabled: true
agent:
  name: copilot-coding-agent
  allow:
    - paths: ["src/**/*", "tests/**/*", "README.md", "AGENTS.md"]
      actions: ["create", "modify", "delete"]
  require_review_before_merge: true
  required_approvals: 1
  allowed_merge_strategies:
    - squash
    - merge
  auto_merge_on_green: false
  run_workflows: true
notes: |
  - This manifest expresses the policy for the Copilot coding agent in this repository.
  - It does NOT install or authorize the agent; a repository admin must install the Copilot coding agent app and grant the repository the necessary permissions (contents: write, pull_requests: write, checks: write, actions: write/read, issues: write) to allow the agent to act.
  - Keep allow paths narrow and prefer require_review_before_merge during initial rollout.

View File

@@ -1,13 +0,0 @@
version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"
  - package-ecosystem: "nuget"
    directory: "/" # change to "/src/YourProject" if .csproj files are in subfolders
    schedule:
      interval: "weekly"
    open-pull-requests-limit: 5
    # optional: target-branch: "master"

View File

@@ -1,25 +0,0 @@
# Plan: Implement Missing Async Functionality in SharpCompress
SharpCompress has async support for low-level stream operations and Reader/Writer APIs, but critical entry points (Archive.Open, factory methods, initialization) remain synchronous. This plan adds async overloads for all user-facing I/O operations and fixes existing async bugs, enabling full end-to-end async workflows.
## Steps
1. **Add async factory methods** to [ArchiveFactory.cs](src/SharpCompress/Factories/ArchiveFactory.cs), [ReaderFactory.cs](src/SharpCompress/Factories/ReaderFactory.cs), and [WriterFactory.cs](src/SharpCompress/Factories/WriterFactory.cs) with `OpenAsync` and `CreateAsync` overloads accepting `CancellationToken`
2. **Implement async Open methods** on concrete archive types ([ZipArchive.cs](src/SharpCompress/Archives/Zip/ZipArchive.cs), [TarArchive.cs](src/SharpCompress/Archives/Tar/TarArchive.cs), [RarArchive.cs](src/SharpCompress/Archives/Rar/RarArchive.cs), [GZipArchive.cs](src/SharpCompress/Archives/GZip/GZipArchive.cs), [SevenZipArchive.cs](src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs)) and reader types ([ZipReader.cs](src/SharpCompress/Readers/Zip/ZipReader.cs), [TarReader.cs](src/SharpCompress/Readers/Tar/TarReader.cs), etc.)
3. **Convert archive initialization logic to async** including header reading, volume loading, and format signature detection across archive constructors and internal initialization methods
4. **Fix LZMA decoder async bugs** in [LzmaStream.cs](src/SharpCompress/Compressors/LZMA/LzmaStream.cs), [Decoder.cs](src/SharpCompress/Compressors/LZMA/Decoder.cs), and [OutWindow.cs](src/SharpCompress/Compressors/LZMA/OutWindow.cs) to enable true async 7Zip support and remove `NonDisposingStream` workaround
5. **Complete Rar async implementation** by converting `UnpackV2017` methods to async in [UnpackV2017.cs](src/SharpCompress/Compressors/Rar/UnpackV2017.cs) and updating Rar20 decompression
6. **Add comprehensive async tests** covering all new async entry points, cancellation scenarios, and concurrent operations across all archive formats in test files
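As an illustration of step 1, here is a minimal sketch of the additive overload pattern. `OpenAsync` and the `CancellationToken` parameter come from the plan itself; the class name, wrapper body, and use of `Task.FromResult` are assumptions for illustration, not the final implementation.

```csharp
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Readers;

// Hypothetical additive overload: the sync Open stays, an *Async twin is added.
public static class AsyncFactorySketch
{
    public static Task<IArchive> OpenAsync(
        Stream stream,
        ReaderOptions? options = null,
        CancellationToken cancellationToken = default)
    {
        cancellationToken.ThrowIfCancellationRequested();
        // Until header parsing is truly async (step 3), the overload can wrap
        // the existing sync path, then later switch to awaiting real async I/O.
        return Task.FromResult(ArchiveFactory.Open(stream, options));
    }
}
```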
## Further Considerations
1. **Breaking changes** - Should new async methods be added alongside existing sync methods (non-breaking), or should sync methods eventually be deprecated? Recommend additive approach for backward compatibility.
2. **Performance impact** - Header parsing for formats like Zip/Tar is often small; consider whether truly async parsing adds value vs sync parsing wrapped in Task, or make it conditional based on stream type (network vs file).
3. **7Zip complexity** - The LZMA async bug fix (Step 4) may be challenging due to state management in the decoder; consider whether to scope it separately or implement a simpler workaround that maintains correctness.

View File

@@ -1,123 +0,0 @@
# Plan: Modernize SharpCompress Public API
Based on comprehensive analysis, the API has several inconsistencies around factory patterns, async support, format capabilities, and options classes. Most improvements can be done incrementally without breaking changes.
## Steps
1. **Standardize factory patterns** by deprecating format-specific static `Open` methods in [Archives/Zip/ZipArchive.cs](src/SharpCompress/Archives/Zip/ZipArchive.cs), [Archives/Tar/TarArchive.cs](src/SharpCompress/Archives/Tar/TarArchive.cs), etc. in favor of centralized [Factories/ArchiveFactory.cs](src/SharpCompress/Factories/ArchiveFactory.cs)
2. **Complete async implementation** in [Writers/Zip/ZipWriter.cs](src/SharpCompress/Writers/Zip/ZipWriter.cs) and other writers that currently use sync-over-async, implementing true async I/O throughout the writer hierarchy
3. **Unify options classes** by making [Common/ExtractionOptions.cs](src/SharpCompress/Common/ExtractionOptions.cs) inherit from `OptionsBase` and adding progress reporting to extraction methods consistently
4. **Clarify GZip semantics** in [Archives/GZip/GZipArchive.cs](src/SharpCompress/Archives/GZip/GZipArchive.cs) by adding XML documentation explaining single-entry limitation and relationship to GZip compression used in Tar.gz
## Further Considerations
1. **Breaking changes roadmap** - Should we plan a major version (2.0) to remove deprecated factory methods, clean up `ArchiveType` enum (remove Arc/Arj or add full support), and consolidate naming patterns?
2. **Progress reporting consistency** - Should `IProgress<ArchiveExtractionProgress<IEntry>>` be added to all extraction extension methods or consolidated into options classes?
## Detailed Analysis
### Factory Pattern Issues
Three different factory patterns exist with overlapping functionality:
1. **Static Factories**: ArchiveFactory, ReaderFactory, WriterFactory
2. **Instance Factories**: IArchiveFactory, IReaderFactory, IWriterFactory
3. **Format-specific static methods**: Each archive class has static `Open` methods
**Example confusion:**
```csharp
// Three ways to open a Zip archive - which is recommended?
var archive1 = ArchiveFactory.Open("file.zip");
var archive2 = ZipArchive.Open("file.zip");
var archive3 = ArchiveFactory.AutoFactory.Open(fileInfo, options);
```
### Async Support Gaps
Base `IWriter` interface has async methods, but writer implementations provide minimal async support. Most writers just call synchronous methods:
```csharp
public virtual async Task WriteAsync(...)
{
    // Default implementation calls synchronous version
    Write(filename, source, modificationTime);
    await Task.CompletedTask.ConfigureAwait(false);
}
```
Real async implementations only in:
- `TarWriter` - Proper async implementation
- Most other writers use sync-over-async
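For contrast, a minimal sketch of what a genuinely async write path looks like. The class and member names below (`AsyncWriterSketch`, `OutputStream`, `WriteHeaderAsync`) are hypothetical, chosen only to show the shape, not SharpCompress's actual API.

```csharp
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

public abstract class AsyncWriterSketch
{
    protected abstract Stream OutputStream { get; }

    protected abstract Task WriteHeaderAsync(
        string filename, DateTime? modificationTime, CancellationToken ct);

    public virtual async Task WriteAsync(
        string filename, Stream source, DateTime? modificationTime,
        CancellationToken ct = default)
    {
        await WriteHeaderAsync(filename, modificationTime, ct).ConfigureAwait(false);
        // Stream the entry body without blocking a thread pool thread.
        await source.CopyToAsync(OutputStream, 81920, ct).ConfigureAwait(false);
    }
}
```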
### GZip Archive Special Case
GZip is treated as both a compression format and an archive format, but only supports single-entry archives:
```csharp
protected override GZipArchiveEntry CreateEntryInternal(...)
{
    if (Entries.Any())
    {
        throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
    }
    // ...
}
```
### Options Class Hierarchy
```
OptionsBase (LeaveStreamOpen, ArchiveEncoding)
├─ ReaderOptions (LookForHeader, Password, DisableCheckIncomplete, BufferSize, ExtensionHint, Progress)
├─ WriterOptions (CompressionType, CompressionLevel, Progress)
│ ├─ ZipWriterOptions (ArchiveComment, UseZip64)
│ ├─ TarWriterOptions (FinalizeArchiveOnClose, HeaderFormat)
│ └─ GZipWriterOptions (no additional properties)
└─ ExtractionOptions (standalone - Overwrite, ExtractFullPath, PreserveFileTime, PreserveAttributes)
```
**Issues:**
- `ExtractionOptions` doesn't inherit from `OptionsBase` - no encoding support during extraction
- Progress reporting inconsistency between readers and extraction
- Obsolete properties (`ChecksumIsValid`, `Version`) with unclear migration path
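A minimal sketch of the unification proposed in step 3, with member names taken from the hierarchy diagram above. This is not the current API; the stand-in class names and the defaults are assumptions (`LeaveStreamOpen = true` mirrors the compatibility note later in this document).

```csharp
using SharpCompress.Common;

// Sketch only: a stand-in base with the members from the diagram,
// and ExtractionOptions inheriting it instead of standing alone.
public class OptionsBaseSketch
{
    public bool LeaveStreamOpen { get; set; } = true;
    public ArchiveEncoding ArchiveEncoding { get; set; } = new();
}

public class ExtractionOptionsSketch : OptionsBaseSketch
{
    public bool Overwrite { get; set; }
    public bool ExtractFullPath { get; set; }
    public bool PreserveFileTime { get; set; }
    public bool PreserveAttributes { get; set; }
}
```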
### Implementation Priorities
**High Priority (Non-Breaking):**
1. Add API usage guide (Archive vs Reader, factory recommendations, async best practices)
2. Fix progress reporting consistency
3. Complete async implementation in writers
**Medium Priority (Next Major Version):**
1. Unify factory pattern - deprecate format-specific static `Open` methods
2. Clean up options classes - make `ExtractionOptions` inherit from `OptionsBase`
3. Clarify archive types - remove Arc/Arj from `ArchiveType` enum or add full support
4. Standardize naming across archive types
**Low Priority:**
1. Add BZip2 archive support similar to GZipArchive
2. Complete obsolete property cleanup with migration guide
### Backward Compatibility Strategy
**Safe (Non-Breaking) Changes:**
- Add new methods to interfaces (use default implementations)
- Add new options properties (with defaults)
- Add new factory methods
- Improve async implementations
- Add progress reporting support
**Breaking Changes to Avoid:**
- ❌ Removing format-specific `Open` methods (deprecate instead)
- ❌ Changing `LeaveStreamOpen` default (currently `true`)
- ❌ Removing obsolete properties before major version bump
- ❌ Changing return types or signatures of existing methods
**Deprecation Pattern:**
- Use `[Obsolete]` for one major version
- Use `[EditorBrowsable(EditorBrowsableState.Never)]` in next major version
- Remove in following major version
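Illustratively, the three-stage pattern in code (the methods are hypothetical, not actual SharpCompress signatures):

```csharp
using System;
using System.ComponentModel;

public static class DeprecationSketch
{
    // Major version N: compiler warning, still works.
    [Obsolete("Use ArchiveFactory.Open instead.")]
    public static void OpenLegacy() { }

    // Major version N+1: still callable, but hidden from IntelliSense.
    [Obsolete("Use ArchiveFactory.Open instead.")]
    [EditorBrowsable(EditorBrowsableState.Never)]
    public static void OpenLegacyHidden() { }

    // Major version N+2: the member is removed entirely.
}
```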

View File

@@ -1,155 +0,0 @@
# NuGet Release Workflow
This document describes the automated NuGet release workflow for SharpCompress.
## Overview
The `nuget-release.yml` workflow automatically builds, tests, and publishes SharpCompress packages to NuGet.org when:
- Changes are pushed to the `master` or `release` branch
- A version tag (format: `MAJOR.MINOR.PATCH`) is pushed
The workflow runs on both Windows and Ubuntu, but only the Windows build publishes to NuGet.
## How It Works
### Version Determination
Using C# code in the build project, the workflow automatically determines the version based on whether the commit is tagged:
1. **Tagged Release (Stable)**:
- If the current commit has a version tag (e.g., `0.42.1`)
- Uses the tag as the version number
- Published as a stable release
2. **Untagged Release (Prerelease)**:
- If the current commit is NOT tagged
- Creates a prerelease version based on the next minor version
- Format: `{NEXT_MINOR_VERSION}-beta.{COMMIT_COUNT}`
- Example: `0.43.0-beta.123` (if last tag is 0.42.x)
- Published as a prerelease to NuGet.org (Windows build only)
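A rough sketch of that rule follows; this is not the actual `build/Program.cs` code, and the git tag lookup and commit counting are elided.

```csharp
using System;

// Tagged commit -> the tag is the stable version.
// Untagged commit -> next minor of the last tag, plus a beta counter.
static string DetermineVersion(string? currentCommitTag, string lastTag, int commitCount)
{
    if (!string.IsNullOrEmpty(currentCommitTag))
    {
        return currentCommitTag;                 // e.g. "0.42.1" -> stable release
    }
    var last = Version.Parse(lastTag);           // e.g. last tag "0.42.1"
    return $"{last.Major}.{last.Minor + 1}.0-beta.{commitCount}";
}

Console.WriteLine(DetermineVersion(null, "0.42.1", 123));   // 0.43.0-beta.123
Console.WriteLine(DetermineVersion("0.43.0", "0.42.1", 0)); // 0.43.0
```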
### Workflow Steps
The workflow runs on a matrix of operating systems (Windows and Ubuntu):
1. **Checkout**: Fetches the repository with full history for version detection
2. **Setup .NET**: Installs .NET 10.0
3. **Determine Version**: Runs `determine-version` build target to check for tags and determine version
4. **Update Version**: Runs `update-version` build target to update the version in the project file
5. **Build and Test**: Runs the full build and test suite on both platforms
6. **Upload Artifacts**: Uploads the generated `.nupkg` files as workflow artifacts (separate for each OS)
7. **Push to NuGet**: (Windows only) Runs `push-to-nuget` build target to publish the package to NuGet.org using the API key
All version detection, file updates, and publishing logic is implemented in C# in the `build/Program.cs` file using build targets.
## Setup Requirements
### 1. NuGet API Key Secret
The workflow requires a `NUGET_API_KEY` secret to be configured in the repository settings:
1. Go to https://www.nuget.org/account/apikeys
2. Create a new API key with "Push" permission for the SharpCompress package
3. In GitHub, go to: **Settings** → **Secrets and variables** → **Actions**
4. Create a new secret named `NUGET_API_KEY` with the API key value
### 2. Branch Protection (Recommended)
Consider enabling branch protection rules for the `release` branch to ensure:
- Code reviews are required before merging
- Status checks pass before merging
- Only authorized users can push to the branch
## Usage
### Creating a Stable Release
There are two ways to trigger a stable release:
**Method 1: Push tag to trigger workflow**
1. Ensure all changes are committed on the `master` or `release` branch
2. Create and push a version tag:
```bash
git checkout master # or release
git tag 0.43.0
git push origin 0.43.0
```
3. The workflow will automatically trigger, build, test, and publish `SharpCompress 0.43.0` to NuGet.org (Windows build)
**Method 2: Tag after pushing to branch**
1. Ensure all changes are merged and pushed to the `master` or `release` branch
2. Create and push a version tag on the already-pushed commit:
```bash
git checkout master # or release
git tag 0.43.0
git push origin 0.43.0
```
3. The workflow will automatically trigger, build, test, and publish `SharpCompress 0.43.0` to NuGet.org (Windows build)
### Creating a Prerelease
1. Push changes to the `master` or `release` branch without tagging:
```bash
git checkout master # or release
git push origin master # or release
```
2. The workflow will automatically:
- Build and test the project on both Windows and Ubuntu
- Publish a prerelease version like `0.43.0-beta.456` to NuGet.org (Windows build)
## Troubleshooting
### Workflow Fails to Push to NuGet
- **Check the API Key**: Ensure `NUGET_API_KEY` is set correctly in repository secrets
- **Check API Key Permissions**: Verify the API key has "Push" permission for SharpCompress
- **Check API Key Expiration**: NuGet API keys may expire; create a new one if needed
### Version Conflict
If you see "Package already exists" errors:
- The workflow uses `--skip-duplicate` flag to handle this gracefully
- If you need to republish the same version, delete it from NuGet.org first (if allowed)
### Build or Test Failures
- The workflow will not push to NuGet if build or tests fail
- Check the workflow logs in GitHub Actions for details
- Fix the issues and push again
## Manual Package Creation
If you need to create a package manually without publishing:
```bash
dotnet run --project build/build.csproj -- publish
```
The package will be created in the `artifacts/` directory.
## Build Targets
The workflow uses the following C# build targets defined in `build/Program.cs`:
- **determine-version**: Detects version from git tags and outputs VERSION and PRERELEASE variables
- **update-version**: Updates VersionPrefix, AssemblyVersion, and FileVersion in the project file
- **push-to-nuget**: Pushes the generated NuGet packages to NuGet.org (requires NUGET_API_KEY)
These targets can be run manually for testing:
```bash
# Determine the version
dotnet run --project build/build.csproj -- determine-version
# Update version in project file
VERSION=0.43.0 dotnet run --project build/build.csproj -- update-version
# Push to NuGet (requires NUGET_API_KEY environment variable)
NUGET_API_KEY=your-key dotnet run --project build/build.csproj -- push-to-nuget
```
## Related Files
- `.github/workflows/nuget-release.yml` - The workflow definition
- `build/Program.cs` - Build script with version detection and publishing logic
- `src/SharpCompress/SharpCompress.csproj` - Project file with version information

View File

@@ -1,120 +0,0 @@
# Testing Guide for NuGet Release Workflow
This document describes how to test the NuGet release workflow.
## Testing Strategy
Since this workflow publishes to NuGet.org and requires repository secrets, testing should be done carefully. The workflow runs on both Windows and Ubuntu, but only the Windows build publishes to NuGet.
## Pre-Testing Checklist
- [x] Workflow YAML syntax validated
- [x] Version determination logic tested locally
- [x] Version update logic tested locally
- [x] Build script works (`dotnet run --project build/build.csproj`)
## Manual Testing Steps
### 1. Test Prerelease Publishing (Recommended First Test)
This tests the workflow on untagged commits to the master or release branch.
**Steps:**
1. Ensure `NUGET_API_KEY` secret is configured in repository settings
2. Create a test commit on the `master` or `release` branch (e.g., update a comment or README)
3. Push to the `master` or `release` branch
4. Monitor the GitHub Actions workflow at: https://github.com/adamhathcock/sharpcompress/actions
5. Verify:
- Workflow triggers and runs successfully on both Windows and Ubuntu
- Version is determined correctly (e.g., `0.43.0-beta.XXX` if last tag is 0.42.x)
- Build and tests pass on both platforms
- Package artifacts are uploaded for both platforms
- Package is pushed to NuGet.org as prerelease (Windows build only)
**Expected Outcome:**
- A new prerelease package appears on NuGet.org: https://www.nuget.org/packages/SharpCompress/
- Package version follows pattern: `{NEXT_MINOR_VERSION}-beta.{COMMIT_COUNT}`
### 2. Test Tagged Release Publishing
This tests the workflow when a version tag is pushed.
**Steps:**
1. Prepare the `master` or `release` branch with all desired changes
2. Create a version tag (must be a pure semantic version like `MAJOR.MINOR.PATCH`):
```bash
git checkout master # or release
git tag 0.42.2
git push origin 0.42.2
```
3. Monitor the GitHub Actions workflow
4. Verify:
- Workflow triggers and runs successfully on both Windows and Ubuntu
- Version is determined as the tag (e.g., `0.42.2`)
- Build and tests pass on both platforms
- Package artifacts are uploaded for both platforms
- Package is pushed to NuGet.org as stable release (Windows build only)
**Expected Outcome:**
- A new stable release package appears on NuGet.org
- Package version matches the tag
### 3. Test Duplicate Package Handling
This tests the `--skip-duplicate` flag behavior.
**Steps:**
1. Push to the `release` branch without making changes
2. Monitor the workflow
3. Verify:
- Workflow runs but NuGet push is skipped with "duplicate" message
- No errors occur
### 4. Test Build Failure Handling
This tests that failed builds don't publish packages.
**Steps:**
1. Introduce a breaking change in a test or code
2. Push to the `release` branch
3. Verify:
- Workflow runs and detects the failure
- Build or test step fails
- NuGet push step is skipped
- No package is published
## Verification
After each test, verify:
1. **GitHub Actions Logs**: Check the workflow logs for any errors or warnings
2. **NuGet.org**: Verify the package appears with correct version and metadata
3. **Artifacts**: Download and inspect the uploaded artifacts
## Rollback/Cleanup
If testing produces unwanted packages:
1. **Prerelease packages**: Can be unlisted on NuGet.org (Settings → Unlist)
2. **Stable packages**: Cannot be deleted, only unlisted (use test versions)
3. **Tags**: Can be deleted with:
```bash
git tag -d 0.42.2
git push origin :refs/tags/0.42.2
```
## Known Limitations
- NuGet.org does not allow re-uploading the same version
- Deleted packages on NuGet.org reserve the version number
- The workflow requires the `NUGET_API_KEY` secret to be set
## Success Criteria
The workflow is considered successful if:
- ✅ Prerelease versions are published correctly with beta suffix
- ✅ Tagged versions are published as stable releases
- ✅ Build and test failures prevent publishing
- ✅ Duplicate packages are handled gracefully
- ✅ Workflow logs are clear and informative

24
.github/workflows/dotnetcore.yml vendored Normal file
View File

@@ -0,0 +1,24 @@
name: SharpCompress

on: [push, pull_request]

jobs:
  build:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [windows-latest, ubuntu-latest]
    steps:
      - uses: actions/checkout@v1
      - uses: actions/setup-dotnet@v1
        with:
          dotnet-version: 5.0.101
      - run: dotnet run -p build/build.csproj
      - uses: actions/upload-artifact@v2
        with:
          name: ${{ matrix.os }}-sharpcompress.nupkg
          path: artifacts/*
      - uses: actions/upload-artifact@v2
        with:
          name: ${{ matrix.os }}-sharpcompress.snupkg
          path: artifacts/*

View File

@@ -1,61 +0,0 @@
name: NuGet Release

on:
  push:
    branches:
      - 'master'
      - 'release'
    tags:
      - '[0-9]+.[0-9]+.[0-9]+'
  pull_request:
    branches:
      - 'master'
      - 'release'

permissions:
  contents: read

jobs:
  build-and-publish:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [windows-latest, ubuntu-latest]
    steps:
      - uses: actions/checkout@v6
        with:
          fetch-depth: 0 # Fetch all history for versioning
      - uses: actions/setup-dotnet@v5
        with:
          dotnet-version: 10.0.x
      # Determine version using C# build target
      - name: Determine Version
        id: version
        run: dotnet run --project build/build.csproj -- determine-version
      # Update version in project file using C# build target
      - name: Update Version in Project
        run: dotnet run --project build/build.csproj -- update-version
        env:
          VERSION: ${{ steps.version.outputs.version }}
      # Build and test
      - name: Build and Test
        run: dotnet run --project build/build.csproj
      # Upload artifacts for verification
      - name: Upload NuGet Package
        uses: actions/upload-artifact@v6
        with:
          name: ${{ matrix.os }}-nuget-package
          path: artifacts/*.nupkg
      # Push to NuGet.org using C# build target (Windows only, not on PRs)
      - name: Push to NuGet
        if: success() && matrix.os == 'windows-latest' && github.event_name != 'pull_request'
        run: dotnet run --project build/build.csproj -- push-to-nuget
        env:
          NUGET_API_KEY: ${{ secrets.NUGET_API_KEY }}

12
.gitignore vendored
View File

@@ -4,23 +4,17 @@ _ReSharper.SharpCompress/
bin/
*.suo
*.user
tests/TestArchives/Scratch/
tests/TestArchives/Scratch2/
TestArchives/Scratch/
TestArchives/Scratch2/
TestResults/
*.nupkg
packages/*/
project.lock.json
tests/TestArchives/Scratch
tests/TestArchives/*/Scratch
tests/TestArchives/*/Scratch2
.vs
tools
.vscode
.idea/
artifacts/
.DS_Store
*.snupkg
# BenchmarkDotNet artifacts
BenchmarkDotNet.Artifacts/
**/BenchmarkDotNet.Artifacts/

View File

@@ -1,9 +0,0 @@
{
  "recommendations": [
    "ms-dotnettools.csdevkit",
    "ms-dotnettools.csharp",
    "ms-dotnettools.vscode-dotnet-runtime",
    "csharpier.csharpier-vscode",
    "formulahendry.dotnet-test-explorer"
  ]
}

97
.vscode/launch.json vendored
View File

@@ -1,97 +0,0 @@
{
  "version": "0.2.0",
  "configurations": [
    {
      "name": "Debug Tests (net10.0)",
      "type": "coreclr",
      "request": "launch",
      "preLaunchTask": "build",
      "program": "dotnet",
      "args": [
        "test",
        "${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
        "-f",
        "net10.0",
        "--no-build",
        "--verbosity=normal"
      ],
      "cwd": "${workspaceFolder}",
      "console": "internalConsole",
      "stopAtEntry": false
    },
    {
      "name": "Debug Specific Test (net10.0)",
      "type": "coreclr",
      "request": "launch",
      "preLaunchTask": "build",
      "program": "dotnet",
      "args": [
        "test",
        "${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
        "-f",
        "net10.0",
        "--no-build",
        "--filter",
        "FullyQualifiedName~${input:testName}"
      ],
      "cwd": "${workspaceFolder}",
      "console": "internalConsole",
      "stopAtEntry": false
    },
    {
      "name": "Debug Performance Tests",
      "type": "coreclr",
      "request": "launch",
      "preLaunchTask": "build",
      "program": "dotnet",
      "args": [
        "run",
        "--project",
        "${workspaceFolder}/tests/SharpCompress.Performance/SharpCompress.Performance.csproj",
        "--no-build"
      ],
      "cwd": "${workspaceFolder}",
      "console": "internalConsole",
      "stopAtEntry": false
    },
    {
      "name": "Debug Build Script",
      "type": "coreclr",
      "request": "launch",
      "program": "dotnet",
      "args": [
        "run",
        "--project",
        "${workspaceFolder}/build/build.csproj",
        "--",
        "${input:buildTarget}"
      ],
      "cwd": "${workspaceFolder}",
      "console": "internalConsole",
      "stopAtEntry": false
    }
  ],
  "inputs": [
    {
      "id": "testName",
      "type": "promptString",
      "description": "Enter test name or pattern (e.g., TestMethodName or ClassName)",
      "default": ""
    },
    {
      "id": "buildTarget",
      "type": "pickString",
      "description": "Select build target",
      "options": [
        "clean",
        "restore",
        "build",
        "test",
        "format",
        "publish",
        "default"
      ],
      "default": "build"
    }
  ]
}

29
.vscode/settings.json vendored
View File

@@ -1,29 +0,0 @@
{
"dotnet.defaultSolution": "SharpCompress.sln",
"files.exclude": {
"**/bin": true,
"**/obj": true
},
"files.watcherExclude": {
"**/bin/**": true,
"**/obj/**": true,
"**/artifacts/**": true
},
"search.exclude": {
"**/bin": true,
"**/obj": true,
"**/artifacts": true
},
"editor.formatOnSave": false,
"[csharp]": {
"editor.defaultFormatter": "csharpier.csharpier-vscode",
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.fixAll": "explicit"
}
},
"csharpier.enableDebugLogs": false,
"omnisharp.enableRoslynAnalyzers": true,
"omnisharp.enableEditorConfigSupport": true,
"dotnet-test-explorer.testProjectPath": "tests/**/*.csproj"
}

178
.vscode/tasks.json vendored
View File

@@ -1,178 +0,0 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "build",
"command": "dotnet",
"type": "process",
"args": [
"build",
"${workspaceFolder}/SharpCompress.sln",
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary;ForceNoAlign"
],
"problemMatcher": "$msCompile",
"group": {
"kind": "build",
"isDefault": true
}
},
{
"label": "build-release",
"command": "dotnet",
"type": "process",
"args": [
"build",
"${workspaceFolder}/SharpCompress.sln",
"-c",
"Release",
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary;ForceNoAlign"
],
"problemMatcher": "$msCompile",
"group": "build"
},
{
"label": "build-library",
"command": "dotnet",
"type": "process",
"args": [
"build",
"${workspaceFolder}/src/SharpCompress/SharpCompress.csproj",
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary;ForceNoAlign"
],
"problemMatcher": "$msCompile",
"group": "build"
},
{
"label": "restore",
"command": "dotnet",
"type": "process",
"args": [
"restore",
"${workspaceFolder}/SharpCompress.sln"
],
"problemMatcher": "$msCompile"
},
{
"label": "clean",
"command": "dotnet",
"type": "process",
"args": [
"clean",
"${workspaceFolder}/SharpCompress.sln"
],
"problemMatcher": "$msCompile"
},
{
"label": "test",
"command": "dotnet",
"type": "process",
"args": [
"test",
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
"--no-build",
"--verbosity=normal"
],
"problemMatcher": "$msCompile",
"group": {
"kind": "test",
"isDefault": true
},
"dependsOn": "build"
},
{
"label": "test-net10",
"command": "dotnet",
"type": "process",
"args": [
"test",
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
"-f",
"net10.0",
"--no-build",
"--verbosity=normal"
],
"problemMatcher": "$msCompile",
"group": "test",
"dependsOn": "build"
},
{
"label": "test-net48",
"command": "dotnet",
"type": "process",
"args": [
"test",
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
"-f",
"net48",
"--no-build",
"--verbosity=normal"
],
"problemMatcher": "$msCompile",
"group": "test",
"dependsOn": "build"
},
{
"label": "format",
"command": "dotnet",
"type": "process",
"args": [
"csharpier",
"."
],
"problemMatcher": []
},
{
"label": "format-check",
"command": "dotnet",
"type": "process",
"args": [
"csharpier",
"check",
"."
],
"problemMatcher": []
},
{
"label": "run-build-script",
"command": "dotnet",
"type": "process",
"args": [
"run",
"--project",
"${workspaceFolder}/build/build.csproj"
],
"problemMatcher": "$msCompile"
},
{
"label": "pack",
"command": "dotnet",
"type": "process",
"args": [
"pack",
"${workspaceFolder}/src/SharpCompress/SharpCompress.csproj",
"-c",
"Release",
"-o",
"${workspaceFolder}/artifacts/"
],
"problemMatcher": "$msCompile",
"dependsOn": "build-release"
},
{
"label": "performance-tests",
"command": "dotnet",
"type": "process",
"args": [
"run",
"--project",
"${workspaceFolder}/tests/SharpCompress.Performance/SharpCompress.Performance.csproj",
"-c",
"Release"
],
"problemMatcher": "$msCompile"
}
]
}

181
AGENTS.md
View File

@@ -1,181 +0,0 @@
---
description: 'Guidelines for building SharpCompress - A C# compression library'
applyTo: '**/*.cs'
---
# SharpCompress Development
## About SharpCompress
SharpCompress is a pure C# compression library supporting multiple archive formats (Zip, Tar, GZip, BZip2, 7Zip, Rar, LZip, XZ, ZStandard) for .NET Framework 4.6.2, .NET Standard 2.1, .NET 6.0, and .NET 8.0. The library provides both seekable Archive APIs and forward-only Reader/Writer APIs for streaming scenarios.
## C# Instructions
- Always use the latest version of C# (currently C# 13) and its features.
- Write clear and concise comments for each function.
- Follow the existing code style and patterns in the codebase.
## General Instructions
- **Agents should NEVER commit to git** - Agents should stage files and leave committing to the user. Only create commits when the user explicitly requests them.
- Make only high confidence suggestions when reviewing code changes.
- Write code with good maintainability practices, including comments on why certain design decisions were made.
- Handle edge cases and write clear exception handling.
- For libraries or external dependencies, mention their usage and purpose in comments.
- Preserve backward compatibility when making changes to public APIs.
## Naming Conventions
- Follow PascalCase for component names, method names, and public members.
- Use camelCase for private fields and local variables.
- Prefix interface names with "I" (e.g., IUserService).
## Code Formatting
**Copilot agents: You MUST run the `format` task after making code changes to ensure consistency.**
- Use CSharpier for code formatting to ensure consistent style across the project
- CSharpier is configured as a local tool in `.config/dotnet-tools.json`
### Commands
1. **Restore tools** (first time only):
```bash
dotnet tool restore
```
2. **Check if files are formatted correctly** (doesn't modify files):
```bash
dotnet csharpier check .
```
- Exit code 0: All files are properly formatted
- Exit code 1: Some files need formatting (will show which files and differences)
3. **Format files** (modifies files):
```bash
dotnet csharpier format .
```
- Formats all files in the project to match CSharpier style
- Run from project root directory
4. **Configure your IDE** to format on save using CSharpier for the best experience
### Additional Notes
- The project also uses `.editorconfig` for editor settings (indentation, encoding, etc.)
- Let CSharpier handle code style while `.editorconfig` handles editor behavior
- Always run `dotnet csharpier check .` before committing to verify formatting
## Project Setup and Structure
- The project targets multiple frameworks: .NET Framework 4.6.2, .NET Standard 2.1, .NET 6.0, and .NET 8.0
- Main library is in `src/SharpCompress/`
- Tests are in `tests/SharpCompress.Test/`
- Performance tests are in `tests/SharpCompress.Performance/`
- Test archives are in `tests/TestArchives/`
- Build project is in `build/`
- Use `dotnet build` to build the solution
- Use `dotnet test` to run tests
- Solution file: `SharpCompress.sln`
### Directory Structure
```
src/SharpCompress/
├── Archives/ # IArchive implementations (Zip, Tar, Rar, 7Zip, GZip)
├── Readers/ # IReader implementations (forward-only)
├── Writers/ # IWriter implementations (forward-only)
├── Compressors/ # Low-level compression streams (BZip2, Deflate, LZMA, etc.)
├── Factories/ # Format detection and factory pattern
├── Common/ # Shared types (ArchiveType, Entry, Options)
├── Crypto/ # Encryption implementations
└── IO/ # Stream utilities and wrappers
tests/SharpCompress.Test/
├── Zip/, Tar/, Rar/, SevenZip/, GZip/, BZip2/ # Format-specific tests
├── TestBase.cs # Base test class with helper methods
└── TestArchives/ # Test data (not checked into main test project)
```
### Factory Pattern
All format types implement factory interfaces (`IArchiveFactory`, `IReaderFactory`, `IWriterFactory`) for auto-detection:
- `ReaderFactory.Open()` - Auto-detects format by probing stream
- `WriterFactory.Open()` - Creates writer for specified `ArchiveType`
- Factories located in: `src/SharpCompress/Factories/`
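A minimal sketch of both factory styles (file names are placeholders; namespaces are assumed from the directory layout above, and `WriteAll` is the directory helper shown in the usage docs):
```csharp
using System.IO;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Writers;

// Reading: probe the stream and let the factory pick the right IReader
using (Stream input = File.OpenRead("unknown-archive.bin"))
using (var reader = ReaderFactory.Open(input))
{
    while (reader.MoveToNextEntry())
    {
        // reader.Entry.Key, reader.OpenEntryStream(), etc.
    }
}

// Writing: the caller picks the ArchiveType explicitly
using (Stream output = File.OpenWrite("out.tar.gz"))
using (var writer = WriterFactory.Open(output, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)))
{
    writer.WriteAll("D:\\temp", "*", SearchOption.AllDirectories);
}
```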
## Nullable Reference Types
- Declare variables non-nullable, and check for `null` at entry points.
- Always use `is null` or `is not null` instead of `== null` or `!= null`.
- Trust the C# null annotations and don't add null checks when the type system says a value cannot be null.
## SharpCompress-Specific Guidelines
### Supported Formats
SharpCompress supports multiple archive and compression formats:
- **Archive Formats**: Zip, Tar, 7Zip, Rar (read-only)
- **Compression**: DEFLATE, BZip2, LZMA/LZMA2, PPMd, ZStandard (decompress only), Deflate64 (decompress only)
- **Combined Formats**: Tar.GZip, Tar.BZip2, Tar.LZip, Tar.XZ, Tar.ZStandard
- See [docs/FORMATS.md](docs/FORMATS.md) for complete format support matrix
### Stream Handling Rules
- **Disposal**: As of version 0.21, SharpCompress closes wrapped streams by default
- Use `ReaderOptions` or `WriterOptions` with `LeaveStreamOpen = true` to control stream disposal
- Use `NonDisposingStream` wrapper when working with compression streams directly to prevent disposal
- Always dispose of readers, writers, and archives in `using` blocks
- For forward-only operations, use Reader/Writer APIs; for random access, use Archive APIs
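A minimal sketch of the disposal rules above (this assumes `ReaderFactory.Open` accepts a `ReaderOptions` overload, as the other open methods in the docs do):
```csharp
using (var stream = File.OpenRead("archive.zip"))
using (var reader = ReaderFactory.Open(stream, new ReaderOptions { LeaveStreamOpen = true }))
{
    while (reader.MoveToNextEntry())
    {
        // process entries
    }
} // reader is disposed here; stream stays open because LeaveStreamOpen was set
```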
### Async/Await Patterns
- All I/O operations support async/await with `CancellationToken`
- Async methods follow the naming convention: `MethodNameAsync`
- Key async methods:
- `WriteEntryToAsync` - Extract entry asynchronously
- `WriteAllToDirectoryAsync` - Extract all entries asynchronously
- `WriteAsync` - Write entry asynchronously
- `WriteAllAsync` - Write directory asynchronously
- `OpenEntryStreamAsync` - Open entry stream asynchronously
- Always provide `CancellationToken` parameter in async methods
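For instance, a sketch of forward-only async extraction following these conventions (it assumes `MoveToNextEntryAsync` and `OpenEntryStreamAsync` take a `CancellationToken`, per the rule above):
```csharp
public static async Task ExtractAllAsync(Stream input, Stream destination, CancellationToken ct)
{
    using (var reader = ReaderFactory.Open(input))
    {
        while (await reader.MoveToNextEntryAsync(ct))
        {
            if (!reader.Entry.IsDirectory)
            {
                using (var entryStream = await reader.OpenEntryStreamAsync(ct))
                {
                    // CopyToAsync is standard .NET and honors the token
                    await entryStream.CopyToAsync(destination, ct);
                }
            }
        }
    }
}
```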
### Archive APIs vs Reader/Writer APIs
- **Archive API**: Use for random access with seekable streams (e.g., `ZipArchive`, `TarArchive`)
- **Reader API**: Use for forward-only reading on non-seekable streams (e.g., `ZipReader`, `TarReader`)
- **Writer API**: Use for forward-only writing on streams (e.g., `ZipWriter`, `TarWriter`)
- 7Zip only supports Archive API due to format limitations
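Side by side, the two access styles look like this (paths are placeholders):
```csharp
// Archive API: random access over a seekable stream
using (var archive = ZipArchive.Open("file.zip"))
{
    foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
    {
        // entries can be visited in any order
    }
}

// Reader API: forward-only; fine for non-seekable streams
using (var reader = ReaderFactory.Open(File.OpenRead("file.zip")))
{
    while (reader.MoveToNextEntry())
    {
        // entries arrive strictly in stream order
    }
} // the wrapped FileStream is closed here (default disposal behavior)
```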
### Tar-Specific Considerations
- Tar format requires file size in the header
- If no size is specified to TarWriter and the stream is not seekable, an exception will be thrown
- Tar combined with compression (GZip, BZip2, LZip, XZ) is supported
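A sketch of the size rule (this assumes a `TarWriter.Write` overload taking an explicit size and a `TarWriterOptions(CompressionType, finalizeArchiveOnClose)` constructor; verify against the current API before relying on it):
```csharp
using (var tarStream = File.OpenWrite("out.tar"))
using (var writer = new TarWriter(tarStream, new TarWriterOptions(CompressionType.None, finalizeArchiveOnClose: true)))
using (var source = File.OpenRead("data.bin"))
{
    // Supplying the size up front means the writer never has to seek back
    // and patch the header, so this also works on non-seekable output streams.
    writer.Write("data.bin", source, DateTime.Now, source.Length);
}
```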
### Zip-Specific Considerations
- Supports Zip64 for large files (seekable streams only)
- Supports PKWare and WinZip AES encryption
- Multiple compression methods: None, Shrink, Reduce, Implode, DEFLATE, Deflate64, BZip2, LZMA, PPMd
- Encrypted LZMA is not supported
### Performance Considerations
- For large files, use Reader/Writer APIs with non-seekable streams to avoid loading entire file in memory
- Leverage async I/O for better scalability
- Consider compression level trade-offs (speed vs. size)
- Use appropriate buffer sizes for stream operations
## Testing
- Always include test cases for critical paths of the application.
- Test with multiple archive formats when making changes to core functionality.
- Include tests for both Archive and Reader/Writer APIs when applicable.
- Test async operations with cancellation tokens.
- Do not emit "Act", "Arrange" or "Assert" comments.
- Copy existing style in nearby files for test method names and capitalization.
- Use test archives from `tests/TestArchives` directory for consistency.
- Test stream disposal and `LeaveStreamOpen` behavior.
- Test edge cases: empty archives, large files, corrupted archives, encrypted archives.
### Test Organization
- Base class: `TestBase` - Provides `TEST_ARCHIVES_PATH`, `SCRATCH_FILES_PATH`, temp directory management
- Framework: xUnit with AwesomeAssertions
- Test archives: `tests/TestArchives/` - Use existing archives, don't create new ones unnecessarily
- Match naming style of nearby test files
## Common Pitfalls
1. **Don't mix Archive and Reader APIs** - Archive needs seekable stream, Reader doesn't
2. **Solid archives (Rar, 7Zip)** - Use `ExtractAllEntries()` for best performance, not individual entry extraction
3. **Stream disposal** - Always set `LeaveStreamOpen` explicitly when needed (default is to close)
4. **Tar + non-seekable stream** - Must provide file size or it will throw
5. **Format detection** - Use `ReaderFactory.Open()` for auto-detection, test with actual archive files

View File

@@ -1,17 +0,0 @@
<Project>
<PropertyGroup>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<AnalysisMode>Recommended</AnalysisMode>
<WarningsAsErrors>true</WarningsAsErrors>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<CodeAnalysisTreatWarningsAsErrors>true</CodeAnalysisTreatWarningsAsErrors>
<EnforceCodeStyleInBuild>true</EnforceCodeStyleInBuild>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<RunAnalyzersDuringLiveAnalysis>False</RunAnalyzersDuringLiveAnalysis>
<RunAnalyzersDuringBuild>False</RunAnalyzersDuringBuild>
<ManagePackageVersionsCentrally>true</ManagePackageVersionsCentrally>
<RestorePackagesWithLockFile>true</RestorePackagesWithLockFile>
<CentralPackageTransitivePinningEnabled>true</CentralPackageTransitivePinningEnabled>
</PropertyGroup>
</Project>

View File

@@ -1,25 +0,0 @@
<Project>
<ItemGroup>
<PackageVersion Include="BenchmarkDotNet" Version="0.14.0" />
<PackageVersion Include="Bullseye" Version="6.1.0" />
<PackageVersion Include="AwesomeAssertions" Version="9.3.0" />
<PackageVersion Include="Glob" Version="1.1.9" />
<PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.15" />
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="10.0.0" />
<PackageVersion Include="Microsoft.NET.ILLink.Task" Version="10.0.0" />
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="18.0.1" />
<PackageVersion Include="Mono.Posix.NETStandard" Version="1.0.0" />
<PackageVersion Include="SimpleExec" Version="13.0.0" />
<PackageVersion Include="System.Text.Encoding.CodePages" Version="10.0.0" />
<PackageVersion Include="System.Buffers" Version="4.6.1" />
<PackageVersion Include="System.Memory" Version="4.6.3" />
<PackageVersion Include="xunit" Version="2.9.3" />
<PackageVersion Include="xunit.runner.visualstudio" Version="3.1.5" />
<GlobalPackageReference Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
<GlobalPackageReference Include="Microsoft.NETFramework.ReferenceAssemblies" Version="1.0.3" />
<GlobalPackageReference
Include="Microsoft.VisualStudio.Threading.Analyzers"
Version="17.14.15"
/>
</ItemGroup>
</Project>

View File

@@ -10,43 +10,23 @@
| Archive Format | Compression Format(s) | Compress/Decompress | Archive API | Reader API | Writer API |
| ---------------------- | ------------------------------------------------- | ------------------- | --------------- | ---------- | ------------- |
| Ace | None | Decompress | N/A | AceReader | N/A |
| Arc | None, Packed, Squeezed, Crunched | Decompress | N/A | ArcReader | N/A |
| Arj | None | Decompress | N/A | ArjReader | N/A |
| Rar | Rar | Decompress | RarArchive | RarReader | N/A |
| Zip (2) | None, Shrink, Reduce, Implode, DEFLATE, Deflate64, BZip2, LZMA/LZMA2, PPMd | Both | ZipArchive | ZipReader | ZipWriter |
| Rar | Rar | Decompress (1) | RarArchive | RarReader | N/A |
| Zip (2) | None, DEFLATE, Deflate64, BZip2, LZMA/LZMA2, PPMd | Both | ZipArchive | ZipReader | ZipWriter |
| Tar | None | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.GZip | DEFLATE | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.BZip2 | BZip2 | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.Zstandard | ZStandard | Decompress | TarArchive | TarReader | N/A |
| Tar.LZip | LZMA | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.XZ | LZMA2 | Decompress | TarArchive | TarReader | TarWriter (3) |
| GZip (single file) | DEFLATE | Both | GZipArchive | GZipReader | GZipWriter |
| 7Zip (4) | LZMA, LZMA2, BZip2, PPMd, BCJ, BCJ2, Deflate | Decompress | SevenZipArchive | N/A | N/A |
| LZip (single file) (5) | LZip (LZMA) | Both | LZipArchive | LZipReader | LZipWriter |
1. SOLID Rars are only supported in the RarReader API.
2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading/writing is supported but only with seekable streams as the Zip spec doesn't support Zip64 data in post data descriptors. Deflate64 is only supported for reading. See [Zip Format Notes](#zip-format-notes) for details on multi-volume archives and streaming behavior.
2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading/writing is supported but only with seekable streams as the Zip spec doesn't support Zip64 data in post data descriptors. Deflate64 is only supported for reading.
3. The Tar format requires a file size in the header. If no size is specified to the TarWriter and the stream is not seekable, then an exception will be thrown.
4. The 7Zip format doesn't allow for reading as a forward-only stream so 7Zip is only supported through the Archive API. See [7Zip Format Notes](#7zip-format-notes) for details on async extraction behavior.
4. The 7Zip format doesn't allow for reading as a forward-only stream so 7Zip is only supported through the Archive API
5. LZip has no support for extra data like the file name or timestamp. There is a default filename used when looking at the entry Key on the archive.
### Zip Format Notes
- Multi-volume/split ZIP archives require ZipArchive (seekable streams) as ZipReader cannot seek across volume files.
- ZipReader processes entries from LocalEntry headers (which include directory entries ending with `/`) and intentionally skips DirectoryEntry headers from the central directory, as they are redundant in streaming mode - all entry data comes from LocalEntry headers which ZipReader has already processed.
### 7Zip Format Notes
- **Async Extraction Performance**: When using async extraction methods (e.g., `ExtractAllEntries()` with `MoveToNextEntryAsync()`), each file creates its own decompression stream to avoid state corruption in the LZMA decoder. This is less efficient than synchronous extraction, which can reuse a single decompression stream for multiple files in the same folder.
**Performance Impact**: For archives with many small files in the same compression folder, async extraction will be slower than synchronous extraction because it must:
1. Create a new LZMA decoder for each file
2. Skip through the decompressed data to reach each file's starting position
**Recommendation**: For best performance with 7Zip archives, use synchronous extraction methods (`MoveToNextEntry()` and `WriteEntryToDirectory()`) when possible. Use async methods only when you need to avoid blocking the thread (e.g., in UI applications or async-only contexts).
**Technical Details**: 7Zip archives group files into "folders" (compression units), where all files in a folder share one continuous LZMA-compressed stream. The LZMA decoder maintains internal state (dictionary window, decoder positions) that assumes sequential, non-interruptible processing. Async operations can yield control during awaits, which would corrupt this shared state. To avoid this, async extraction creates a fresh decoder stream for each file.
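Following that recommendation, a sketch of synchronous solid extraction using the calls named above (paths are placeholders):
```csharp
using (var archive = SevenZipArchive.Open("archive.7z"))
using (var reader = archive.ExtractAllEntries())
{
    // one forward pass over the solid stream reuses a single decompression stream
    while (reader.MoveToNextEntry())
    {
        if (!reader.Entry.IsDirectory)
        {
            reader.WriteEntryToDirectory(@"C:\temp", new ExtractionOptions
            {
                ExtractFullPath = true,
                Overwrite = true
            });
        }
    }
}
```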
## Compression Streams
For those who want to directly compress/decompress bits, the single-file formats are represented here as well. However, BZip2, LZip and XZ have no metadata (GZip has a little), so using them without something like a Tar file makes little sense.
@@ -62,7 +42,6 @@ For those who want to directly compress/decompress bits. The single file formats
| ADCStream | Decompress |
| LZipStream | Both |
| XZStream | Decompress |
| ZStandardStream | Decompress |
## Archive Formats vs Compression

View File

@@ -1,9 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<packageSourceMapping>
<!-- key value for <packageSource> should match key values from <packageSources> element -->
<packageSource key="nuget.org">
<package pattern="*" />
</packageSource>
</packageSourceMapping>
</configuration>

162
README.md
View File

@@ -1,20 +1,17 @@
# SharpCompress
SharpCompress is a compression library in pure C# for .NET Framework 4.8, .NET 8.0 and .NET 10.0 that can unrar, un7zip, unzip, untar, unbzip2, ungzip, unlzip, unzstd, unarc and unarj with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip is implemented.
SharpCompress is a compression library in pure C# for .NET Standard 2.0, 2.1, .NET Core 3.1 and .NET 5.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
**NEW:** All I/O operations now support async/await for improved performance and scalability. See the [USAGE.md](docs/USAGE.md#async-examples) for examples.
GitHub Actions Build -
[![SharpCompress](https://github.com/adamhathcock/sharpcompress/actions/workflows/dotnetcore.yml/badge.svg)](https://github.com/adamhathcock/sharpcompress/actions/workflows/dotnetcore.yml)
[![Static Badge](https://img.shields.io/badge/API%20Docs-DNDocs-190088?logo=readme&logoColor=white)](https://dndocs.com/d/sharpcompress/api/index.html)
[![GitHubActions](https://github.com/adamhathcock/sharpcompress/workflows/SharpCompress/badge.svg)](https://circleci.com/gh/adamhathcock/sharpcompress)
## Need Help?
Post Issues on Github!
Check the [Supported Formats](docs/FORMATS.md) and [Basic Usage.](docs/USAGE.md)
Check the [Supported Formats](FORMATS.md) and [Basic Usage.](USAGE.md)
## Recommended Formats
@@ -22,12 +19,10 @@ In general, I recommend GZip (Deflate)/BZip2 (BZip)/LZip (LZMA) as the simplicit
Zip is okay, but it's a very hap-hazard format and the variation in headers and implementations makes it hard to get correct. Uses Deflate by default but supports a lot of compression methods.
RAR is not recommended as it's a proprietary format and the compression is closed source. Use Tar/LZip for LZMA
RAR is not recommended as it's a propriatory format and the compression is closed source. Use Tar/LZip for LZMA
7Zip and XZ both are overly complicated. 7Zip does not support streamable formats. XZ has known holes explained here: (http://www.nongnu.org/lzip/xz_inadequate.html) Use Tar/LZip for LZMA compression instead.
ZStandard is an efficient format that works well for streaming with a flexible compression level to tweak the speed/performance trade off you are looking for. We currently only implement decompression for ZStandard but as we leverage the [ZstdSharp](https://github.com/oleg-st/ZstdSharp) library one could likely add compression support without much trouble (PRs are welcome!).
## A Simple Request
Hi everyone. I hope you're using SharpCompress and finding it useful. Please give me feedback on what you'd like to see changed especially as far as usability goes. New feature suggestions are always welcome as well. I would also like to know what projects SharpCompress is being used in. I like seeing how it is used to give me ideas for future versions. Thanks!
@@ -38,12 +33,155 @@ Please do not email me directly to ask for help. If you think there is a real is
I'm always looking for help or ideas. Please submit code or email with ideas. Unfortunately, just letting me know you'd like to help is not enough because I really have no overall plan of what needs to be done. I'll definitely accept code submissions and add you as a member of the project!
## Notes
## TODOs (always lots)
* RAR 5 decryption support
* 7Zip writing
* Zip64 (Need writing and extend Reading)
* Multi-volume Zip support.
## Version Log
### Version 0.18
* [Now on Github releases](https://github.com/adamhathcock/sharpcompress/releases/tag/0.18)
### Version 0.17.1
* Fix - [Bug Fix for .NET Core on Windows](https://github.com/adamhathcock/sharpcompress/pull/257)
### Version 0.17.0
* New - Full LZip support! Can read and write LZip files and Tars inside LZip files. [Make LZip a first class citizen. #241](https://github.com/adamhathcock/sharpcompress/issues/241)
* New - XZ read support! Can read XZ files and Tars inside XZ files. [XZ in SharpCompress #91](https://github.com/adamhathcock/sharpcompress/issues/94)
* Fix - [Regression - zip file writing on seekable streams always assumed stream start was 0. Introduced with Zip64 writing.](https://github.com/adamhathcock/sharpcompress/issues/244)
* Fix - [Zip files with post-data descriptors can be properly skipped via decompression](https://github.com/adamhathcock/sharpcompress/issues/162)
### Version 0.16.2
* Fix [.NET 3.5 should support files and cryptography (was a regression from 0.16.0)](https://github.com/adamhathcock/sharpcompress/pull/251)
* Fix [Zip per entry compression customization wrote the wrong method into the zip archive](https://github.com/adamhathcock/sharpcompress/pull/249)
### Version 0.16.1
* Fix [Preserve compression method when getting a compressed stream](https://github.com/adamhathcock/sharpcompress/pull/235)
* Fix [RAR entry key normalization fix](https://github.com/adamhathcock/sharpcompress/issues/201)
### Version 0.16.0
* Breaking - [Progress Event Tracking rethink](https://github.com/adamhathcock/sharpcompress/pull/226)
* Update to VS2017 - [VS2017](https://github.com/adamhathcock/sharpcompress/pull/231) - Framework targets have been changed.
* New - [Add Zip64 writing](https://github.com/adamhathcock/sharpcompress/pull/211)
* [Fix invalid/mismatching Zip version flags.](https://github.com/adamhathcock/sharpcompress/issues/164) - This allows nuget/System.IO.Packaging to read zip files generated by SharpCompress
* [Fix 7Zip directory hiding](https://github.com/adamhathcock/sharpcompress/pull/215/files)
* [Verify RAR CRC headers](https://github.com/adamhathcock/sharpcompress/pull/220)
### Version 0.15.2
* [Fix invalid headers](https://github.com/adamhathcock/sharpcompress/pull/210) - fixes an issue creating large-ish zip archives that was introduced with zip64 reading.
### Version 0.15.1
* [Zip64 extending information and ZipReader](https://github.com/adamhathcock/sharpcompress/pull/206)
### Version 0.15.0
* [Add zip64 support for ZipArchive extraction](https://github.com/adamhathcock/sharpcompress/pull/205)
### Version 0.14.1
* [.NET Assemblies aren't strong named](https://github.com/adamhathcock/sharpcompress/issues/158)
* [Pkware encryption for Zip files didn't allow for multiple reads of an entry](https://github.com/adamhathcock/sharpcompress/issues/197)
* [GZip Entry couldn't be read multiple times](https://github.com/adamhathcock/sharpcompress/issues/198)
### Version 0.14.0
* [Support for LZip reading in for Tars](https://github.com/adamhathcock/sharpcompress/pull/191)
### Version 0.13.1
* [Fix null password on ReaderFactory. Fix null options on SevenZipArchive](https://github.com/adamhathcock/sharpcompress/pull/188)
* [Make PpmdProperties lazy to avoid unnecessary allocations.](https://github.com/adamhathcock/sharpcompress/pull/185)
### Version 0.13.0
* Breaking change: Big refactor of Options on API.
* 7Zip supports Deflate
### Version 0.12.4
* Forward only zip issue fix https://github.com/adamhathcock/sharpcompress/issues/160
* Try to fix frameworks again by copying targets from JSON.NET
### Version 0.12.3
* 7Zip fixes https://github.com/adamhathcock/sharpcompress/issues/73
* Maybe all profiles will work with project.json now
### Version 0.12.2
* Support Profile 259 again
### Version 0.12.1
* Support Silverlight 5
### Version 0.12.0
* .NET Core RTM!
* Bug fix for Tar long paths
### Version 0.11.6
* Bug fix for global header in Tar
* Writers now have a leaveOpen `bool` overload. They won't close streams if not-requested to.
### Version 0.11.5
* Bug fix in Skip method
### Version 0.11.4
* SharpCompress is now endian neutral (matters for Mono platforms)
* Fix for Inflate (need to change implementation)
* Fixes for RAR detection
### Version 0.11.1
* Added Cancel on IReader
* Removed .NET 2.0 support and LinqBridge dependency
### Version 0.11
* Been over a year, contains mainly fixes from contributors!
* Possible breaking change: ArchiveEncoding is UTF8 by default now.
* TAR supports writing long names using longlink
* RAR Protect Header added
### Version 0.10.3
* Finally fixed Disposal issue when creating a new archive with the Archive API
### Version 0.10.2
* Fixed Rar Header reading for invalid extended time headers.
* Windows Store assembly is now strong named
* Known issues with Long Tar names being worked on
* Updated to VS2013
* Portable targets SL5 and Windows Phone 8 (up from SL4 and WP7)
### Version 0.10.1
* Fixed 7Zip extraction performance problem
### Version 0.10:
* Added support for RAR Decryption (thanks to https://github.com/hrasyid)
* Embedded some BouncyCastle crypto classes to allow RAR Decryption and Winzip AES Decryption in Portable and Windows Store DLLs
* Built in Release (I think)
XZ implementation based on: https://github.com/sambott/XZ.NET by @sambott
XZ BCJ filters support contributed by Louis-Michel Bergeron, on behalf of aDolus Technology Inc. - 2022
7Zip implementation based on: https://code.google.com/p/managed-lzma/
LICENSE

View File

@@ -3,6 +3,8 @@ Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 15
VisualStudioVersion = 15.0.26430.6
MinimumVisualStudioVersion = 10.0.40219.1
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{F18F1765-4A02-42FD-9BEF-F0E2FCBD9D17}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{3C5BE746-03E5-4895-9988-0B57F162F86C}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "tests", "tests", "{0F0901FF-E8D9-426A-B5A2-17C7F47C1529}"
@@ -13,21 +15,6 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SharpCompress.Test", "tests
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "build", "build\build.csproj", "{D4D613CB-5E94-47FB-85BE-B8423D20C545}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{CDB42573-7D22-4490-BA12-1B7FB99CE7FB}"
ProjectSection(SolutionItems) = preProject
Directory.Build.props = Directory.Build.props
global.json = global.json
.editorconfig = .editorconfig
.gitignore = .gitignore
Directory.Packages.props = Directory.Packages.props
NuGet.config = NuGet.config
.github\workflows\nuget-release.yml = .github\workflows\nuget-release.yml
README.md = README.md
AGENTS.md = AGENTS.md
EndProjectSection
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SharpCompress.Performance", "tests\SharpCompress.Performance\SharpCompress.Performance.csproj", "{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -46,10 +33,6 @@ Global
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Debug|Any CPU.Build.0 = Debug|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Release|Any CPU.ActiveCfg = Release|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Release|Any CPU.Build.0 = Release|Any CPU
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Debug|Any CPU.Build.0 = Debug|Any CPU
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Release|Any CPU.ActiveCfg = Release|Any CPU
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@@ -57,6 +40,5 @@ Global
GlobalSection(NestedProjects) = preSolution
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998} = {3C5BE746-03E5-4895-9988-0B57F162F86C}
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F} = {0F0901FF-E8D9-426A-B5A2-17C7F47C1529}
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17} = {0F0901FF-E8D9-426A-B5A2-17C7F47C1529}
EndGlobalSection
EndGlobal

View File

@@ -15,17 +15,17 @@
<s:String x:Key="/Default/CodeStyle/CodeCleanup/SilentCleanupProfile/@EntryValue">Basic Clean</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/APPLY_ON_COMPLETION/@EntryValue">True</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/ARGUMENTS_NAMED/@EntryValue">Positional</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/ARGUMENTS_NAMED/@EntryValue">Named</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_FOR/@EntryValue">Required</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_FOREACH/@EntryValue">Required</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_IFELSE/@EntryValue">Required</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_WHILE/@EntryValue">Required</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_FIRST_ARG_BY_PAREN/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_FIRST_ARG_BY_PAREN/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_LINQ_QUERY/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_ARGUMENT/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_ARRAY_AND_OBJECT_INITIALIZER/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_ARRAY_AND_OBJECT_INITIALIZER/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_CALLS_CHAIN/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_EXPRESSION/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_EXPRESSION/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_EXTENDS_LIST/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_FOR_STMT/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_PARAMETER/@EntryValue">True</s:Boolean>
@@ -42,7 +42,7 @@
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_IFELSE_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_USING_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_WHILE_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/INDENT_ANONYMOUS_METHOD_BLOCK/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/INDENT_ANONYMOUS_METHOD_BLOCK/@EntryValue">True</s:Boolean>
<s:Int64 x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/KEEP_BLANK_LINES_IN_CODE/@EntryValue">1</s:Int64>
<s:Int64 x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/KEEP_BLANK_LINES_IN_DECLARATIONS/@EntryValue">1</s:Int64>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_ACCESSOR_ATTRIBUTE_ON_SAME_LINE_EX/@EntryValue">NEVER</s:String>
@@ -50,12 +50,12 @@
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_CONSTRUCTOR_INITIALIZER_ON_SAME_LINE/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_FIELD_ATTRIBUTE_ON_SAME_LINE/@EntryValue">False</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_FIELD_ATTRIBUTE_ON_SAME_LINE_EX/@EntryValue">NEVER</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_ACCESSORHOLDER_ON_SINGLE_LINE/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_ACCESSORHOLDER_ON_SINGLE_LINE/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_ACCESSOR_ATTRIBUTE_ON_SAME_LINE/@EntryValue">False</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_EMBEDDED_STATEMENT_ON_SAME_LINE/@EntryValue">NEVER</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_INITIALIZER_ON_SINGLE_LINE/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_INITIALIZER_ON_SINGLE_LINE/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_WHILE_ON_NEW_LINE/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_WHILE_ON_NEW_LINE/@EntryValue">True</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SIMPLE_EMBEDDED_STATEMENT_STYLE/@EntryValue">LINE_BREAK</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SPACE_AFTER_TYPECAST_PARENTHESES/@EntryValue">False</s:Boolean>
@@ -67,22 +67,18 @@
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SPACE_BEFORE_TYPEOF_PARENTHESES/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/STICK_COMMENT/@EntryValue">False</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_ARGUMENTS_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_ARRAY_INITIALIZER_STYLE/@EntryValue">CHOP_ALWAYS</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_ARRAY_INITIALIZER_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_EXTENDS_LIST_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_LINES/@EntryValue">False</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_PARAMETERS_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForBuiltInTypes/@EntryValue">UseVar</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForOtherTypes/@EntryValue">UseVar</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForSimpleTypes/@EntryValue">UseVar</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForBuiltInTypes/@EntryValue">UseVarWhenEvident</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForOtherTypes/@EntryValue">UseVarWhenEvident</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForSimpleTypes/@EntryValue">UseVarWhenEvident</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateInstanceFields/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="_" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateStaticFields/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateStaticReadonly/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=StaticReadonly/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=15b5b1f1_002D457c_002D4ca6_002Db278_002D5615aedc07d3/@EntryIndexedValue">&lt;Policy&gt;&lt;Descriptor Staticness="Static" AccessRightKinds="Private" Description="Static readonly fields (private)"&gt;&lt;ElementKinds&gt;&lt;Kind Name="READONLY_FIELD" /&gt;&lt;/ElementKinds&gt;&lt;/Descriptor&gt;&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;&lt;/Policy&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=4a98fdf6_002D7d98_002D4f5a_002Dafeb_002Dea44ad98c70c/@EntryIndexedValue">&lt;Policy&gt;&lt;Descriptor Staticness="Instance" AccessRightKinds="Private" Description="Instance fields (private)"&gt;&lt;ElementKinds&gt;&lt;Kind Name="FIELD" /&gt;&lt;Kind Name="READONLY_FIELD" /&gt;&lt;/ElementKinds&gt;&lt;/Descriptor&gt;&lt;Policy Inspect="True" Prefix="_" Suffix="" Style="aaBb" /&gt;&lt;/Policy&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=c873eafb_002Dd57f_002D481d_002D8c93_002D77f6863c2f88/@EntryIndexedValue">&lt;Policy&gt;&lt;Descriptor Staticness="Static" AccessRightKinds="Protected, ProtectedInternal, Internal, Public, PrivateProtected" Description="Static readonly fields (not private)"&gt;&lt;ElementKinds&gt;&lt;Kind Name="READONLY_FIELD" /&gt;&lt;/ElementKinds&gt;&lt;/Descriptor&gt;&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;&lt;/Policy&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=f9fce829_002De6f4_002D4cb2_002D80f1_002D5497c44f51df/@EntryIndexedValue">&lt;Policy&gt;&lt;Descriptor Staticness="Static" AccessRightKinds="Private" Description="Static fields (private)"&gt;&lt;ElementKinds&gt;&lt;Kind Name="FIELD" /&gt;&lt;/ElementKinds&gt;&lt;/Descriptor&gt;&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;&lt;/Policy&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FCONSTANT/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FFUNCTION/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FVARIABLE/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
@@ -122,7 +118,6 @@
<s:String x:Key="/Default/CodeStyle/Naming/XamlNaming/UserRules/=NAMESPACE_005FALIAS/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/XamlNaming/UserRules/=XAML_005FFIELD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/XamlNaming/UserRules/=XAML_005FRESOURCE/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CustomTools/CustomToolsData/@EntryValue"></s:String>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpAttributeForSingleLineMethodUpgrade/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpKeepExistingMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpPlaceEmbeddedOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
@@ -132,7 +127,6 @@
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002ECSharpPlaceAttributeOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateBlankLinesAroundFieldToBlankLinesAroundProperty/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateThisQualifierSettings/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EPredefinedNamingRulesToUserRulesUpgrade/@EntryIndexedValue">True</s:Boolean>
<s:String x:Key="/Default/Environment/UnitTesting/UnitTestSessionStore/Sessions/=6af8f80e_002D9fdd_002D4223_002D8e02_002D473db916f9b2/@EntryIndexedValue">&lt;SessionState ContinuousTestingIsOn="False" ContinuousTestingMode="0" FrameworkVersion="{x:Null}" IsLocked="False" Name="All tests from Solution" PlatformMonoPreference="{x:Null}" PlatformType="{x:Null}" xmlns="urn:schemas-jetbrains-com:jetbrains-ut-session" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"&gt;
&lt;Solution /&gt;
&lt;/SessionState&gt;</s:String></wpf:ResourceDictionary>

146
USAGE.md Normal file
View File

@@ -0,0 +1,146 @@
# SharpCompress Usage
## Stream Rules (changed with 0.21)
When dealing with Streams, the rule is that you don't close a stream you didn't create. In effect, this means you should always put a Stream you did create in a using block to dispose it.
However, the .NET Framework often has classes that will dispose streams by default to make things "easy" like the following:
```C#
using (var reader = new StreamReader(File.Open("foo")))
{
...
}
```
In this example, reader should get disposed. However, stream rules say that the `FileStream` created by `File.Open` should remain open. Yet the .NET Framework closes it for you by default unless you use a constructor overload that leaves it open. In general, you should be writing Stream code like this:
```C#
using (var fileStream = File.Open("foo"))
using (var reader = new StreamReader(fileStream))
{
...
}
```
To deal with the "correct" rules as well as the expectations of users, I've decided to always close wrapped streams as of 0.21.
To be explicit though, consider always using the overloads that use `ReaderOptions` or `WriterOptions` and explicitly set `LeaveStreamOpen` the way you want.
If you are using Compression Stream classes directly and don't want the wrapped stream to be closed, use `NonDisposingStream` as a wrapper to prevent the stream from being disposed. The change in 0.21 simplified a lot even though the usage is a bit more convoluted.
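For example, a minimal sketch (this assumes `NonDisposingStream` wraps the inner stream via its constructor and that SharpCompress's own `GZipStream` takes a stream and a `CompressionMode`):
```C#
using (var fileStream = File.OpenRead("foo.gz"))
{
    using (var gzip = new GZipStream(new NonDisposingStream(fileStream), CompressionMode.Decompress))
    {
        // read decompressed bytes from gzip here
    }
    // fileStream survived gzip's Dispose and can be reused
    fileStream.Position = 0;
}
```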
## Samples
Also, look over the tests for more thorough [examples](https://github.com/adamhathcock/sharpcompress/tree/master/tests/SharpCompress.Test)
### Create Zip Archive from all files in a directory to a file
```C#
using (var archive = ZipArchive.Create())
{
archive.AddAllFromDirectory("D:\\temp");
archive.SaveTo("C:\\temp.zip", CompressionType.Deflate);
}
```
### Create Zip Archive from all files in a directory and save in memory
```C#
var memoryStream = new MemoryStream();
using (var archive = ZipArchive.Create())
{
archive.AddAllFromDirectory("D:\\temp");
archive.SaveTo(memoryStream, new WriterOptions(CompressionType.Deflate)
{
LeaveStreamOpen = true
});
}
//reset memoryStream to be usable now
memoryStream.Position = 0;
```
### Extract all files from a Rar file to a directory using RarArchive
```C#
using (var archive = RarArchive.Open("Test.rar"))
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
entry.WriteToDirectory("D:\\temp", new ExtractionOptions()
{
ExtractFullPath = true,
Overwrite = true
});
}
}
```
### Use ReaderFactory to autodetect archive type and extract entries to a directory
```C#
using (Stream stream = File.OpenRead("Tar.tar.bz2"))
using (var reader = ReaderFactory.Open(stream))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Console.WriteLine(reader.Entry.Key);
reader.WriteEntryToDirectory(@"C:\temp", new ExtractionOptions()
{
ExtractFullPath = true,
Overwrite = true
});
}
}
}
```
### Use ReaderFactory to autodetect archive type and Open the entry stream
```C#
using (Stream stream = File.OpenRead("Tar.tar.bz2"))
using (var reader = ReaderFactory.Open(stream))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
using (var entryStream = reader.OpenEntryStream())
{
entryStream.CopyTo(...);
}
}
}
}
```
### Use WriterFactory to write all files from a directory in a streaming manner.
```C#
using (Stream stream = File.OpenWrite("C:\\temp.tgz"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)
{
LeaveStreamOpen = true
}))
{
writer.WriteAll("D:\\temp", "*", SearchOption.AllDirectories);
}
```
### Extract zip which has non-utf8 encoded filename(cp932)
```C#
var opts = new SharpCompress.Readers.ReaderOptions();
var encoding = Encoding.GetEncoding(932);
opts.ArchiveEncoding = new SharpCompress.Common.ArchiveEncoding();
opts.ArchiveEncoding.CustomDecoder = (data, x, y) =>
{
return encoding.GetString(data);
};
var tr = SharpCompress.Archives.Zip.ZipArchive.Open("test.zip", opts);
foreach(var entry in tr.Entries)
{
Console.WriteLine($"{entry.Key}");
}
```

View File

@@ -1,268 +1,83 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using GlobExpressions;
using static Bullseye.Targets;
using static SimpleExec.Command;
const string Clean = "clean";
const string Restore = "restore";
const string Build = "build";
const string Test = "test";
const string Format = "format";
const string CheckFormat = "check-format";
const string Publish = "publish";
const string DetermineVersion = "determine-version";
const string UpdateVersion = "update-version";
const string PushToNuGet = "push-to-nuget";
Target(
Clean,
["**/bin", "**/obj"],
dir =>
{
IEnumerable<string> GetDirectories(string d)
{
return Glob.Directories(".", d);
}
void RemoveDirectory(string d)
{
if (Directory.Exists(d))
{
Console.WriteLine(d);
Directory.Delete(d, true);
}
}
foreach (var d in GetDirectories(dir))
{
RemoveDirectory(d);
}
}
);
Target(
Format,
() =>
{
Run("dotnet", "tool restore");
Run("dotnet", "csharpier format .");
}
);
Target(
CheckFormat,
() =>
{
Run("dotnet", "tool restore");
Run("dotnet", "csharpier check .");
}
);
Target(Restore, [CheckFormat], () => Run("dotnet", "restore"));
Target(
Build,
[Restore],
() =>
{
Run("dotnet", "build src/SharpCompress/SharpCompress.csproj -c Release --no-restore");
}
);
Target(
Test,
[Build],
["net10.0", "net48"],
framework =>
{
IEnumerable<string> GetFiles(string d)
{
return Glob.Files(".", d);
}
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows) && framework == "net48")
{
return;
}
foreach (var file in GetFiles("**/*.Test.csproj"))
{
Run("dotnet", $"test {file} -c Release -f {framework} --no-restore --verbosity=normal");
}
}
);
Target(
Publish,
[Test],
() =>
{
Run("dotnet", "pack src/SharpCompress/SharpCompress.csproj -c Release -o artifacts/");
}
);
Target(
DetermineVersion,
async () =>
{
var (version, isPrerelease) = await GetVersion();
Console.WriteLine($"VERSION={version}");
Console.WriteLine($"PRERELEASE={isPrerelease.ToString().ToLower()}");
// Write to environment file for GitHub Actions
var githubOutput = Environment.GetEnvironmentVariable("GITHUB_OUTPUT");
if (!string.IsNullOrEmpty(githubOutput))
{
File.AppendAllText(githubOutput, $"version={version}\n");
File.AppendAllText(githubOutput, $"prerelease={isPrerelease.ToString().ToLower()}\n");
}
}
);
Target(
UpdateVersion,
async () =>
{
var version = Environment.GetEnvironmentVariable("VERSION");
if (string.IsNullOrEmpty(version))
{
var (detectedVersion, _) = await GetVersion();
version = detectedVersion;
}
Console.WriteLine($"Updating project file with version: {version}");
var projectPath = "src/SharpCompress/SharpCompress.csproj";
var content = File.ReadAllText(projectPath);
// Get base version (without prerelease suffix)
var baseVersion = version.Split('-')[0];
// Update VersionPrefix
content = Regex.Replace(
content,
@"<VersionPrefix>[^<]*</VersionPrefix>",
$"<VersionPrefix>{version}</VersionPrefix>"
);
// Update AssemblyVersion
content = Regex.Replace(
content,
@"<AssemblyVersion>[^<]*</AssemblyVersion>",
$"<AssemblyVersion>{baseVersion}</AssemblyVersion>"
);
// Update FileVersion
content = Regex.Replace(
content,
@"<FileVersion>[^<]*</FileVersion>",
$"<FileVersion>{baseVersion}</FileVersion>"
);
File.WriteAllText(projectPath, content);
Console.WriteLine($"Updated VersionPrefix to: {version}");
Console.WriteLine($"Updated AssemblyVersion and FileVersion to: {baseVersion}");
}
);
Target(
PushToNuGet,
() =>
{
var apiKey = Environment.GetEnvironmentVariable("NUGET_API_KEY");
if (string.IsNullOrEmpty(apiKey))
{
Console.WriteLine(
"NUGET_API_KEY environment variable is not set. Skipping NuGet push."
);
return;
}
var packages = Directory.GetFiles("artifacts", "*.nupkg");
if (packages.Length == 0)
{
Console.WriteLine("No packages found in artifacts directory.");
return;
}
foreach (var package in packages)
{
Console.WriteLine($"Pushing {package} to NuGet.org");
try
{
// Note: API key is passed via command line argument which is standard practice for dotnet nuget push
// The key is already in an environment variable and not displayed in normal output
Run(
"dotnet",
$"nuget push \"{package}\" --api-key {apiKey} --source https://api.nuget.org/v3/index.json --skip-duplicate"
);
}
catch (Exception ex)
{
Console.WriteLine($"Failed to push {package}: {ex.Message}");
throw;
}
}
}
);
Target("default", [Publish], () => Console.WriteLine("Done!"));
await RunTargetsAndExitAsync(args);
static async Task<(string version, bool isPrerelease)> GetVersion()
{
    // Check if current commit has a version tag
    var currentTag = (await GetGitOutput("tag", "--points-at HEAD"))
        .Split('\n', StringSplitOptions.RemoveEmptyEntries)
        .FirstOrDefault(tag => Regex.IsMatch(tag.Trim(), @"^\d+\.\d+\.\d+$"));
    if (!string.IsNullOrEmpty(currentTag))
    {
        // Tagged release - use the tag as version
        var version = currentTag.Trim();
        Console.WriteLine($"Building tagged release version: {version}");
        return (version, false);
    }
    else
    {
        // Not tagged - create prerelease version based on next minor version
        var allTags = (await GetGitOutput("tag", "--list"))
            .Split('\n', StringSplitOptions.RemoveEmptyEntries)
            .Where(tag => Regex.IsMatch(tag.Trim(), @"^\d+\.\d+\.\d+$"))
            .Select(tag => tag.Trim())
            .ToList();
        var lastTag = allTags.OrderBy(tag => Version.Parse(tag)).LastOrDefault() ?? "0.0.0";
        var lastVersion = Version.Parse(lastTag);
        // Increment minor version for next release
        var nextVersion = new Version(lastVersion.Major, lastVersion.Minor + 1, 0);
        // Use commit count since the last version tag if available; otherwise, fall back to total count
        var revListArgs = allTags.Any() ? $"--count {lastTag}..HEAD" : "--count HEAD";
        var commitCount = (await GetGitOutput("rev-list", revListArgs)).Trim();
        var version = $"{nextVersion}-beta.{commitCount}";
        Console.WriteLine($"Building prerelease version: {version}");
        return (version, true);
    }
}
static async Task<string> GetGitOutput(string command, string args)
{
    try
    {
        // Use SimpleExec's Read to execute git commands in a cross-platform way
        var (output, _) = await ReadAsync("git", $"{command} {args}");
        return output;
    }
    catch (Exception ex)
    {
        throw new Exception($"Git command failed: git {command} {args}\n{ex.Message}", ex);
    }
}

class Program
{
    private const string Clean = "clean";
    private const string Format = "format";
    private const string Build = "build";
    private const string Test = "test";
    private const string Publish = "publish";
    static void Main(string[] args)
    {
        Target(Clean,
            ForEach("**/bin", "**/obj"),
            dir =>
            {
                IEnumerable<string> GetDirectories(string d)
                {
                    return Glob.Directories(".", d);
                }
                void RemoveDirectory(string d)
                {
                    if (Directory.Exists(d))
                    {
                        Console.WriteLine(d);
                        Directory.Delete(d, true);
                    }
                }
                foreach (var d in GetDirectories(dir))
                {
                    RemoveDirectory(d);
                }
            });
        Target(Format, () =>
        {
            Run("dotnet", "tool restore");
            Run("dotnet", "format --check");
        });
        Target(Build, DependsOn(Format),
            framework =>
            {
                if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows) && framework == "net46")
                {
                    return;
                }
                Run("dotnet", "build src/SharpCompress/SharpCompress.csproj -c Release");
            });
        Target(Test, DependsOn(Build), ForEach("net5.0"),
            framework =>
            {
                IEnumerable<string> GetFiles(string d)
                {
                    return Glob.Files(".", d);
                }
                foreach (var file in GetFiles("**/*.Test.csproj"))
                {
                    Run("dotnet", $"test {file} -c Release -f {framework}");
                }
            });
        Target(Publish, DependsOn(Test),
            () =>
            {
                Run("dotnet", "pack src/SharpCompress/SharpCompress.csproj -c Release -o artifacts/");
            });
        Target("default", DependsOn(Publish), () => Console.WriteLine("Done!"));
        RunTargetsAndExit(args);
    }
}

View File

@@ -1,11 +1,14 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net10.0</TargetFramework>
<TargetFramework>net5.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Bullseye" />
<PackageReference Include="Glob" />
<PackageReference Include="SimpleExec" />
<PackageReference Include="Bullseye" Version="3.6.0" />
<PackageReference Include="Glob" Version="1.1.8" />
<PackageReference Include="SimpleExec" Version="6.4.0" />
</ItemGroup>
</Project>

View File

@@ -1,65 +0,0 @@
{
"version": 2,
"dependencies": {
"net10.0": {
"Bullseye": {
"type": "Direct",
"requested": "[6.1.0, )",
"resolved": "6.1.0",
"contentHash": "fltnAJDe0BEX5eymXGUq+il2rSUA0pHqUonNDRH2TrvRu8SkU17mYG0IVpdmG2ibtfhdjNrv4CuTCxHOwcozCA=="
},
"Glob": {
"type": "Direct",
"requested": "[1.1.9, )",
"resolved": "1.1.9",
"contentHash": "AfK5+ECWYTP7G3AAdnU8IfVj+QpGjrh9GC2mpdcJzCvtQ4pnerAGwHsxJ9D4/RnhDUz2DSzd951O/lQjQby2Sw=="
},
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",
"requested": "[1.0.3, )",
"resolved": "1.0.3",
"contentHash": "vUc9Npcs14QsyOD01tnv/m8sQUnGTGOw1BCmKcv77LBJY7OxhJ+zJF7UD/sCL3lYNFuqmQEVlkfS4Quif6FyYg==",
"dependencies": {
"Microsoft.NETFramework.ReferenceAssemblies.net461": "1.0.3"
}
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
}
},
"Microsoft.VisualStudio.Threading.Analyzers": {
"type": "Direct",
"requested": "[17.14.15, )",
"resolved": "17.14.15",
"contentHash": "mXQPJsbuUD2ydq4/ffd8h8tSOFCXec+2xJOVNCvXjuMOq/+5EKHq3D2m2MC2+nUaXeFMSt66VS/J4HdKBixgcw=="
},
"SimpleExec": {
"type": "Direct",
"requested": "[13.0.0, )",
"resolved": "13.0.0",
"contentHash": "zcCR1pupa1wI1VqBULRiQKeHKKZOuJhi/K+4V5oO+rHJZlaOD53ViFo1c3PavDoMAfSn/FAXGAWpPoF57rwhYg=="
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net461": {
"type": "Transitive",
"resolved": "1.0.3",
"contentHash": "AmOJZwCqnOCNp6PPcf9joyogScWLtwy0M1WkqfEQ0M9nYwyDD7EX9ZjscKS5iYnyvteX7kzSKFCKt9I9dXA6mA=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
}
}
}
}

View File

@@ -1,512 +0,0 @@
# API Quick Reference
Quick reference for commonly used SharpCompress APIs.
## Factory Methods
### Opening Archives
```csharp
// Auto-detect format
using (var reader = ReaderFactory.Open(stream))
{
// Works with Zip, Tar, GZip, Rar, 7Zip, etc.
}
// Specific format - Archive API
using (var archive = ZipArchive.Open("file.zip"))
using (var archive = TarArchive.Open("file.tar"))
using (var archive = RarArchive.Open("file.rar"))
using (var archive = SevenZipArchive.Open("file.7z"))
using (var archive = GZipArchive.Open("file.gz"))
// With options
var options = new ReaderOptions
{
Password = "password",
LeaveStreamOpen = true,
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(932) }
};
using (var archive = ZipArchive.Open("encrypted.zip", options))
```
### Creating Archives
```csharp
// Writer Factory
using (var writer = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
{
// Write entries
}
// Specific writer
using (var archive = ZipArchive.Create())
using (var archive = TarArchive.Create())
using (var archive = GZipArchive.Create())
// With options
var options = new WriterOptions(CompressionType.Deflate)
{
CompressionLevel = 9,
LeaveStreamOpen = false
};
using (var archive = ZipArchive.Create())
{
archive.SaveTo("output.zip", options);
}
```
---
## Archive API Methods
### Reading/Extracting
```csharp
using (var archive = ZipArchive.Open("file.zip"))
{
// Get all entries
IEnumerable<IArchiveEntry> entries = archive.Entries;
// Find specific entry
var entry = archive.Entries.FirstOrDefault(e => e.Key == "file.txt");
// Extract all
archive.WriteToDirectory(@"C:\output", new ExtractionOptions
{
ExtractFullPath = true,
Overwrite = true
});
// Extract single entry
var first = archive.Entries.First();
first.WriteToFile(@"C:\output\file.txt");
first.WriteToFile(@"C:\output\file.txt", new ExtractionOptions { Overwrite = true });
// Get entry stream
using (var stream = first.OpenEntryStream())
{
stream.CopyTo(outputStream);
}
}
// Async extraction (requires IAsyncArchive)
using (var asyncArchive = await ZipArchive.OpenAsync("file.zip"))
{
await asyncArchive.WriteToDirectoryAsync(
@"C:\output",
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
cancellationToken: cancellationToken
);
}
using (var stream = await entry.OpenEntryStreamAsync(cancellationToken))
{
// ...
}
```
### Entry Properties
```csharp
foreach (var entry in archive.Entries)
{
string name = entry.Key; // Entry name/path
long size = entry.Size; // Uncompressed size
long compressedSize = entry.CompressedSize;
bool isDir = entry.IsDirectory;
DateTime? modTime = entry.LastModifiedTime;
CompressionType compression = entry.CompressionType;
}
```
### Creating Archives
```csharp
using (var archive = ZipArchive.Create())
{
// Add file
archive.AddEntry("file.txt", @"C:\source\file.txt");
// Add multiple files
archive.AddAllFromDirectory(@"C:\source");
archive.AddAllFromDirectory(@"C:\source", "*.txt"); // Pattern
// Save to file
archive.SaveTo("output.zip", CompressionType.Deflate);
// Save to stream
archive.SaveTo(outputStream, new WriterOptions(CompressionType.Deflate)
{
CompressionLevel = 9,
LeaveStreamOpen = true
});
}
```
---
## Reader API Methods
### Forward-Only Reading
```csharp
using (var stream = File.OpenRead("file.zip"))
using (var reader = ReaderFactory.Open(stream))
{
while (reader.MoveToNextEntry())
{
IArchiveEntry entry = reader.Entry;
if (!entry.IsDirectory)
{
// Extract entry
reader.WriteEntryToDirectory(@"C:\output");
reader.WriteEntryToFile(@"C:\output\file.txt");
// Or get stream
using (var entryStream = reader.OpenEntryStream())
{
entryStream.CopyTo(outputStream);
}
}
}
}
// Async variants (use OpenAsync to get IAsyncReader)
using (var stream = File.OpenRead("file.zip"))
using (var reader = await ReaderFactory.OpenAsync(stream))
{
while (await reader.MoveToNextEntryAsync())
{
await reader.WriteEntryToFileAsync(
@"C:\output\" + reader.Entry.Key,
cancellationToken: cancellationToken
);
}
// Alternatively, extract all entries in a single call (instead of the loop above)
await reader.WriteAllToDirectoryAsync(
@"C:\output",
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
cancellationToken
);
}
```
---
## Writer API Methods
### Creating Archives (Streaming)
```csharp
using (var stream = File.Create("output.zip"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
{
// Write single file
using (var fileStream = File.OpenRead("source.txt"))
{
writer.Write("entry.txt", fileStream, DateTime.Now);
}
// Write directory
writer.WriteAll("C:\\source", "*", SearchOption.AllDirectories);
writer.WriteAll("C:\\source", "*.txt", SearchOption.TopDirectoryOnly);
// Async variants
using (var fileStream = File.OpenRead("source.txt"))
{
await writer.WriteAsync("entry.txt", fileStream, DateTime.Now, cancellationToken);
}
await writer.WriteAllAsync("C:\\source", "*", SearchOption.AllDirectories, cancellationToken);
}
```
---
## Common Options
### ReaderOptions
```csharp
var options = new ReaderOptions
{
Password = "password", // For encrypted archives
LeaveStreamOpen = true, // Don't close wrapped stream
ArchiveEncoding = new ArchiveEncoding // Custom character encoding
{
Default = Encoding.GetEncoding(932)
}
};
using (var archive = ZipArchive.Open("file.zip", options))
{
// ...
}
```
### WriterOptions
```csharp
var options = new WriterOptions(CompressionType.Deflate)
{
CompressionLevel = 9, // 0-9 for Deflate
LeaveStreamOpen = true, // Don't close stream
};
archive.SaveTo("output.zip", options);
```
### ExtractionOptions
```csharp
var options = new ExtractionOptions
{
ExtractFullPath = true, // Recreate directory structure
Overwrite = true, // Overwrite existing files
PreserveFileTime = true // Keep original timestamps
};
archive.WriteToDirectory(@"C:\output", options);
```
---
## Compression Types
### Available Compressions
```csharp
// For creating archives
CompressionType.None // No compression (store)
CompressionType.Deflate // DEFLATE (default for ZIP/GZip)
CompressionType.Deflate64 // Deflate64
CompressionType.BZip2 // BZip2
CompressionType.LZMA // LZMA (for 7Zip, LZip, XZ)
CompressionType.PPMd // PPMd (for ZIP)
CompressionType.Rar // RAR compression (read-only)
CompressionType.ZStandard // ZStandard
// For Tar archives with compression
// Use WriterFactory to create compressed tar archives
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, CompressionType.GZip)) // Tar.GZip
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, CompressionType.BZip2)) // Tar.BZip2
```
### Archive Types
```csharp
ArchiveType.Zip
ArchiveType.Tar
ArchiveType.GZip
ArchiveType.BZip2
ArchiveType.Rar
ArchiveType.SevenZip
ArchiveType.XZ
ArchiveType.ZStandard
ArchiveType.Arc
ArchiveType.Arj
ArchiveType.Ace
```
---
## Patterns & Examples
### Extract with Error Handling
```csharp
try
{
using (var archive = ZipArchive.Open("archive.zip",
new ReaderOptions { Password = "password" }))
{
archive.WriteToDirectory(@"C:\output", new ExtractionOptions
{
ExtractFullPath = true,
Overwrite = true
});
}
}
catch (PasswordRequiredException)
{
Console.WriteLine("Password required");
}
catch (InvalidArchiveException)
{
Console.WriteLine("Archive is invalid");
}
catch (SharpCompressException ex)
{
Console.WriteLine($"Error: {ex.Message}");
}
```
### Extract with Progress
```csharp
var progress = new Progress<ProgressReport>(report =>
{
Console.WriteLine($"Extracting {report.EntryPath}: {report.PercentComplete}%");
});
var options = new ReaderOptions { Progress = progress };
using (var archive = ZipArchive.Open("archive.zip", options))
{
archive.WriteToDirectory(@"C:\output");
}
```
### Async Extract with Cancellation
```csharp
var cts = new CancellationTokenSource();
cts.CancelAfter(TimeSpan.FromMinutes(5));
try
{
using (var archive = await ZipArchive.OpenAsync("archive.zip"))
{
await archive.WriteToDirectoryAsync(
@"C:\output",
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
cancellationToken: cts.Token
);
}
}
catch (OperationCanceledException)
{
Console.WriteLine("Extraction cancelled");
}
```
### Create with Custom Compression
```csharp
using (var archive = ZipArchive.Create())
{
archive.AddAllFromDirectory(@"D:\source");
// Fastest
archive.SaveTo("fast.zip", new WriterOptions(CompressionType.Deflate)
{
CompressionLevel = 1
});
// Balanced (default)
archive.SaveTo("normal.zip", CompressionType.Deflate);
// Best compression
archive.SaveTo("best.zip", new WriterOptions(CompressionType.Deflate)
{
CompressionLevel = 9
});
}
```
### Stream Processing (No File I/O)
```csharp
using (var outputStream = new MemoryStream())
using (var archive = ZipArchive.Create())
{
// Add content from memory
using (var contentStream = new MemoryStream(Encoding.UTF8.GetBytes("Hello")))
{
archive.AddEntry("file.txt", contentStream);
}
// Save to memory
archive.SaveTo(outputStream, CompressionType.Deflate);
// Get bytes
byte[] archiveBytes = outputStream.ToArray();
}
```
### Extract Specific Files
```csharp
using (var archive = ZipArchive.Open("archive.zip"))
{
var filesToExtract = new[] { "file1.txt", "file2.txt" };
foreach (var entry in archive.Entries.Where(e => filesToExtract.Contains(e.Key)))
{
entry.WriteToFile(@"C:\output\" + entry.Key);
}
}
```
### List Archive Contents
```csharp
using (var archive = ZipArchive.Open("archive.zip"))
{
foreach (var entry in archive.Entries)
{
if (entry.IsDirectory)
Console.WriteLine($"[DIR] {entry.Key}");
else
Console.WriteLine($"[FILE] {entry.Key} ({entry.Size} bytes)");
}
}
```
---
## Common Mistakes
### ✗ Wrong - Stream not disposed
```csharp
var stream = File.OpenRead("archive.zip");
var archive = ZipArchive.Open(stream);
archive.WriteToDirectory(@"C:\output");
// stream not disposed - leaked resource
```
### ✓ Correct - Using blocks
```csharp
using (var stream = File.OpenRead("archive.zip"))
using (var archive = ZipArchive.Open(stream))
{
archive.WriteToDirectory(@"C:\output");
}
// Both properly disposed
```
### ✗ Wrong - Mixing API styles
```csharp
// Loading entire archive then iterating
using (var archive = ZipArchive.Open("large.zip"))
{
var entries = archive.Entries.ToList(); // Loads all in memory
foreach (var e in entries)
{
e.WriteToFile(...); // Then extracts each
}
}
```
### ✓ Correct - Use Reader for large files
```csharp
// Streaming iteration
using (var stream = File.OpenRead("large.zip"))
using (var reader = ReaderFactory.Open(stream))
{
while (reader.MoveToNextEntry())
{
reader.WriteEntryToDirectory(@"C:\output");
}
}
```
---
## Related Documentation
- [USAGE.md](USAGE.md) - Complete code examples
- [FORMATS.md](FORMATS.md) - Supported formats
- [PERFORMANCE.md](PERFORMANCE.md) - API selection guide

View File

@@ -1,659 +0,0 @@
# SharpCompress Architecture Guide
This guide explains the internal architecture and design patterns of SharpCompress for contributors.
## Overview
SharpCompress is organized into three main layers:
```
┌─────────────────────────────────────────┐
│ User-Facing APIs (Top Layer) │
│ Archive, Reader, Writer Factories │
├─────────────────────────────────────────┤
│ Format-Specific Implementations │
│ ZipArchive, TarReader, GZipWriter, │
│ RarArchive, SevenZipArchive, etc. │
├─────────────────────────────────────────┤
│ Compression & Crypto (Bottom Layer) │
│ Deflate, LZMA, BZip2, AES, CRC32 │
└─────────────────────────────────────────┘
```
---
## Directory Structure
### `src/SharpCompress/`
#### `Archives/` - Archive Implementations
Contains `IArchive` implementations for seekable, random-access APIs.
**Key Files:**
- `AbstractArchive.cs` - Base class for all archives
- `IArchive.cs` - Archive interface definition
- `ArchiveFactory.cs` - Factory for opening archives
- Format-specific: `ZipArchive.cs`, `TarArchive.cs`, `RarArchive.cs`, `SevenZipArchive.cs`, `GZipArchive.cs`
**Use Archive API when:**
- Stream is seekable (file, memory)
- Need random access to entries
- Archive fits in memory
- Simplicity is important
#### `Readers/` - Reader Implementations
Contains `IReader` implementations for forward-only, non-seekable APIs.
**Key Files:**
- `AbstractReader.cs` - Base reader class
- `IReader.cs` - Reader interface
- `ReaderFactory.cs` - Auto-detection factory
- `ReaderOptions.cs` - Configuration for readers
- Format-specific: `ZipReader.cs`, `TarReader.cs`, `GZipReader.cs`, `RarReader.cs`, etc.
**Use Reader API when:**
- Stream is non-seekable (network, pipe, compressed)
- Processing large files
- Memory is limited
- Forward-only processing is acceptable
#### `Writers/` - Writer Implementations
Contains `IWriter` implementations for forward-only writing.
**Key Files:**
- `AbstractWriter.cs` - Base writer class
- `IWriter.cs` - Writer interface
- `WriterFactory.cs` - Factory for creating writers
- `WriterOptions.cs` - Configuration for writers
- Format-specific: `ZipWriter.cs`, `TarWriter.cs`, `GZipWriter.cs`
#### `Factories/` - Format Detection
Factory classes for auto-detecting archive format and creating appropriate readers/writers.
**Key Files:**
- `Factory.cs` - Base factory class
- `IFactory.cs` - Factory interface
- Format-specific: `ZipFactory.cs`, `TarFactory.cs`, `RarFactory.cs`, etc.
**How It Works:**
1. `ReaderFactory.Open(stream)` probes stream signatures
2. Identifies format by magic bytes
3. Creates appropriate reader instance
4. Returns the generic `IReader` interface (see the probing sketch below)
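A minimal sketch of the probing step, assuming a hypothetical `MatchesSignature` helper rather than the library's actual factory code:
```csharp
// Hypothetical helper for illustration only; the real factories implement
// IFactory and are consulted in order by ReaderFactory.Open.
private static readonly byte[] ZipSignature = { 0x50, 0x4B, 0x03, 0x04 }; // "PK\x03\x04"

private static bool MatchesSignature(Stream stream, byte[] signature)
{
    var buffer = new byte[signature.Length];
    var read = stream.Read(buffer, 0, buffer.Length);
    stream.Position -= read; // rewind so the chosen reader re-reads the header (seekable streams only)
    return read == signature.Length && buffer.AsSpan().SequenceEqual(signature);
}
```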
#### `Common/` - Shared Types
Common types, options, and enumerations used across formats.
**Key Files:**
- `IEntry.cs` - Entry interface (file within archive)
- `Entry.cs` - Entry implementation
- `ArchiveType.cs` - Enum for archive formats
- `CompressionType.cs` - Enum for compression methods
- `ArchiveEncoding.cs` - Character encoding configuration
- `ExtractionOptions.cs` - Extraction configuration
- Format-specific headers: `Zip/Headers/`, `Tar/Headers/`, `Rar/Headers/`, etc.
#### `Compressors/` - Compression Algorithms
Low-level compression streams implementing specific algorithms.
**Algorithms:**
- `Deflate/` - DEFLATE compression (Zip default)
- `BZip2/` - BZip2 compression
- `LZMA/` - LZMA compression (7Zip, XZ, LZip)
- `PPMd/` - Prediction by Partial Matching (Zip, 7Zip)
- `ZStandard/` - ZStandard compression (decompression only)
- `Xz/` - XZ format (decompression only)
- `Rar/` - RAR-specific unpacking
- `Arj/`, `Arc/`, `Ace/` - Legacy format decompression
- `Filters/` - BCJ/BCJ2 filters for executable compression
**Each Compressor:**
- Implements a `Stream` subclass
- Provides both compression and decompression
- Some are read-only (decompression only)
#### `Crypto/` - Encryption & Hashing
Cryptographic functions and stream wrappers.
**Key Files:**
- `Crc32Stream.cs` - CRC32 calculation wrapper
- `BlockTransformer.cs` - Block cipher transformations
- AES, PKWare, WinZip encryption implementations
#### `IO/` - Stream Utilities
Stream wrappers and utilities.
**Key Classes:**
- `SharpCompressStream` - Base stream class
- `ProgressReportingStream` - Progress tracking wrapper
- `MarkingBinaryReader` - Binary reader with position marks
- `BufferedSubStream` - Buffered read-only substream
- `ReadOnlySubStream` - Read-only view of parent stream
- `NonDisposingStream` - Prevents wrapped stream disposal
---
## Design Patterns
### 1. Factory Pattern
**Purpose:** Auto-detect format and create appropriate reader/writer.
**Example:**
```csharp
// User calls factory
using (var reader = ReaderFactory.Open(stream)) // Returns IReader
{
while (reader.MoveToNextEntry())
{
// Process entry
}
}
// Behind the scenes:
// 1. Factory.Open() probes stream signatures
// 2. Detects format (Zip, Tar, Rar, etc.)
// 3. Creates appropriate reader (ZipReader, TarReader, etc.)
// 4. Returns as generic IReader interface
```
**Files:**
- `src/SharpCompress/Factories/ReaderFactory.cs`
- `src/SharpCompress/Factories/WriterFactory.cs`
- `src/SharpCompress/Factories/ArchiveFactory.cs`
### 2. Strategy Pattern
**Purpose:** Encapsulate compression algorithms as swappable strategies.
**Example:**
```csharp
// Different compression strategies
CompressionType.Deflate // DEFLATE
CompressionType.BZip2 // BZip2
CompressionType.LZMA // LZMA
CompressionType.PPMd // PPMd
// Writer uses strategy pattern
var archive = ZipArchive.Create();
archive.SaveTo("output.zip", CompressionType.Deflate); // Use Deflate
archive.SaveTo("output.bz2", CompressionType.BZip2); // Use BZip2
```
**Files:**
- `src/SharpCompress/Compressors/` - Strategy implementations
### 3. Decorator Pattern
**Purpose:** Wrap streams with additional functionality.
**Example:**
```csharp
// Progress reporting decorator
var progressStream = new ProgressReportingStream(baseStream, progressReporter);
progressStream.Read(buffer, 0, buffer.Length); // Reports progress
// Non-disposing decorator
var nonDisposingStream = new NonDisposingStream(baseStream);
using (var compressor = new DeflateStream(nonDisposingStream))
{
// baseStream won't be disposed when compressor is disposed
}
```
**Files:**
- `src/SharpCompress/IO/ProgressReportingStream.cs`
- `src/SharpCompress/IO/NonDisposingStream.cs`
### 4. Template Method Pattern
**Purpose:** Define algorithm skeleton in base class, let subclasses fill details.
**Example:**
```csharp
// AbstractArchive defines common archive operations
public abstract class AbstractArchive : IArchive
{
// Template methods
public virtual void WriteToDirectory(string destinationDirectory, ExtractionOptions options)
{
// Common extraction logic
foreach (var entry in Entries)
{
// Call subclass method
entry.WriteToFile(destinationPath, options);
}
}
// Subclasses override format-specific details
protected abstract Entry CreateEntry(EntryData data);
}
```
**Files:**
- `src/SharpCompress/Archives/AbstractArchive.cs`
- `src/SharpCompress/Readers/AbstractReader.cs`
### 5. Iterator Pattern
**Purpose:** Provide sequential access to entries.
**Example:**
```csharp
// Archive API - provides collection
IEnumerable<IEntry> entries = archive.Entries;
foreach (var entry in entries)
{
// Random access - entries already in memory
}
// Reader API - provides iterator
IReader reader = ReaderFactory.Open(stream);
while (reader.MoveToNextEntry())
{
// Forward-only iteration - one entry at a time
var entry = reader.Entry;
}
```
---
## Key Interfaces
### IArchive - Random Access API
```csharp
public interface IArchive : IDisposable
{
IEnumerable<IEntry> Entries { get; }
void WriteToDirectory(string destinationDirectory,
ExtractionOptions options = null);
IEntry FirstOrDefault(Func<IEntry, bool> predicate);
// ... format-specific methods
}
```
**Implementations:** `ZipArchive`, `TarArchive`, `RarArchive`, `SevenZipArchive`, `GZipArchive`
### IReader - Forward-Only API
```csharp
public interface IReader : IDisposable
{
IEntry Entry { get; }
bool MoveToNextEntry();
void WriteEntryToDirectory(string destinationDirectory,
ExtractionOptions options = null);
Stream OpenEntryStream();
// ... async variants
}
```
**Implementations:** `ZipReader`, `TarReader`, `RarReader`, `GZipReader`, etc.
### IWriter - Writing API
```csharp
public interface IWriter : IDisposable
{
void Write(string entryPath, Stream source,
DateTime? modificationTime = null);
void WriteAll(string sourceDirectory, string searchPattern,
SearchOption searchOption);
// ... async variants
}
```
**Implementations:** `ZipWriter`, `TarWriter`, `GZipWriter`
### IEntry - Archive Entry
```csharp
public interface IEntry
{
string Key { get; }
long Size { get; }
long CompressedSize { get; }
bool IsDirectory { get; }
DateTime? LastModifiedTime { get; }
CompressionType CompressionType { get; }
void WriteToFile(string fullPath, ExtractionOptions options = null);
void WriteToStream(Stream destinationStream);
Stream OpenEntryStream();
// ... async variants
}
```
---
## Adding Support for a New Format
### Step 1: Understand the Format
- Research format specification
- Understand compression/encryption used
- Study existing similar formats in codebase
### Step 2: Create Format Structure Classes
**Create:** `src/SharpCompress/Common/NewFormat/`
```csharp
// Headers and data structures
public class NewFormatHeader
{
public uint Magic { get; set; }
public ushort Version { get; set; }
// ... other fields
public static NewFormatHeader Read(BinaryReader reader)
{
// Deserialize from binary
var header = new NewFormatHeader
{
Magic = reader.ReadUInt32(),
Version = reader.ReadUInt16(),
};
// ... read remaining fields
return header;
}
}
public class NewFormatEntry
{
public string FileName { get; set; }
public uint CompressedSize { get; set; }
public uint UncompressedSize { get; set; }
// ... other fields
}
```
### Step 3: Create Archive Implementation
**Create:** `src/SharpCompress/Archives/NewFormat/NewFormatArchive.cs`
```csharp
public class NewFormatArchive : AbstractArchive
{
private NewFormatHeader _header;
private List<NewFormatEntry> _entries;
public static NewFormatArchive Open(Stream stream)
{
var archive = new NewFormatArchive();
archive._header = NewFormatHeader.Read(stream);
archive.LoadEntries(stream);
return archive;
}
public override IEnumerable<IEntry> Entries => _entries.Select(e => new Entry(e));
protected override Stream OpenEntryStream(Entry entry)
{
// Return decompressed stream for entry
}
// ... other abstract method implementations
}
```
### Step 4: Create Reader Implementation
**Create:** `src/SharpCompress/Readers/NewFormat/NewFormatReader.cs`
```csharp
public class NewFormatReader : AbstractReader
{
private NewFormatHeader _header;
private BinaryReader _reader;
public NewFormatReader(Stream stream)
{
_reader = new BinaryReader(stream);
_header = NewFormatHeader.Read(_reader);
}
public override bool MoveToNextEntry()
{
// Read next entry header
if (!_reader.BaseStream.CanRead) return false;
var entryData = NewFormatEntry.Read(_reader);
// ... set this.Entry
return entryData != null;
}
// ... other abstract method implementations
}
```
### Step 5: Create Factory
**Create:** `src/SharpCompress/Factories/NewFormatFactory.cs`
```csharp
public class NewFormatFactory : Factory, IArchiveFactory, IReaderFactory
{
// Archive format magic bytes (signature)
private static readonly byte[] NewFormatSignature = new byte[] { 0x4E, 0x46 }; // "NF"
public static NewFormatFactory Instance { get; } = new();
public IArchive CreateArchive(Stream stream)
=> NewFormatArchive.Open(stream);
public IReader CreateReader(Stream stream, ReaderOptions options)
=> new NewFormatReader(stream) { Options = options };
public bool Matches(Stream stream, ReadOnlySpan<byte> signature)
=> signature.StartsWith(NewFormatSignature);
}
```
### Step 6: Register Factory
**Update:** `src/SharpCompress/Factories/ArchiveFactory.cs`
```csharp
private static readonly IFactory[] Factories =
{
ZipFactory.Instance,
TarFactory.Instance,
RarFactory.Instance,
SevenZipFactory.Instance,
GZipFactory.Instance,
NewFormatFactory.Instance, // Add here
// ... other factories
};
```
### Step 7: Add Tests
**Create:** `tests/SharpCompress.Test/NewFormat/NewFormatTests.cs`
```csharp
public class NewFormatTests : TestBase
{
[Fact]
public void NewFormat_Extracts_Successfully()
{
var archivePath = Path.Combine(TEST_ARCHIVES_PATH, "archive.newformat");
using (var archive = NewFormatArchive.Open(archivePath))
{
archive.WriteToDirectory(SCRATCH_FILES_PATH);
// Assert extraction
}
}
[Fact]
public void NewFormat_Reader_Works()
{
var archivePath = Path.Combine(TEST_ARCHIVES_PATH, "archive.newformat");
using (var stream = File.OpenRead(archivePath))
using (var reader = new NewFormatReader(stream))
{
Assert.True(reader.MoveToNextEntry());
Assert.NotNull(reader.Entry);
}
}
}
```
### Step 8: Add Test Archives
Place test files in `tests/TestArchives/Archives/NewFormat/` directory.
### Step 9: Document
Update `docs/FORMATS.md` with format support information.
---
## Compression Algorithm Implementation
### Creating a New Compression Stream
**Example:** Creating `CustomStream` for a custom compression algorithm
```csharp
public class CustomStream : Stream
{
    private readonly Stream _baseStream;
    private readonly bool _leaveOpen;

    public CustomStream(Stream baseStream, bool leaveOpen = false)
    {
        _baseStream = baseStream;
        _leaveOpen = leaveOpen;
    }

    // Stream is abstract: these members must also be overridden to compile
    public override bool CanRead => _baseStream.CanRead;
    public override bool CanWrite => _baseStream.CanWrite;
    public override bool CanSeek => false;
    public override long Length => throw new NotSupportedException();
    public override long Position
    {
        get => throw new NotSupportedException();
        set => throw new NotSupportedException();
    }

    public override void Flush() => _baseStream.Flush();
    public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
    public override void SetLength(long value) => throw new NotSupportedException();

    public override int Read(byte[] buffer, int offset, int count)
    {
        // Decompress data from _baseStream into buffer and
        // return the number of decompressed bytes
        throw new NotImplementedException("decompression goes here");
    }

    public override void Write(byte[] buffer, int offset, int count)
    {
        // Compress data from buffer and write it to _baseStream
        throw new NotImplementedException("compression goes here");
    }

    protected override void Dispose(bool disposing)
    {
        if (disposing && !_leaveOpen)
        {
            _baseStream?.Dispose();
        }
        base.Dispose(disposing);
    }
}
```
---
## Stream Handling Best Practices
### Disposal Pattern
```csharp
// Correct: Nested using blocks
using (var fileStream = File.OpenRead("archive.zip"))
using (var archive = ZipArchive.Open(fileStream))
{
archive.WriteToDirectory(@"C:\output");
}
// Both archive and fileStream properly disposed
// Correct: Using with options
var options = new ReaderOptions { LeaveStreamOpen = true };
var stream = File.OpenRead("archive.zip");
using (var archive = ZipArchive.Open(stream, options))
{
archive.WriteToDirectory(@"C:\output");
}
stream.Dispose(); // Manually dispose if LeaveStreamOpen = true
```
### NonDisposingStream Wrapper
```csharp
// Prevent unwanted stream closure
var baseStream = File.OpenRead("data.bin");
var nonDisposing = new NonDisposingStream(baseStream);
using (var compressor = new DeflateStream(nonDisposing))
{
// Compressor won't close baseStream when disposed
}
// baseStream still usable
baseStream.Position = 0; // Works
baseStream.Dispose(); // Manual disposal
```
---
## Performance Considerations
### Memory Efficiency
1. **Avoid loading entire archive in memory** - Use Reader API for large files (see the sketch after this list)
2. **Process entries sequentially** - Especially for solid archives
3. **Use appropriate buffer sizes** - Larger buffers for network I/O
4. **Dispose streams promptly** - Free resources when done
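A minimal sketch of points 1-4, assuming a local `large.zip` (names and paths are illustrative):
```csharp
using (var stream = File.OpenRead("large.zip"))
using (var reader = ReaderFactory.Open(stream))
{
    var buffer = new byte[81920]; // one fixed buffer keeps allocations flat
    while (reader.MoveToNextEntry())
    {
        if (reader.Entry.IsDirectory)
        {
            continue;
        }
        // Assumes flat entry keys; create subdirectories first for nested keys
        using (var entryStream = reader.OpenEntryStream())
        using (var output = File.Create(Path.Combine(@"C:\output", reader.Entry.Key)))
        {
            int read;
            while ((read = entryStream.Read(buffer, 0, buffer.Length)) > 0)
            {
                output.Write(buffer, 0, read); // only one entry in memory at a time
            }
        }
    }
}
```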
### Algorithm Selection
1. **Archive API** - Fast for small archives with random access
2. **Reader API** - Efficient for large files or streaming
3. **Solid archives** - Sequential extraction much faster
4. **Compression levels** - Trade-off between speed and size
---
## Testing Guidelines
### Test Coverage
1. **Happy path** - Normal extraction works
2. **Edge cases** - Empty archives, single file, many files
3. **Corrupted data** - Handle gracefully
4. **Error cases** - Missing passwords, unsupported compression
5. **Async operations** - Both sync and async code paths
### Test Archives
- Use `tests/TestArchives/` for test data
- Create format-specific subdirectories
- Include encrypted, corrupted, and edge case archives
- Don't recreate existing archives
### Test Patterns
```csharp
[Fact]
public void Archive_Extraction_Works()
{
// Arrange
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "test.zip");
// Act
using (var archive = ZipArchive.Open(testArchive))
{
archive.WriteToDirectory(SCRATCH_FILES_PATH);
}
// Assert
Assert.True(File.Exists(Path.Combine(SCRATCH_FILES_PATH, "file.txt")));
}
```
---
## Related Documentation
- [AGENTS.md](../AGENTS.md) - Development guidelines
- [FORMATS.md](FORMATS.md) - Supported formats

View File

@@ -1,610 +0,0 @@
# SharpCompress Character Encoding Guide
This guide explains how SharpCompress handles character encoding for archive entries (filenames, comments, etc.).
## Overview
Most archive formats store filenames and metadata as bytes. SharpCompress must convert these bytes to strings using the appropriate character encoding.
**Common Problem:** Archives created on systems with non-UTF8 encodings (especially Japanese, Chinese systems) appear with corrupted filenames when extracted on systems that assume UTF8.
---
## ArchiveEncoding Class
### Basic Usage
```csharp
using SharpCompress.Common;
using SharpCompress.Readers;
// Configure encoding before opening archive
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding(932) // cp932 for Japanese
}
};
using (var archive = ZipArchive.Open("japanese.zip", options))
{
foreach (var entry in archive.Entries)
{
Console.WriteLine(entry.Key); // Now shows correct characters
}
}
```
### ArchiveEncoding Properties
| Property | Purpose |
|----------|---------|
| `Default` | Default encoding for filenames (fallback) |
| `CustomDecoder` | Custom decoding function for special cases |
### Setting for Different APIs
**Archive API:**
```csharp
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(932) }
};
using (var archive = ZipArchive.Open("file.zip", options))
{
// Use archive with correct encoding
}
```
**Reader API:**
```csharp
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(932) }
};
using (var stream = File.OpenRead("file.zip"))
using (var reader = ReaderFactory.Open(stream, options))
{
while (reader.MoveToNextEntry())
{
// Filenames decoded correctly
}
}
```
---
## Common Encodings
### Asian Encodings
#### cp932 (Japanese)
```csharp
// Windows-31J, Shift-JIS variant used on Japanese Windows
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding(932)
}
};
using (var archive = ZipArchive.Open("japanese.zip", options))
{
// Correctly decodes Japanese filenames
}
```
**When to use:**
- Archives from Japanese Windows systems
- Files with Japanese characters in names
#### gb2312 (Simplified Chinese)
```csharp
// Simplified Chinese
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("gb2312")
}
};
```
#### gbk (Extended Simplified Chinese)
```csharp
// Extended Simplified Chinese (more characters than gb2312)
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("gbk")
}
};
```
#### big5 (Traditional Chinese)
```csharp
// Traditional Chinese (Taiwan, Hong Kong)
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("big5")
}
};
```
#### euc-jp (Japanese, Unix)
```csharp
// Extended Unix Code for Japanese
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("eucjp")
}
};
```
#### euc-kr (Korean)
```csharp
// Extended Unix Code for Korean
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("euc-kr")
}
};
```
### Western European Encodings
#### iso-8859-1 (Latin-1)
```csharp
// Western European (includes accented characters)
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("iso-8859-1")
}
};
```
**When to use:**
- Archives from French, German, Spanish systems
- Files with accented characters (é, ñ, ü, etc.)
#### cp1252 (Windows-1252)
```csharp
// Windows Western European
// Very similar to iso-8859-1 but with additional printable characters
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("cp1252")
}
};
```
**When to use:**
- Archives from older Western European Windows systems
- Files with smart quotes and other Windows-specific characters
#### iso-8859-15 (Latin-9)
```csharp
// Western European with Euro symbol support
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("iso-8859-15")
}
};
```
### Cyrillic Encodings
#### cp1251 (Windows Cyrillic)
```csharp
// Russian, Serbian, Bulgarian, etc.
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("cp1251")
}
};
```
#### koi8-r (KOI8 Russian)
```csharp
// Russian (Unix standard)
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("koi8-r")
}
};
```
### UTF Encodings (Modern)
#### UTF-8 (Default)
```csharp
// Modern standard - usually correct for new archives
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.UTF8
}
};
```
#### UTF-16
```csharp
// Unicode - rarely used in archives
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.Unicode
}
};
```
---
## Encoding Auto-Detection
SharpCompress attempts to auto-detect encoding, but this isn't always reliable:
```csharp
// Auto-detection (default)
using (var archive = ZipArchive.Open("file.zip")) // Uses UTF8 by default
{
// May show corrupted characters if archive uses different encoding
}
// Explicit encoding (more reliable)
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(932) }
};
using (var archive = ZipArchive.Open("file.zip", options))
{
// Correct characters displayed
}
```
### When Manual Override is Needed
| Situation | Solution |
|-----------|----------|
| Archive shows corrupted characters | Specify the encoding explicitly |
| Archives from specific region | Use that region's encoding |
| Mixed encodings in archive | Use CustomDecoder |
| Testing with international files | Try different encodings |
---
## Custom Decoder
For complex scenarios where a single encoding isn't sufficient:
### Basic Custom Decoder
```csharp
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
CustomDecoder = (data, offset, length) =>
{
// Custom decoding logic
var bytes = new byte[length];
Array.Copy(data, offset, bytes, 0, length);
// Try UTF8 first
try
{
return Encoding.UTF8.GetString(bytes);
}
catch
{
// Fallback to cp932 if UTF8 fails
return Encoding.GetEncoding(932).GetString(bytes);
}
}
}
};
using (var archive = ZipArchive.Open("mixed.zip", options))
{
foreach (var entry in archive.Entries)
{
Console.WriteLine(entry.Key); // Uses custom decoder
}
}
```
### Advanced: Detect Encoding by Content
```csharp
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
CustomDecoder = DetectAndDecode
}
};
private static string DetectAndDecode(byte[] data, int offset, int length)
{
var bytes = new byte[length];
Array.Copy(data, offset, bytes, 0, length);
// Try UTF8 (most modern archives)
try
{
var str = Encoding.UTF8.GetString(bytes);
// Verify it decoded correctly (no replacement characters)
if (!str.Contains('\uFFFD'))
return str;
}
catch { }
// Try cp932 (Japanese)
try
{
var str = Encoding.GetEncoding(932).GetString(bytes);
if (!str.Contains('\uFFFD'))
return str;
}
catch { }
// Fallback to iso-8859-1 (always succeeds)
return Encoding.GetEncoding("iso-8859-1").GetString(bytes);
}
```
---
## Code Examples
### Extract Archive with Japanese Filenames
```csharp
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding(932) // cp932
}
};
using (var archive = ZipArchive.Open("japanese_files.zip", options))
{
archive.WriteToDirectory(@"C:\output", new ExtractionOptions
{
ExtractFullPath = true,
Overwrite = true
});
}
// Files extracted with correct Japanese names
```
### Extract Archive with Western European Filenames
```csharp
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("iso-8859-1")
}
};
using (var archive = ZipArchive.Open("french_files.zip", options))
{
archive.WriteToDirectory(@"C:\output");
}
// Accented characters (é, è, ê, etc.) display correctly
```
### Extract Archive with Chinese Filenames
```csharp
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("gbk") // Simplified Chinese
}
};
using (var archive = ZipArchive.Open("chinese_files.zip", options))
{
archive.WriteToDirectory(@"C:\output");
}
```
### Extract Archive with Russian Filenames
```csharp
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("cp1251") // Windows Cyrillic
}
};
using (var archive = ZipArchive.Open("russian_files.zip", options))
{
archive.WriteToDirectory(@"C:\output");
}
```
### Reader API with Encoding
```csharp
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding(932)
}
};
using (var stream = File.OpenRead("japanese.zip"))
using (var reader = ReaderFactory.Open(stream, options))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Console.WriteLine(reader.Entry.Key); // Correct characters
reader.WriteEntryToDirectory(@"C:\output");
}
}
}
```
---
## Creating Archives with Correct Encoding
When creating archives, SharpCompress uses UTF8 by default (recommended):
```csharp
// Create with UTF8 (default, recommended)
using (var archive = ZipArchive.Create())
{
archive.AddAllFromDirectory(@"D:\my_files");
archive.SaveTo("output.zip", CompressionType.Deflate);
// Archives created with UTF8 encoding
}
```
If you need to create archives for systems that expect specific encodings:
```csharp
// Note: SharpCompress Writer API uses UTF8 for encoding
// To create archives with other encodings, consider:
// 1. Let users on those systems create archives
// 2. Use system tools (7-Zip, WinRAR) with desired encoding
// 3. Post-process archives if absolutely necessary
// For now, recommend modern UTF8-based archives
```
---
## Troubleshooting Encoding Issues
### Filenames Show Question Marks (?)
```
✗ Wrong encoding detected
test文件.txt → test???.txt
```
**Solution:** Specify correct encoding explicitly
```csharp
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("gbk") // Try different encodings
}
};
```
### Filenames Show Replacement Character (�)
```
✗ Invalid bytes for selected encoding
café.txt → caf�.txt
```
**Solution:**
1. Try a different encoding (see Common Encodings table)
2. Use CustomDecoder with fallback encoding
3. Archive might be corrupted
### Mixed Encodings in Single Archive
```csharp
// Use CustomDecoder to handle mixed encodings
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
CustomDecoder = (data, offset, length) =>
{
// Try multiple encodings in priority order
var bytes = new byte[length];
Array.Copy(data, offset, bytes, 0, length);
foreach (var encoding in new[]
{
Encoding.UTF8,
Encoding.GetEncoding(932),
Encoding.GetEncoding("iso-8859-1")
})
{
try
{
var str = encoding.GetString(bytes);
if (!str.Contains('\uFFFD'))
return str;
}
catch { }
}
// Final fallback
return Encoding.GetEncoding("iso-8859-1").GetString(bytes);
}
}
};
```
---
## Encoding Reference Table
| Encoding | Code | Use Case |
|----------|------|----------|
| UTF-8 | (default) | Modern archives, recommended |
| cp932 | 932 | Japanese Windows |
| gb2312 | "gb2312" | Simplified Chinese |
| gbk | "gbk" | Extended Simplified Chinese |
| big5 | "big5" | Traditional Chinese |
| iso-8859-1 | "iso-8859-1" | Western European |
| cp1252 | "cp1252" | Windows Western European |
| cp1251 | "cp1251" | Russian/Cyrillic |
| euc-jp | "euc-jp" | Japanese Unix |
| euc-kr | "euc-kr" | Korean |
---
## Best Practices
1. **Use UTF-8 for new archives** - Most modern systems support it
2. **Ask the archive creator** - When receiving archives with corrupted names
3. **Provide encoding options** - If your app handles user archives (a helper sketch follows this list)
4. **Document your assumption** - Tell users what encoding you're using
5. **Test with international files** - Before releasing production code
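For item 3, a helper sketch; the method name and parameter are illustrative, not part of the library:
```csharp
public static IArchive OpenWithUserEncoding(string path, string encodingName)
{
    // encodingName comes from user input, e.g. "utf-8", "gbk", "euc-kr"
    var options = new ReaderOptions
    {
        ArchiveEncoding = new ArchiveEncoding
        {
            Default = Encoding.GetEncoding(encodingName)
        }
    };
    return ZipArchive.Open(path, options);
}
```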
---
## Related Documentation
- [USAGE.md](USAGE.md#extract-zip-which-has-non-utf8-encoded-filenamecp932) - Usage examples

View File

@@ -1,142 +0,0 @@
# Version Log
* [Releases](https://github.com/adamhathcock/sharpcompress/releases)
## Version 0.18
* [Now on Github releases](https://github.com/adamhathcock/sharpcompress/releases/tag/0.18)
## Version 0.17.1
* Fix - [Bug Fix for .NET Core on Windows](https://github.com/adamhathcock/sharpcompress/pull/257)
## Version 0.17.0
* New - Full LZip support! Can read and write LZip files and Tars inside LZip files. [Make LZip a first class citizen. #241](https://github.com/adamhathcock/sharpcompress/issues/241)
* New - XZ read support! Can read XZ files and Tars inside XZ files. [XZ in SharpCompress #94](https://github.com/adamhathcock/sharpcompress/issues/94)
* Fix - [Regression - zip file writing on seekable streams always assumed stream start was 0. Introduced with Zip64 writing.](https://github.com/adamhathcock/sharpcompress/issues/244)
* Fix - [Zip files with post-data descriptors can be properly skipped via decompression](https://github.com/adamhathcock/sharpcompress/issues/162)
## Version 0.16.2
* Fix [.NET 3.5 should support files and cryptography (was a regression from 0.16.0)](https://github.com/adamhathcock/sharpcompress/pull/251)
* Fix [Zip per entry compression customization wrote the wrong method into the zip archive](https://github.com/adamhathcock/sharpcompress/pull/249)
## Version 0.16.1
* Fix [Preserve compression method when getting a compressed stream](https://github.com/adamhathcock/sharpcompress/pull/235)
* Fix [RAR entry key normalization fix](https://github.com/adamhathcock/sharpcompress/issues/201)
## Version 0.16.0
* Breaking - [Progress Event Tracking rethink](https://github.com/adamhathcock/sharpcompress/pull/226)
* Update to VS2017 - [VS2017](https://github.com/adamhathcock/sharpcompress/pull/231) - Framework targets have been changed.
* New - [Add Zip64 writing](https://github.com/adamhathcock/sharpcompress/pull/211)
* [Fix invalid/mismatching Zip version flags.](https://github.com/adamhathcock/sharpcompress/issues/164) - This allows nuget/System.IO.Packaging to read zip files generated by SharpCompress
* [Fix 7Zip directory hiding](https://github.com/adamhathcock/sharpcompress/pull/215/files)
* [Verify RAR CRC headers](https://github.com/adamhathcock/sharpcompress/pull/220)
## Version 0.15.2
* [Fix invalid headers](https://github.com/adamhathcock/sharpcompress/pull/210) - fixes an issue creating large-ish zip archives that was introduced with zip64 reading.
## Version 0.15.1
* [Zip64 extending information and ZipReader](https://github.com/adamhathcock/sharpcompress/pull/206)
## Version 0.15.0
* [Add zip64 support for ZipArchive extraction](https://github.com/adamhathcock/sharpcompress/pull/205)
## Version 0.14.1
* [.NET Assemblies aren't strong named](https://github.com/adamhathcock/sharpcompress/issues/158)
* [Pkware encryption for Zip files didn't allow for multiple reads of an entry](https://github.com/adamhathcock/sharpcompress/issues/197)
* [GZip Entry couldn't be read multiple times](https://github.com/adamhathcock/sharpcompress/issues/198)
## Version 0.14.0
* [Support for LZip reading in for Tars](https://github.com/adamhathcock/sharpcompress/pull/191)
## Version 0.13.1
* [Fix null password on ReaderFactory. Fix null options on SevenZipArchive](https://github.com/adamhathcock/sharpcompress/pull/188)
* [Make PpmdProperties lazy to avoid unnecessary allocations.](https://github.com/adamhathcock/sharpcompress/pull/185)
## Version 0.13.0
* Breaking change: Big refactor of Options on API.
* 7Zip supports Deflate
## Version 0.12.4
* Forward only zip issue fix https://github.com/adamhathcock/sharpcompress/issues/160
* Try to fix frameworks again by copying targets from JSON.NET
## Version 0.12.3
* 7Zip fixes https://github.com/adamhathcock/sharpcompress/issues/73
* Maybe all profiles will work with project.json now
## Version 0.12.2
* Support Profile 259 again
## Version 0.12.1
* Support Silverlight 5
## Version 0.12.0
* .NET Core RTM!
* Bug fix for Tar long paths
## Version 0.11.6
* Bug fix for global header in Tar
* Writers now have a leaveOpen `bool` overload. They won't close streams if not-requested to.
## Version 0.11.5
* Bug fix in Skip method
## Version 0.11.4
* SharpCompress is now endian neutral (matters for Mono platforms)
* Fix for Inflate (need to change implementation)
* Fixes for RAR detection
## Version 0.11.1
* Added Cancel on IReader
* Removed .NET 2.0 support and LinqBridge dependency
## Version 0.11
* Been over a year, contains mainly fixes from contributors!
* Possible breaking change: ArchiveEncoding is UTF8 by default now.
* TAR supports writing long names using longlink
* RAR Protect Header added
## Version 0.10.3
* Finally fixed Disposal issue when creating a new archive with the Archive API
## Version 0.10.2
* Fixed Rar Header reading for invalid extended time headers.
* Windows Store assembly is now strong named
* Known issues with Long Tar names being worked on
* Updated to VS2013
* Portable targets SL5 and Windows Phone 8 (up from SL4 and WP7)
## Version 0.10.1
* Fixed 7Zip extraction performance problem
## Version 0.10
* Added support for RAR Decryption (thanks to https://github.com/hrasyid)
* Embedded some BouncyCastle crypto classes to allow RAR Decryption and Winzip AES Decryption in Portable and Windows Store DLLs
* Built in Release (I think)

View File

@@ -1,474 +0,0 @@
# SharpCompress Performance Guide
This guide helps you optimize SharpCompress for performance in various scenarios.
## API Selection Guide
### Archive API vs Reader API
Choose the right API based on your use case:
| Aspect | Archive API | Reader API |
|--------|------------|-----------|
| **Stream Type** | Seekable only | Non-seekable OK |
| **Memory Usage** | All entries in memory | One entry at a time |
| **Random Access** | ✓ Yes | ✗ No |
| **Best For** | Small-to-medium archives | Large or streaming data |
| **Performance** | Fast for random access | Better for large files |
### Archive API (Fast for Random Access)
```csharp
// Use when:
// - Archive fits in memory
// - You need random access to entries
// - Stream is seekable (file, MemoryStream)
using (var archive = ZipArchive.Open("archive.zip"))
{
// Random access - all entries available
var specific = archive.Entries.FirstOrDefault(e => e.Key == "file.txt");
if (specific != null)
{
specific.WriteToFile(@"C:\output\file.txt");
}
}
```
**Performance Characteristics:**
- ✓ Instant entry lookup
- ✓ Parallel extraction possible
- ✗ Entire archive in memory
- ✗ Can't process while downloading
### Reader API (Best for Large Files)
```csharp
// Use when:
// - Processing large archives (>100 MB)
// - Streaming from network/pipe
// - Memory is constrained
// - Forward-only processing is acceptable
using (var stream = File.OpenRead("large.zip"))
using (var reader = ReaderFactory.Open(stream))
{
while (reader.MoveToNextEntry())
{
// Process one entry at a time
reader.WriteEntryToDirectory(@"C:\output");
}
}
```
**Performance Characteristics:**
- ✓ Minimal memory footprint
- ✓ Works with non-seekable streams
- ✓ Can process while downloading
- ✗ Forward-only (no random access)
- ✗ Entry lookup requires iteration
---
## Buffer Sizing
### Understanding Buffers
SharpCompress uses internal buffers for reading compressed data. Buffer size affects:
- **Speed:** Larger buffers = fewer I/O operations = faster
- **Memory:** Larger buffers = higher memory usage
### Recommended Buffer Sizes
| Scenario | Size | Notes |
|----------|------|-------|
| Embedded/IoT devices | 4-8 KB | Minimal memory usage |
| Memory-constrained | 16-32 KB | Conservative default |
| Standard use (default) | 64 KB | Recommended default |
| Large file streaming | 256 KB | Better throughput |
| High-speed SSD | 512 KB - 1 MB | Maximum throughput |
### How Buffer Size Affects Performance
```csharp
// SharpCompress manages buffers internally
// You can't directly set buffer size, but you can:
// 1. Use Stream.CopyTo with explicit buffer size
using (var entryStream = reader.OpenEntryStream())
using (var fileStream = File.Create(@"C:\output\file.txt"))
{
// Default CopyTo buffer (80 KB)
entryStream.CopyTo(fileStream);
// Or specify larger buffer for faster copy
entryStream.CopyTo(fileStream, bufferSize: 262144); // 256 KB
}
// 2. Use custom buffer for writing
using (var entryStream = reader.OpenEntryStream())
using (var fileStream = File.Create(@"C:\output\file.txt"))
{
byte[] buffer = new byte[262144]; // 256 KB
int bytesRead;
while ((bytesRead = entryStream.Read(buffer, 0, buffer.Length)) > 0)
{
fileStream.Write(buffer, 0, bytesRead);
}
}
```
---
## Streaming Large Files
### Non-Seekable Stream Patterns
For processing archives from downloads or pipes:
```csharp
// Download stream (non-seekable)
using (var httpStream = await httpClient.GetStreamAsync(url))
using (var reader = ReaderFactory.Open(httpStream))
{
// Process entries as they arrive
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
reader.WriteEntryToDirectory(@"C:\output");
}
}
}
```
**Performance Tips:**
- Don't try to buffer the entire stream
- Process entries immediately
- Use async APIs for better responsiveness
### Download-Then-Extract vs Streaming
Choose based on your constraints:
| Approach | When to Use |
|----------|------------|
| **Download then extract** | Moderate size, need random access |
| **Stream during download** | Large files, bandwidth limited, memory constrained |
```csharp
// Download then extract (requires disk space)
var archivePath = await DownloadFile(url, @"C:\temp\archive.zip");
using (var archive = ZipArchive.Open(archivePath))
{
archive.WriteToDirectory(@"C:\output");
}
// Stream during download (on-the-fly extraction)
using (var httpStream = await httpClient.GetStreamAsync(url))
using (var reader = ReaderFactory.Open(httpStream))
{
while (reader.MoveToNextEntry())
{
reader.WriteEntryToDirectory(@"C:\output");
}
}
```
---
## Solid Archive Optimization
### Why Solid Archives Are Slow
Solid archives (Rar, 7Zip) group files together in a single compressed stream:
```
Solid Archive Layout:
[Header] [Compressed Stream] [Footer]
├─ File1 compressed data
├─ File2 compressed data
├─ File3 compressed data
└─ File4 compressed data
```
Extracting File3 requires decompressing File1 and File2 first.
### Sequential vs Random Extraction
**Random Extraction (Slow):**
```csharp
using (var archive = RarArchive.Open("solid.rar"))
{
foreach (var entry in archive.Entries)
{
entry.WriteToFile(@"C:\output\" + entry.Key); // ✗ Slow!
// Each entry triggers full decompression from start
}
}
```
**Sequential Extraction (Fast):**
```csharp
// Method 1: Use WriteToDirectory (recommended)
using (var archive = RarArchive.Open("solid.rar"))
{
    archive.WriteToDirectory(@"C:\output", new ExtractionOptions
    {
        ExtractFullPath = true,
        Overwrite = true
    });

    // Method 2: Use ExtractAllEntries
    archive.ExtractAllEntries();
}

// Method 3: Use Reader API (also sequential)
using (var stream = File.OpenRead("solid.rar"))
using (var reader = RarReader.Open(stream))
{
    while (reader.MoveToNextEntry())
    {
        reader.WriteEntryToDirectory(@"C:\output");
    }
}
```
**Performance Impact:**
- Random extraction: O(n²) - very slow for many files
- Sequential extraction: O(n) - 10-100x faster
### Best Practices for Solid Archives
1. **Always extract sequentially** when possible
2. **Use Reader API** for large solid archives
3. **Process entries in order** from the archive
4. **Consider using 7Zip command-line** for scripted extractions
---
## Compression Level Trade-offs
### Deflate/GZip Levels
```csharp
// Level 1 = Fastest, largest size
// Level 6 = Default (balanced)
// Level 9 = Slowest, best compression
// Write with different compression levels
using (var archive = ZipArchive.Create())
{
archive.AddAllFromDirectory(@"D:\data");
// Fast compression (level 1)
archive.SaveTo("fast.zip", new WriterOptions(CompressionType.Deflate)
{
CompressionLevel = 1
});
// Default compression (level 6)
archive.SaveTo("default.zip", CompressionType.Deflate);
// Best compression (level 9)
archive.SaveTo("best.zip", new WriterOptions(CompressionType.Deflate)
{
CompressionLevel = 9
});
}
```
**Speed vs Size:**
| Level | Speed | Size | Use Case |
|-------|-------|------|----------|
| 1 | 10x | 90% | Network, streaming |
| 6 | 1x | 75% | Default (good balance) |
| 9 | 0.1x | 65% | Archival, static storage |
### BZip2 Block Size
```csharp
// BZip2 block size affects memory and compression
// 100K to 900K (default 900K)
// Smaller block size = lower memory, faster
// Larger block size = better compression, slower
using (var archive = TarArchive.Create())
{
archive.AddAllFromDirectory(@"D:\data");
// These are preset in WriterOptions via CompressionLevel
archive.SaveTo("archive.tar.bz2", CompressionType.BZip2);
}
```
### LZMA Settings
LZMA compression is very powerful but memory-intensive:
```csharp
// LZMA (7Zip, .tar.lzma):
// - Dictionary size: 16 KB to 1 GB (default 32 MB)
// - Faster preset: smaller dictionary
// - Better compression: larger dictionary
// Preset via CompressionType
using (var archive = TarArchive.Create())
{
archive.AddAllFromDirectory(@"D:\data");
archive.SaveTo("archive.tar.xz", CompressionType.LZMA); // Default settings
}
```
---
## Async Performance
### When Async Helps
Async is beneficial when:
- **Long I/O operations** (network, slow disks)
- **UI responsiveness** needed (Windows Forms, WPF, Blazor)
- **Server applications** (ASP.NET, multiple concurrent operations)
```csharp
// Async extraction (non-blocking)
using (var archive = ZipArchive.Open("archive.zip"))
{
await archive.WriteToDirectoryAsync(
@"C:\output",
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
cancellationToken
);
}
// Thread can handle other work while I/O happens
```
### When Async Doesn't Help
Async doesn't improve performance for:
- **CPU-bound operations** (already fast)
- **Local SSD I/O** (I/O is fast enough)
- **Single-threaded scenarios** (no parallelism benefit)
```csharp
// Sync extraction (simpler, same performance on fast I/O)
using (var archive = ZipArchive.Open("archive.zip"))
{
archive.WriteToDirectory(
@"C:\output",
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
// Simple and fast - no async needed
```
### Cancellation Pattern
```csharp
var cts = new CancellationTokenSource();
// Cancel after 5 minutes
cts.CancelAfter(TimeSpan.FromMinutes(5));
try
{
using (var archive = ZipArchive.Open("archive.zip"))
{
await archive.WriteToDirectoryAsync(
@"C:\output",
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
cts.Token
);
}
}
catch (OperationCanceledException)
{
Console.WriteLine("Extraction cancelled");
// Clean up partial extraction if needed
}
```
---
## Practical Performance Tips
### 1. Choose the Right API
| Scenario | API | Why |
|----------|-----|-----|
| Small archives | Archive | Faster random access |
| Large archives | Reader | Lower memory |
| Streaming | Reader | Works on non-seekable streams |
| Download streams | Reader | Async extraction while downloading |
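For the download-stream row above, a hedged sketch (the `HttpClient` plumbing and URL are illustrative, not part of SharpCompress):
```csharp
using System.Net.Http;
using SharpCompress.Common;
using SharpCompress.Readers;

// Sketch: extract while downloading, without buffering to disk first.
// The Reader API only needs a forward-only stream.
using var http = new HttpClient();
using var download = await http.GetStreamAsync("https://example.com/archive.tar.gz");
using (var reader = ReaderFactory.Open(download))
{
    await reader.WriteAllToDirectoryAsync(
        @"C:\output",
        new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
        cancellationToken
    );
}
```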
### 2. Batch Operations
```csharp
// ✗ Slow - reopens and re-parses the same archive for each entry
foreach (var entryKey in entryKeys)
{
    using (var archive = ZipArchive.Open("archive.zip"))
    {
        // extract a single entry, then dispose and reopen for the next
    }
}
// ✓ Better - process multiple entries at once
using (var archive = ZipArchive.Open("archive.zip"))
{
archive.WriteToDirectory(@"C:\output");
}
```
### 3. Profile Your Code
```csharp
var sw = Stopwatch.StartNew();
using (var archive = ZipArchive.Open("large.zip"))
{
archive.WriteToDirectory(@"C:\output");
}
sw.Stop();
Console.WriteLine($"Extraction took {sw.ElapsedMilliseconds}ms");
// Measure memory before/after
var beforeMem = GC.GetTotalMemory(true);
// ... do work ...
var afterMem = GC.GetTotalMemory(true);
Console.WriteLine($"Memory used: {(afterMem - beforeMem) / 1024 / 1024}MB");
```
---
## Troubleshooting Performance
### Extraction is Slow
1. **Check if solid archive** → Use sequential extraction
2. **Check API** → Reader API might be faster for large files
3. **Check compression level** → Higher levels are slower to decompress
4. **Check I/O** → Network drives are much slower than SSD
5. **Check buffer size** → May need larger buffers for network shares (see the sketch below)
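For item 5, a sketch of raising the read buffer (this assumes the `ReaderOptions.BufferSize` property; the 1 MB value is arbitrary):
```csharp
using SharpCompress.Archives.Zip;
using SharpCompress.Readers;

// Sketch: a larger read buffer reduces round-trips on high-latency
// storage such as network shares. BufferSize is assumed available here.
var options = new ReaderOptions { BufferSize = 1 << 20 }; // 1 MB, illustrative
using (var archive = ZipArchive.Open(@"\\server\share\archive.zip", options))
{
    archive.WriteToDirectory(@"C:\output");
}
```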
### High Memory Usage
1. **Use Reader API** instead of Archive API
2. **Process entries immediately** rather than buffering
3. **Reduce compression level** if writing
4. **Check for memory leaks** in your code
### CPU Usage at 100%
1. **Normal for compression** - especially with high compression levels
2. **Consider lower level** for faster processing
3. **Reduce parallelism** if processing multiple archives
4. **Check if awaiting properly** in async code
---
## Related Documentation
- [USAGE.md](USAGE.md) - Usage examples with performance considerations
- [FORMATS.md](FORMATS.md) - Format-specific performance notes

View File

@@ -1,335 +0,0 @@
# SharpCompress Usage
## Async/Await Support (Beta)
SharpCompress now provides full async/await support for all I/O operations. All `Read`, `Write`, and extraction operations have async equivalents ending in `Async` that accept an optional `CancellationToken`. This enables better performance and scalability for I/O-bound operations.
**Key Async Methods:**
- `reader.WriteEntryToAsync(stream, cancellationToken)` - Extract entry asynchronously
- `reader.WriteAllToDirectoryAsync(path, options, cancellationToken)` - Extract all asynchronously
- `writer.WriteAsync(filename, stream, modTime, cancellationToken)` - Write entry asynchronously
- `writer.WriteAllAsync(directory, pattern, searchOption, cancellationToken)` - Write directory asynchronously
- `entry.OpenEntryStreamAsync(cancellationToken)` - Open entry stream asynchronously
See [Async Examples](#async-examples) section below for usage patterns.
## Stream Rules
When dealing with Streams, the rule is that you don't close a stream you didn't create. In practice, this means you should always put a Stream you *did* create in a using block so it gets disposed.
However, the .NET Framework often has classes that will dispose streams by default to make things "easy" like the following:
```C#
using (var reader = new StreamReader(File.Open("foo")))
{
...
}
```
In this example, `reader` gets disposed. Stream rules say the `FileStream` created by `File.Open` should remain open, but the .NET Framework closes it for you by default unless you use a constructor overload that leaves it open. In general, you should be writing Stream code like this:
```C#
using (var fileStream = File.Open("foo"))
using (var reader = new StreamReader(fileStream))
{
...
}
```
To deal with the "correct" rules as well as the expectations of users, I've decided to always close wrapped streams as of 0.21.
To be explicit though, consider always using the overloads that use `ReaderOptions` or `WriterOptions` and explicitly set `LeaveStreamOpen` the way you want.
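For example, a short sketch with the Reader API (the same `LeaveStreamOpen` flag exists on `WriterOptions`):
```C#
using (Stream stream = File.OpenRead("archive.zip"))
using (var reader = ReaderFactory.Open(stream, new ReaderOptions { LeaveStreamOpen = true }))
{
    // ... extract entries; the stream stays open after the reader is disposed
}
```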
If you use the compression stream classes directly and don't want the wrapped stream to be closed, use `NonDisposingStream` as a wrapper to prevent the stream being disposed. The change in 0.21 simplified a lot even though the usage is a bit more convoluted.
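A minimal sketch, assuming the `NonDisposingStream(Stream)` constructor in `SharpCompress.IO`:
```C#
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
using SharpCompress.IO;

var data = new byte[] { 1, 2, 3 };
var memory = new MemoryStream();
// Disposing the GZipStream does not dispose the wrapped MemoryStream.
using (var gzip = new GZipStream(new NonDisposingStream(memory), CompressionMode.Compress))
{
    gzip.Write(data, 0, data.Length);
}
memory.Position = 0; // still open: read the compressed bytes back
```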
## Samples
Also, look over the tests for more thorough [examples](https://github.com/adamhathcock/sharpcompress/tree/master/tests/SharpCompress.Test)
### Create Zip Archive from multiple files
```C#
using(var archive = ZipArchive.Create())
{
archive.AddEntry("file01.txt", "C:\\file01.txt");
archive.AddEntry("file02.txt", "C:\\file02.txt");
...
archive.SaveTo("C:\\temp.zip", CompressionType.Deflate);
}
```
### Create Zip Archive from all files in a directory to a file
```C#
using (var archive = ZipArchive.Create())
{
archive.AddAllFromDirectory("D:\\temp");
archive.SaveTo("C:\\temp.zip", CompressionType.Deflate);
}
```
### Create Zip Archive from all files in a directory and save in memory
```C#
var memoryStream = new MemoryStream();
using (var archive = ZipArchive.Create())
{
archive.AddAllFromDirectory("D:\\temp");
archive.SaveTo(memoryStream, new WriterOptions(CompressionType.Deflate)
{
LeaveStreamOpen = true
});
}
//reset memoryStream to be usable now
memoryStream.Position = 0;
```
### Extract all files from a rar file to a directory using RarArchive
Note: Extracting a solid rar or 7z file needs to be done in sequential order to get acceptable decompression speed.
`ExtractAllEntries` is primarily intended for solid archives (like solid Rar) or 7Zip archives, where sequential extraction provides the best performance. For general/simple extraction with any supported archive type, use `archive.WriteToDirectory()` instead.
```C#
using (var archive = RarArchive.Open("Test.rar"))
{
// Simple extraction with RarArchive; this WriteToDirectory pattern works for all archive types
archive.WriteToDirectory(@"D:\temp", new ExtractionOptions()
{
ExtractFullPath = true,
Overwrite = true
});
}
```
### Iterate over all files from a Rar file using RarArchive
```C#
using (var archive = RarArchive.Open("Test.rar"))
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
Console.WriteLine($"{entry.Key}: {entry.Size} bytes");
}
}
```
### Extract solid Rar or 7Zip archives with progress reporting
`ExtractAllEntries` is restricted to solid archives (such as solid Rar) and 7Zip archives. For these archive types, sequential extraction with progress reporting looks like this:
```C#
using SharpCompress.Common;
using SharpCompress.Readers;
var progress = new Progress<ProgressReport>(report =>
{
Console.WriteLine($"Extracting {report.EntryPath}: {report.PercentComplete}%");
});
using (var archive = RarArchive.Open("archive.rar", new ReaderOptions { Progress = progress })) // Must be solid Rar or 7Zip
{
archive.WriteToDirectory(@"D:\output", new ExtractionOptions()
{
ExtractFullPath = true,
Overwrite = true
});
}
```
### Use ReaderFactory to autodetect archive type and extract entries to a directory
```C#
using (Stream stream = File.OpenRead("Tar.tar.bz2"))
using (var reader = ReaderFactory.Open(stream))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Console.WriteLine(reader.Entry.Key);
reader.WriteEntryToDirectory(@"C:\temp", new ExtractionOptions()
{
ExtractFullPath = true,
Overwrite = true
});
}
}
}
```
### Use ReaderFactory to autodetect archive type and Open the entry stream
```C#
using (Stream stream = File.OpenRead("Tar.tar.bz2"))
using (var reader = ReaderFactory.Open(stream))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
using (var entryStream = reader.OpenEntryStream())
{
entryStream.CopyTo(...);
}
}
}
}
```
### Use WriterFactory to write all files from a directory in a streaming manner.
```C#
using (Stream stream = File.OpenWrite("C:\\temp.tgz"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)
{
    LeaveStreamOpen = true
}))
{
writer.WriteAll("D:\\temp", "*", SearchOption.AllDirectories);
}
```
### Extract a zip with non-UTF8 encoded filenames (cp932)
```C#
var opts = new SharpCompress.Readers.ReaderOptions();
var encoding = Encoding.GetEncoding(932);
opts.ArchiveEncoding = new SharpCompress.Common.ArchiveEncoding();
opts.ArchiveEncoding.CustomDecoder = (data, index, count) =>
{
    return encoding.GetString(data, index, count);
};
using var archive = SharpCompress.Archives.Zip.ZipArchive.Open("test.zip", opts);
foreach (var entry in archive.Entries)
{
Console.WriteLine($"{entry.Key}");
}
```
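Note: on .NET Core and .NET 5+, code-page encodings such as 932 are not available by default; they require the `System.Text.Encoding.CodePages` package and a one-time registration before the snippet above will work:
```C#
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
```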
## Async Examples
### Async Reader Examples
**Extract single entry asynchronously:**
```C#
using (Stream stream = File.OpenRead("archive.zip"))
using (var reader = ReaderFactory.Open(stream))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
            using (var outputStream = File.Create("output.bin"))
            {
                await reader.WriteEntryToAsync(outputStream, cancellationToken);
            }
}
}
}
```
**Extract all entries asynchronously:**
```C#
using (Stream stream = File.OpenRead("archive.tar.gz"))
using (var reader = ReaderFactory.Open(stream))
{
await reader.WriteAllToDirectoryAsync(
@"D:\temp",
new ExtractionOptions()
{
ExtractFullPath = true,
Overwrite = true
},
cancellationToken
);
}
```
**Open and process entry stream asynchronously:**
```C#
using (var archive = ZipArchive.Open("archive.zip"))
{
foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
{
using (var entryStream = await entry.OpenEntryStreamAsync(cancellationToken))
{
// Process the decompressed stream asynchronously
await ProcessStreamAsync(entryStream, cancellationToken);
}
}
}
```
### Async Writer Examples
**Write single file asynchronously:**
```C#
using (Stream archiveStream = File.OpenWrite("output.zip"))
using (var writer = WriterFactory.Open(archiveStream, ArchiveType.Zip, CompressionType.Deflate))
{
using (Stream fileStream = File.OpenRead("input.txt"))
{
await writer.WriteAsync("entry.txt", fileStream, DateTime.Now, cancellationToken);
}
}
```
**Write entire directory asynchronously:**
```C#
using (Stream stream = File.OpenWrite("backup.tar.gz"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)))
{
await writer.WriteAllAsync(
@"D:\files",
"*",
SearchOption.AllDirectories,
cancellationToken
);
}
```
**Write with progress tracking and cancellation:**
```C#
var cts = new CancellationTokenSource();
// Set timeout or cancel from UI
cts.CancelAfter(TimeSpan.FromMinutes(5));
using (Stream stream = File.OpenWrite("archive.zip"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
{
try
{
await writer.WriteAllAsync(@"D:\data", "*", SearchOption.AllDirectories, cts.Token);
}
catch (OperationCanceledException)
{
Console.WriteLine("Operation was cancelled");
}
}
```
### Archive Async Examples
**Extract from archive asynchronously:**
```C#
using (var archive = ZipArchive.Open("archive.zip"))
{
// Simple async extraction - works for all archive types
await archive.WriteToDirectoryAsync(
@"C:\output",
new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
cancellationToken
);
}
```
**Benefits of Async Operations:**
- Non-blocking I/O for better application responsiveness
- Improved scalability for server applications
- Support for cancellation via CancellationToken
- Better resource utilization in async/await contexts
- Compatible with modern .NET async patterns

View File

@@ -1,6 +1,5 @@
{
"sdk": {
"version": "10.0.100",
"rollForward": "latestFeature"
"version": "5.0.101"
}
}

View File

@@ -1,486 +0,0 @@
// Copyright (c) Six Labors.
// Licensed under the Apache License, Version 2.0.
#if !NETSTANDARD2_0 && !NETSTANDARD2_1 && !NETFRAMEWORK
#define SUPPORTS_RUNTIME_INTRINSICS
#define SUPPORTS_HOTPATH
#endif
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
#if SUPPORTS_RUNTIME_INTRINSICS
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
#endif
#pragma warning disable IDE0007 // Use implicit type
namespace SharpCompress.Algorithms;
/// <summary>
/// Calculates the 32 bit Adler checksum of a given buffer according to
/// RFC 1950 (ZLIB Compressed Data Format Specification version 3.3).
/// </summary>
internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/blob/main/src/ImageSharp/Compression/Zlib/Adler32.cs
{
/// <summary>
/// Global inlining options. Helps temporarily disable inlining for better profiler output.
/// </summary>
private static class InliningOptions // From https://github.com/SixLabors/ImageSharp/blob/main/src/ImageSharp/Common/Helpers/InliningOptions.cs
{
/// <summary>
/// <see cref="MethodImplOptions.AggressiveInlining"/> regardless of the build conditions.
/// </summary>
public const MethodImplOptions AlwaysInline = MethodImplOptions.AggressiveInlining;
#if PROFILING
public const MethodImplOptions HotPath = MethodImplOptions.NoInlining;
public const MethodImplOptions ShortMethod = MethodImplOptions.NoInlining;
#else
#if SUPPORTS_HOTPATH
public const MethodImplOptions HotPath = MethodImplOptions.AggressiveOptimization;
#else
public const MethodImplOptions HotPath = MethodImplOptions.AggressiveInlining;
#endif
public const MethodImplOptions ShortMethod = MethodImplOptions.AggressiveInlining;
#endif
public const MethodImplOptions ColdPath = MethodImplOptions.NoInlining;
}
#if SUPPORTS_RUNTIME_INTRINSICS
/// <summary>
/// Provides optimized static methods for trigonometric, logarithmic,
/// and other common mathematical functions.
/// </summary>
private static class Numerics // From https://github.com/SixLabors/ImageSharp/blob/main/src/ImageSharp/Common/Helpers/Numerics.cs
{
/// <summary>
/// Reduces elements of the vector into one sum.
/// </summary>
/// <param name="accumulator">The accumulator to reduce.</param>
/// <returns>The sum of all elements.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int ReduceSum(Vector256<int> accumulator)
{
// Add upper lane to lower lane.
var vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper());
// Add odd to even.
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_11_01_01));
// Add high to low.
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_10_11_10));
return Sse2.ConvertToInt32(vsum);
}
/// <summary>
/// Reduces even elements of the vector into one sum.
/// </summary>
/// <param name="accumulator">The accumulator to reduce.</param>
/// <returns>The sum of even elements.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int EvenReduceSum(Vector256<int> accumulator)
{
var vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper()); // add upper lane to lower lane
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_10_11_10)); // add high to low
// Vector128<int>.ToScalar() isn't optimized pre-net5.0 https://github.com/dotnet/runtime/pull/37882
return Sse2.ConvertToInt32(vsum);
}
}
#endif
/// <summary>
/// The default initial seed value of an Adler32 checksum calculation.
/// </summary>
public const uint SeedValue = 1U;
// Largest prime smaller than 65536
private const uint BASE = 65521;
// NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1
private const uint NMAX = 5552;
#if SUPPORTS_RUNTIME_INTRINSICS
private const int MinBufferSize = 64;
private const int BlockSize = 1 << 5;
// The C# compiler emits this as a compile-time constant embedded in the PE file.
private static ReadOnlySpan<byte> Tap1Tap2 =>
new byte[]
{
32,
31,
30,
29,
28,
27,
26,
25,
24,
23,
22,
21,
20,
19,
18,
17, // tap1
16,
15,
14,
13,
12,
11,
10,
9,
8,
7,
6,
5,
4,
3,
2,
1, // tap2
};
#endif
/// <summary>
/// Calculates the Adler32 checksum with the bytes taken from the span.
/// </summary>
/// <param name="buffer">The readonly span of bytes.</param>
/// <returns>The <see cref="uint"/>.</returns>
[MethodImpl(InliningOptions.ShortMethod)]
public static uint Calculate(ReadOnlySpan<byte> buffer) => Calculate(SeedValue, buffer);
/// <summary>
/// Calculates the Adler32 checksum with the bytes taken from the span and seed.
/// </summary>
/// <param name="adler">The input Adler32 value.</param>
/// <param name="buffer">The readonly span of bytes.</param>
/// <returns>The <see cref="uint"/>.</returns>
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
public static uint Calculate(uint adler, ReadOnlySpan<byte> buffer)
{
if (buffer.IsEmpty)
{
return adler;
}
#if SUPPORTS_RUNTIME_INTRINSICS
if (Avx2.IsSupported && buffer.Length >= MinBufferSize)
{
return CalculateAvx2(adler, buffer);
}
if (Ssse3.IsSupported && buffer.Length >= MinBufferSize)
{
return CalculateSse(adler, buffer);
}
return CalculateScalar(adler, buffer);
#else
return CalculateScalar(adler, buffer);
#endif
}
// Based on https://github.com/chromium/chromium/blob/master/third_party/zlib/adler32_simd.c
#if SUPPORTS_RUNTIME_INTRINSICS
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
private static unsafe uint CalculateSse(uint adler, ReadOnlySpan<byte> buffer)
{
var s1 = adler & 0xFFFF;
var s2 = (adler >> 16) & 0xFFFF;
// Process the data in blocks.
var length = (uint)buffer.Length;
var blocks = length / BlockSize;
length -= blocks * BlockSize;
fixed (byte* bufferPtr = &MemoryMarshal.GetReference(buffer))
{
fixed (byte* tapPtr = &MemoryMarshal.GetReference(Tap1Tap2))
{
var localBufferPtr = bufferPtr;
// _mm_setr_epi8 on x86
var tap1 = Sse2.LoadVector128((sbyte*)tapPtr);
var tap2 = Sse2.LoadVector128((sbyte*)(tapPtr + 0x10));
var zero = Vector128<byte>.Zero;
var ones = Vector128.Create((short)1);
while (blocks > 0)
{
var n = NMAX / BlockSize; /* The NMAX constraint. */
if (n > blocks)
{
n = blocks;
}
blocks -= n;
// Process n blocks of data. At most NMAX data bytes can be
// processed before s2 must be reduced modulo BASE.
var v_ps = Vector128.CreateScalar(s1 * n);
var v_s2 = Vector128.CreateScalar(s2);
var v_s1 = Vector128<uint>.Zero;
do
{
// Load 32 input bytes.
var bytes1 = Sse3.LoadDquVector128(localBufferPtr);
var bytes2 = Sse3.LoadDquVector128(localBufferPtr + 0x10);
// Add previous block byte sum to v_ps.
v_ps = Sse2.Add(v_ps, v_s1);
// Horizontally add the bytes for s1, multiply-adds the
// bytes by [ 32, 31, 30, ... ] for s2.
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes1, zero).AsUInt32());
var mad1 = Ssse3.MultiplyAddAdjacent(bytes1, tap1);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad1, ones).AsUInt32());
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes2, zero).AsUInt32());
var mad2 = Ssse3.MultiplyAddAdjacent(bytes2, tap2);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad2, ones).AsUInt32());
localBufferPtr += BlockSize;
} while (--n > 0);
v_s2 = Sse2.Add(v_s2, Sse2.ShiftLeftLogical(v_ps, 5));
// Sum epi32 ints v_s1(s2) and accumulate in s1(s2).
const byte S2301 = 0b1011_0001; // A B C D -> B A D C
const byte S1032 = 0b0100_1110; // A B C D -> C D A B
v_s1 = Sse2.Add(v_s1, Sse2.Shuffle(v_s1, S1032));
s1 += v_s1.ToScalar();
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S2301));
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S1032));
s2 = v_s2.ToScalar();
// Reduce.
s1 %= BASE;
s2 %= BASE;
}
if (length > 0)
{
HandleLeftOver(localBufferPtr, length, ref s1, ref s2);
}
return s1 | (s2 << 16);
}
}
}
// Based on: https://github.com/zlib-ng/zlib-ng/blob/develop/arch/x86/adler32_avx2.c
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
public static unsafe uint CalculateAvx2(uint adler, ReadOnlySpan<byte> buffer)
{
var s1 = adler & 0xFFFF;
var s2 = (adler >> 16) & 0xFFFF;
var length = (uint)buffer.Length;
fixed (byte* bufferPtr = &MemoryMarshal.GetReference(buffer))
{
var localBufferPtr = bufferPtr;
var zero = Vector256<byte>.Zero;
var dot3v = Vector256.Create((short)1);
var dot2v = Vector256.Create(
32,
31,
30,
29,
28,
27,
26,
25,
24,
23,
22,
21,
20,
19,
18,
17,
16,
15,
14,
13,
12,
11,
10,
9,
8,
7,
6,
5,
4,
3,
2,
1
);
// Process n blocks of data. At most NMAX data bytes can be
// processed before s2 must be reduced modulo BASE.
var vs1 = Vector256.CreateScalar(s1);
var vs2 = Vector256.CreateScalar(s2);
while (length >= 32)
{
var k = length < NMAX ? (int)length : (int)NMAX;
k -= k % 32;
length -= (uint)k;
var vs10 = vs1;
var vs3 = Vector256<uint>.Zero;
while (k >= 32)
{
// Load 32 input bytes.
var block = Avx.LoadVector256(localBufferPtr);
// Sum of abs diff, resulting in 2 x int32's
var vs1sad = Avx2.SumAbsoluteDifferences(block, zero);
vs1 = Avx2.Add(vs1, vs1sad.AsUInt32());
vs3 = Avx2.Add(vs3, vs10);
// sum 32 uint8s to 16 shorts.
var vshortsum2 = Avx2.MultiplyAddAdjacent(block, dot2v);
// sum 16 shorts to 8 uint32s.
var vsum2 = Avx2.MultiplyAddAdjacent(vshortsum2, dot3v);
vs2 = Avx2.Add(vsum2.AsUInt32(), vs2);
vs10 = vs1;
localBufferPtr += BlockSize;
k -= 32;
}
// Defer the multiplication with 32 to outside of the loop.
vs3 = Avx2.ShiftLeftLogical(vs3, 5);
vs2 = Avx2.Add(vs2, vs3);
s1 = (uint)Numerics.EvenReduceSum(vs1.AsInt32());
s2 = (uint)Numerics.ReduceSum(vs2.AsInt32());
s1 %= BASE;
s2 %= BASE;
vs1 = Vector256.CreateScalar(s1);
vs2 = Vector256.CreateScalar(s2);
}
if (length > 0)
{
HandleLeftOver(localBufferPtr, length, ref s1, ref s2);
}
return s1 | (s2 << 16);
}
}
private static unsafe void HandleLeftOver(
byte* localBufferPtr,
uint length,
ref uint s1,
ref uint s2
)
{
if (length >= 16)
{
s2 += s1 += localBufferPtr[0];
s2 += s1 += localBufferPtr[1];
s2 += s1 += localBufferPtr[2];
s2 += s1 += localBufferPtr[3];
s2 += s1 += localBufferPtr[4];
s2 += s1 += localBufferPtr[5];
s2 += s1 += localBufferPtr[6];
s2 += s1 += localBufferPtr[7];
s2 += s1 += localBufferPtr[8];
s2 += s1 += localBufferPtr[9];
s2 += s1 += localBufferPtr[10];
s2 += s1 += localBufferPtr[11];
s2 += s1 += localBufferPtr[12];
s2 += s1 += localBufferPtr[13];
s2 += s1 += localBufferPtr[14];
s2 += s1 += localBufferPtr[15];
localBufferPtr += 16;
length -= 16;
}
while (length-- > 0)
{
s2 += s1 += *localBufferPtr++;
}
if (s1 >= BASE)
{
s1 -= BASE;
}
s2 %= BASE;
}
#endif
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
private static unsafe uint CalculateScalar(uint adler, ReadOnlySpan<byte> buffer)
{
var s1 = adler & 0xFFFF;
var s2 = (adler >> 16) & 0xFFFF;
uint k;
fixed (byte* bufferPtr = buffer)
{
var localBufferPtr = bufferPtr;
var length = (uint)buffer.Length;
while (length > 0)
{
k = length < NMAX ? length : NMAX;
length -= k;
while (k >= 16)
{
s2 += s1 += localBufferPtr[0];
s2 += s1 += localBufferPtr[1];
s2 += s1 += localBufferPtr[2];
s2 += s1 += localBufferPtr[3];
s2 += s1 += localBufferPtr[4];
s2 += s1 += localBufferPtr[5];
s2 += s1 += localBufferPtr[6];
s2 += s1 += localBufferPtr[7];
s2 += s1 += localBufferPtr[8];
s2 += s1 += localBufferPtr[9];
s2 += s1 += localBufferPtr[10];
s2 += s1 += localBufferPtr[11];
s2 += s1 += localBufferPtr[12];
s2 += s1 += localBufferPtr[13];
s2 += s1 += localBufferPtr[14];
s2 += s1 += localBufferPtr[15];
localBufferPtr += 16;
k -= 16;
}
while (k-- > 0)
{
s2 += s1 += *localBufferPtr++;
}
s1 %= BASE;
s2 %= BASE;
}
return (s2 << 16) | s1;
}
}
}

View File

@@ -0,0 +1,285 @@
// Copyright (c) Six Labors and contributors.
// Licensed under the GNU Affero General Public License, Version 3.
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
#if !NETSTANDARD2_0 && !NETSTANDARD2_1
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
#endif
namespace SharpCompress.Algorithms
{
/// <summary>
/// Calculates the 32 bit Adler checksum of a given buffer according to
/// RFC 1950 (ZLIB Compressed Data Format Specification version 3.3).
/// </summary>
internal static class Adler32
{
/// <summary>
/// The default initial seed value of an Adler32 checksum calculation.
/// </summary>
public const uint SeedValue = 1U;
#if !NETSTANDARD2_0 && !NETSTANDARD2_1
private const int MinBufferSize = 64;
#endif
// Largest prime smaller than 65536
private const uint BASE = 65521;
// NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1
private const uint NMAX = 5552;
/// <summary>
/// Calculates the Adler32 checksum with the bytes taken from the span.
/// </summary>
/// <param name="buffer">The readonly span of bytes.</param>
/// <returns>The <see cref="uint"/>.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static uint Calculate(ReadOnlySpan<byte> buffer)
{
return Calculate(SeedValue, buffer);
}
/// <summary>
/// Calculates the Adler32 checksum with the bytes taken from the span and seed.
/// </summary>
/// <param name="adler">The input Adler32 value.</param>
/// <param name="buffer">The readonly span of bytes.</param>
/// <returns>The <see cref="uint"/>.</returns>
public static uint Calculate(uint adler, ReadOnlySpan<byte> buffer)
{
if (buffer.IsEmpty)
{
return adler;
}
#if !NETSTANDARD2_0 && !NETSTANDARD2_1
if (Sse3.IsSupported && buffer.Length >= MinBufferSize)
{
return CalculateSse(adler, buffer);
}
return CalculateScalar(adler, buffer);
#else
return CalculateScalar(adler, buffer);
#endif
}
// Based on https://github.com/chromium/chromium/blob/master/third_party/zlib/adler32_simd.c
#if !NETSTANDARD2_0 && !NETSTANDARD2_1
private static unsafe uint CalculateSse(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
// Process the data in blocks.
const int BLOCK_SIZE = 1 << 5;
uint length = (uint)buffer.Length;
uint blocks = length / BLOCK_SIZE;
length -= blocks * BLOCK_SIZE;
int index = 0;
fixed (byte* bufferPtr = &buffer[0])
{
index += (int)blocks * BLOCK_SIZE;
var localBufferPtr = bufferPtr;
// _mm_setr_epi8 on x86
var tap1 = Vector128.Create(32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17);
var tap2 = Vector128.Create(16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1);
Vector128<byte> zero = Vector128<byte>.Zero;
var ones = Vector128.Create((short)1);
while (blocks > 0)
{
uint n = NMAX / BLOCK_SIZE; /* The NMAX constraint. */
if (n > blocks)
{
n = blocks;
}
blocks -= n;
// Process n blocks of data. At most NMAX data bytes can be
// processed before s2 must be reduced modulo BASE.
Vector128<int> v_ps = Vector128.CreateScalar(s1 * n).AsInt32();
Vector128<int> v_s2 = Vector128.CreateScalar(s2).AsInt32();
Vector128<int> v_s1 = Vector128<int>.Zero;
do
{
// Load 32 input bytes.
Vector128<byte> bytes1 = Sse3.LoadDquVector128(localBufferPtr);
Vector128<byte> bytes2 = Sse3.LoadDquVector128(localBufferPtr + 16);
// Add previous block byte sum to v_ps.
v_ps = Sse2.Add(v_ps, v_s1);
// Horizontally add the bytes for s1, multiply-adds the
// bytes by [ 32, 31, 30, ... ] for s2.
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes1, zero).AsInt32());
Vector128<short> mad1 = Ssse3.MultiplyAddAdjacent(bytes1, tap1);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad1, ones));
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes2, zero).AsInt32());
Vector128<short> mad2 = Ssse3.MultiplyAddAdjacent(bytes2, tap2);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad2, ones));
localBufferPtr += BLOCK_SIZE;
}
while (--n > 0);
v_s2 = Sse2.Add(v_s2, Sse2.ShiftLeftLogical(v_ps, 5));
// Sum epi32 ints v_s1(s2) and accumulate in s1(s2).
const byte S2301 = 0b1011_0001; // A B C D -> B A D C
const byte S1032 = 0b0100_1110; // A B C D -> C D A B
v_s1 = Sse2.Add(v_s1, Sse2.Shuffle(v_s1, S2301));
v_s1 = Sse2.Add(v_s1, Sse2.Shuffle(v_s1, S1032));
s1 += (uint)v_s1.ToScalar();
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S2301));
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S1032));
s2 = (uint)v_s2.ToScalar();
// Reduce.
s1 %= BASE;
s2 %= BASE;
}
}
ref byte bufferRef = ref MemoryMarshal.GetReference(buffer);
if (length > 0)
{
if (length >= 16)
{
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
length -= 16;
}
while (length-- > 0)
{
s2 += s1 += Unsafe.Add(ref bufferRef, index++);
}
if (s1 >= BASE)
{
s1 -= BASE;
}
s2 %= BASE;
}
return s1 | (s2 << 16);
}
#endif
private static uint CalculateScalar(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
uint k;
ref byte bufferRef = ref MemoryMarshal.GetReference<byte>(buffer);
uint length = (uint)buffer.Length;
int index = 0;
while (length > 0)
{
k = length < NMAX ? length : NMAX;
length -= k;
while (k >= 16)
{
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
k -= 16;
}
if (k != 0)
{
do
{
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
}
while (--k != 0);
}
s1 %= BASE;
s2 %= BASE;
}
return (s2 << 16) | s1;
}
}
}

View File

@@ -1,240 +1,149 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IAsyncArchive
where TEntry : IArchiveEntry
where TVolume : IVolume
namespace SharpCompress.Archives
{
private readonly LazyReadOnlyCollection<TVolume> _lazyVolumes;
private readonly LazyReadOnlyCollection<TEntry> _lazyEntries;
private bool _disposed;
private readonly SourceStream? _sourceStream;
protected ReaderOptions ReaderOptions { get; }
internal AbstractArchive(ArchiveType type, SourceStream sourceStream)
public abstract class AbstractArchive<TEntry, TVolume> : IArchive
where TEntry : IArchiveEntry
where TVolume : IVolume
{
Type = type;
ReaderOptions = sourceStream.ReaderOptions;
_sourceStream = sourceStream;
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(_sourceStream));
_lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
_lazyVolumesAsync = new LazyAsyncReadOnlyCollection<TVolume>(
LoadVolumesAsync(_sourceStream)
);
_lazyEntriesAsync = new LazyAsyncReadOnlyCollection<TEntry>(
LoadEntriesAsync(_lazyVolumesAsync)
);
}
private readonly LazyReadOnlyCollection<TVolume> lazyVolumes;
private readonly LazyReadOnlyCollection<TEntry> lazyEntries;
internal AbstractArchive(ArchiveType type)
{
Type = type;
ReaderOptions = new();
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
_lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
_lazyVolumesAsync = new LazyAsyncReadOnlyCollection<TVolume>(
AsyncEnumerableEx.Empty<TVolume>()
);
_lazyEntriesAsync = new LazyAsyncReadOnlyCollection<TEntry>(
AsyncEnumerableEx.Empty<TEntry>()
);
}
protected ReaderOptions ReaderOptions { get; } = new ();
public ArchiveType Type { get; }
private bool disposed;
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
/// </summary>
public virtual ICollection<TEntry> Entries => _lazyEntries;
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveVolumes across the one or many parts of the RarArchive.
/// </summary>
public ICollection<TVolume> Volumes => _lazyVolumes;
/// <summary>
/// The total size of the files compressed in the archive.
/// </summary>
public virtual long TotalSize =>
Entries.Aggregate(0L, (total, cf) => total + cf.CompressedSize);
/// <summary>
/// The total size of the files as uncompressed in the archive.
/// </summary>
public virtual long TotalUncompressedSize =>
Entries.Aggregate(0L, (total, cf) => total + cf.Size);
protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream sourceStream);
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);
protected virtual IAsyncEnumerable<TVolume> LoadVolumesAsync(SourceStream sourceStream) =>
LoadVolumes(sourceStream).ToAsyncEnumerable();
protected virtual async IAsyncEnumerable<TEntry> LoadEntriesAsync(
IAsyncEnumerable<TVolume> volumes
)
{
foreach (var item in LoadEntries(await volumes.ToListAsync()))
internal AbstractArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerOptions, CancellationToken cancellationToken)
{
yield return item;
}
}
IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();
IEnumerable<IVolume> IArchive.Volumes => _lazyVolumes.Cast<IVolume>();
public virtual void Dispose()
{
if (!_disposed)
{
_lazyVolumes.ForEach(v => v.Dispose());
_lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
_sourceStream?.Dispose();
_disposed = true;
}
}
private void EnsureEntriesLoaded()
{
_lazyEntries.EnsureFullyLoaded();
_lazyVolumes.EnsureFullyLoaded();
}
/// <summary>
/// Use this method to extract all entries in an archive in order.
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
/// extracted sequentially for the best performance.
///
/// This method will load all entry information from the archive.
///
/// WARNING: this will reuse the underlying stream for the archive. Errors may
/// occur if this is used at the same time as other extraction methods on this instance.
/// </summary>
/// <returns></returns>
public IReader ExtractAllEntries()
{
if (!IsSolid && Type != ArchiveType.SevenZip)
{
throw new SharpCompressException(
"ExtractAllEntries can only be used on solid archives or 7Zip archives (which require random access)."
);
}
EnsureEntriesLoaded();
return CreateReaderForSolidExtraction();
}
protected abstract IReader CreateReaderForSolidExtraction();
protected abstract ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync();
/// <summary>
/// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
/// </summary>
public virtual bool IsSolid => false;
/// <summary>
/// Archive is ENCRYPTED (this means the Archive has password-protected files).
/// </summary>
public virtual bool IsEncrypted => false;
/// <summary>
/// The archive can find all the parts of the archive needed to fully extract the archive. This forces the parsing of the entire archive.
/// </summary>
public bool IsComplete
{
get
{
EnsureEntriesLoaded();
return Entries.All(x => x.IsComplete);
}
}
#region Async Support
private readonly LazyAsyncReadOnlyCollection<TVolume> _lazyVolumesAsync;
private readonly LazyAsyncReadOnlyCollection<TEntry> _lazyEntriesAsync;
public virtual async ValueTask DisposeAsync()
{
if (!_disposed)
{
await foreach (var v in _lazyVolumesAsync)
Type = type;
if (!fileInfo.Exists)
{
v.Dispose();
throw new ArgumentException("File does not exist: " + fileInfo.FullName);
}
foreach (var v in _lazyEntriesAsync.GetLoaded().Cast<Entry>())
ReaderOptions = readerOptions;
readerOptions.LeaveStreamOpen = false;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(fileInfo, cancellationToken));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes, cancellationToken));
}
protected abstract IAsyncEnumerable<TVolume> LoadVolumes(FileInfo file, CancellationToken cancellationToken);
internal AbstractArchive(ArchiveType type, IAsyncEnumerable<Stream> streams, ReaderOptions readerOptions, CancellationToken cancellationToken)
{
Type = type;
ReaderOptions = readerOptions;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(streams.Select(CheckStreams), cancellationToken));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes, cancellationToken));
}
internal AbstractArchive(ArchiveType type)
{
Type = type;
lazyVolumes = new LazyReadOnlyCollection<TVolume>( AsyncEnumerable.Empty<TVolume>());
lazyEntries = new LazyReadOnlyCollection<TEntry>(AsyncEnumerable.Empty<TEntry>());
}
public ArchiveType Type { get; }
private static Stream CheckStreams(Stream stream)
{
if (!stream.CanSeek || !stream.CanRead)
{
v.Close();
throw new ArgumentException("Archive streams must be Readable and Seekable");
}
_sourceStream?.Dispose();
_disposed = true;
return stream;
}
}
private async ValueTask EnsureEntriesLoadedAsync()
{
await _lazyEntriesAsync.EnsureFullyLoaded();
await _lazyVolumesAsync.EnsureFullyLoaded();
}
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
/// </summary>
public virtual IAsyncEnumerable<TEntry> Entries => lazyEntries;
public virtual IAsyncEnumerable<TEntry> EntriesAsync => _lazyEntriesAsync;
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveVolumes across the one or many parts of the RarArchive.
/// </summary>
public IAsyncEnumerable<TVolume> Volumes => lazyVolumes;
private async IAsyncEnumerable<IArchiveEntry> EntriesAsyncCast()
{
await foreach (var entry in EntriesAsync)
/// <summary>
/// The total size of the files compressed in the archive.
/// </summary>
public virtual async ValueTask<long> TotalSizeAsync()
{
yield return entry;
await EnsureEntriesLoaded();
return await Entries.AggregateAsync(0L, (total, cf) => total + cf.CompressedSize);
}
}
IAsyncEnumerable<IArchiveEntry> IAsyncArchive.EntriesAsync => EntriesAsyncCast();
private async IAsyncEnumerable<IVolume> VolumesAsyncCast()
{
await foreach (var volume in VolumesAsync)
/// <summary>
/// The total size of the files as uncompressed in the archive.
/// </summary>
public virtual async ValueTask<long> TotalUncompressedSizeAsync()
{
yield return volume;
await EnsureEntriesLoaded();
return await Entries.AggregateAsync(0L, (total, cf) => total + cf.Size);
}
}
public IAsyncEnumerable<IVolume> VolumesAsync => VolumesAsyncCast();
protected abstract IAsyncEnumerable<TVolume> LoadVolumes(IAsyncEnumerable<Stream> streams, CancellationToken cancellationToken);
protected abstract IAsyncEnumerable<TEntry> LoadEntries(IAsyncEnumerable<TVolume> volumes, CancellationToken cancellationToken);
public async ValueTask<IAsyncReader> ExtractAllEntriesAsync()
{
if (!IsSolid && Type != ArchiveType.SevenZip)
IAsyncEnumerable<IArchiveEntry> IArchive.Entries => Entries.Select(x => (IArchiveEntry)x);
IAsyncEnumerable<IVolume> IArchive.Volumes => lazyVolumes.Select(x => (IVolume)x);
public virtual async ValueTask DisposeAsync()
{
throw new SharpCompressException(
"ExtractAllEntries can only be used on solid archives or 7Zip archives (which require random access)."
);
if (!disposed)
{
await lazyVolumes.ForEachAsync(async v => await v.DisposeAsync());
await lazyEntries.GetLoaded().Cast<Entry>().ForEachAsync(async x => await x.CloseAsync());
disposed = true;
}
}
/// <summary>
/// Use this method to extract all entries in an archive in order.
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
/// extracted sequentially for the best performance.
///
/// This method will load all entry information from the archive.
///
/// WARNING: this will reuse the underlying stream for the archive. Errors may
/// occur if this is used at the same time as other extraction methods on this instance.
/// </summary>
/// <returns></returns>
public async ValueTask<IReader> ExtractAllEntries()
{
await EnsureEntriesLoaded();
return await CreateReaderForSolidExtraction();
}
public async ValueTask EnsureEntriesLoaded()
{
await lazyEntries.EnsureFullyLoaded();
await lazyVolumes.EnsureFullyLoaded();
}
protected abstract ValueTask<IReader> CreateReaderForSolidExtraction();
/// <summary>
/// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
/// </summary>
public virtual ValueTask<bool> IsSolidAsync() => new(false);
/// <summary>
/// The archive can find all the parts of the archive needed to fully extract the archive. This forces the parsing of the entire archive.
/// </summary>
public async ValueTask<bool> IsCompleteAsync()
{
await EnsureEntriesLoaded();
return await Entries.AllAsync(x => x.IsComplete);
}
await EnsureEntriesLoadedAsync();
return await CreateReaderForSolidExtractionAsync();
}
public virtual ValueTask<bool> IsSolidAsync() => new(false);
public async ValueTask<bool> IsCompleteAsync()
{
await EnsureEntriesLoadedAsync();
return await EntriesAsync.AllAsync(x => x.IsComplete);
}
public async ValueTask<long> TotalSizeAsync() =>
await EntriesAsync.AggregateAsync(0L, (total, cf) => total + cf.CompressedSize);
public async ValueTask<long> TotalUncompressedSizeAsync() =>
await EntriesAsync.AggregateAsync(0L, (total, cf) => total + cf.Size);
public ValueTask<bool> IsEncryptedAsync() => new(IsEncrypted);
#endregion
}

View File

@@ -1,227 +1,176 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Writers;
namespace SharpCompress.Archives;
public abstract class AbstractWritableArchive<TEntry, TVolume>
: AbstractArchive<TEntry, TVolume>,
IWritableArchive,
IWritableAsyncArchive
where TEntry : IArchiveEntry
where TVolume : IVolume
namespace SharpCompress.Archives
{
private class RebuildPauseDisposable : IDisposable
public abstract class AbstractWritableArchive<TEntry, TVolume> : AbstractArchive<TEntry, TVolume>, IWritableArchive
where TEntry : IArchiveEntry
where TVolume : IVolume
{
private readonly AbstractWritableArchive<TEntry, TVolume> archive;
public RebuildPauseDisposable(AbstractWritableArchive<TEntry, TVolume> archive)
private class RebuildPauseDisposable : IAsyncDisposable
{
this.archive = archive;
archive.pauseRebuilding = true;
}
private readonly AbstractWritableArchive<TEntry, TVolume> archive;
public void Dispose()
{
archive.pauseRebuilding = false;
archive.RebuildModifiedCollection();
}
}
private readonly List<TEntry> newEntries = new();
private readonly List<TEntry> removedEntries = new();
private readonly List<TEntry> modifiedEntries = new();
private bool hasModifications;
private bool pauseRebuilding;
internal AbstractWritableArchive(ArchiveType type)
: base(type) { }
internal AbstractWritableArchive(ArchiveType type, SourceStream sourceStream)
: base(type, sourceStream) { }
public override ICollection<TEntry> Entries
{
get
{
if (hasModifications)
public RebuildPauseDisposable(AbstractWritableArchive<TEntry, TVolume> archive)
{
return modifiedEntries;
this.archive = archive;
archive.pauseRebuilding = true;
}
return base.Entries;
}
}
public IDisposable PauseEntryRebuilding() => new RebuildPauseDisposable(this);
private void RebuildModifiedCollection()
{
if (pauseRebuilding)
{
return;
}
hasModifications = true;
newEntries.RemoveAll(v => removedEntries.Contains(v));
modifiedEntries.Clear();
modifiedEntries.AddRange(OldEntries.Concat(newEntries));
}
private IEnumerable<TEntry> OldEntries => base.Entries.Where(x => !removedEntries.Contains(x));
public void RemoveEntry(TEntry entry)
{
if (!removedEntries.Contains(entry))
{
removedEntries.Add(entry);
RebuildModifiedCollection();
}
}
void IWritableArchiveCommon.RemoveEntry(IArchiveEntry entry) => RemoveEntry((TEntry)entry);
public TEntry AddEntry(string key, Stream source, long size = 0, DateTime? modified = null) =>
AddEntry(key, source, false, size, modified);
IArchiveEntry IWritableArchiveCommon.AddEntry(
string key,
Stream source,
bool closeStream,
long size,
DateTime? modified
) => AddEntry(key, source, closeStream, size, modified);
IArchiveEntry IWritableArchiveCommon.AddDirectoryEntry(string key, DateTime? modified) =>
AddDirectoryEntry(key, modified);
public TEntry AddEntry(
string key,
Stream source,
bool closeStream,
long size = 0,
DateTime? modified = null
)
{
if (key.Length > 0 && key[0] is '/' or '\\')
{
key = key.Substring(1);
}
if (DoesKeyMatchExisting(key))
{
throw new ArchiveException("Cannot add entry with duplicate key: " + key);
}
var entry = CreateEntry(key, source, size, modified, closeStream);
newEntries.Add(entry);
RebuildModifiedCollection();
return entry;
}
private bool DoesKeyMatchExisting(string key)
{
foreach (var path in Entries.Select(x => x.Key))
{
if (path is null)
public async ValueTask DisposeAsync()
{
continue;
archive.pauseRebuilding = false;
await archive.RebuildModifiedCollection();
}
var p = path.Replace('/', '\\');
if (p.Length > 0 && p[0] == '\\')
}
private readonly List<TEntry> newEntries = new();
private readonly List<TEntry> removedEntries = new();
private readonly List<TEntry> modifiedEntries = new();
private bool hasModifications;
private bool pauseRebuilding;
internal AbstractWritableArchive(ArchiveType type)
: base(type)
{
}
internal AbstractWritableArchive(ArchiveType type, Stream stream, ReaderOptions readerFactoryOptions,
CancellationToken cancellationToken)
: base(type, stream.AsAsyncEnumerable(), readerFactoryOptions, cancellationToken)
{
}
internal AbstractWritableArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerFactoryOptions,
CancellationToken cancellationToken)
: base(type, fileInfo, readerFactoryOptions, cancellationToken)
{
}
public override IAsyncEnumerable<TEntry> Entries
{
get
{
p = p.Substring(1);
if (hasModifications)
{
return modifiedEntries.ToAsyncEnumerable();
}
return base.Entries;
}
return string.Equals(p, key, StringComparison.OrdinalIgnoreCase);
}
return false;
}
public TEntry AddDirectoryEntry(string key, DateTime? modified = null)
{
if (key.Length > 0 && key[0] is '/' or '\\')
public IAsyncDisposable PauseEntryRebuilding()
{
key = key.Substring(1);
return new RebuildPauseDisposable(this);
}
if (DoesKeyMatchExisting(key))
private async ValueTask RebuildModifiedCollection()
{
throw new ArchiveException("Cannot add entry with duplicate key: " + key);
if (pauseRebuilding)
{
return;
}
hasModifications = true;
newEntries.RemoveAll(v => removedEntries.Contains(v));
modifiedEntries.Clear();
modifiedEntries.AddRange(await OldEntries.Concat(newEntries.ToAsyncEnumerable()).ToListAsync());
}
var entry = CreateDirectoryEntry(key, modified);
newEntries.Add(entry);
RebuildModifiedCollection();
return entry;
}
public void SaveTo(Stream stream, WriterOptions options)
{
//reset streams of new entries
newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
SaveTo(stream, options, OldEntries, newEntries);
}
private IAsyncEnumerable<TEntry> OldEntries { get { return base.Entries.Where(x => !removedEntries.Contains(x)); } }
public async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
CancellationToken cancellationToken = default
)
{
//reset streams of new entries
newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
await SaveToAsync(stream, options, OldEntries, newEntries, cancellationToken)
.ConfigureAwait(false);
}
protected TEntry CreateEntry(
string key,
Stream source,
long size,
DateTime? modified,
bool closeStream
)
{
if (!source.CanRead || !source.CanSeek)
public async ValueTask RemoveEntryAsync(TEntry entry)
{
throw new ArchiveException(
"Streams must be readable and seekable to use the Writing Archive API"
);
if (!removedEntries.Contains(entry))
{
removedEntries.Add(entry);
await RebuildModifiedCollection();
}
}
return CreateEntryInternal(key, source, size, modified, closeStream);
}
protected abstract TEntry CreateEntryInternal(
string key,
Stream source,
long size,
DateTime? modified,
bool closeStream
);
ValueTask IWritableArchive.RemoveEntryAsync(IArchiveEntry entry, CancellationToken cancellationToken)
{
return RemoveEntryAsync((TEntry)entry);
}
protected abstract TEntry CreateDirectoryEntry(string key, DateTime? modified);
public ValueTask<TEntry> AddEntryAsync(string key, Stream source,
long size = 0, DateTime? modified = null,
CancellationToken cancellationToken = default)
{
return AddEntryAsync(key, source, false, size, modified, cancellationToken);
}
protected abstract void SaveTo(
Stream stream,
WriterOptions options,
IEnumerable<TEntry> oldEntries,
IEnumerable<TEntry> newEntries
);
async ValueTask<IArchiveEntry> IWritableArchive.AddEntryAsync(string key, Stream source, bool closeStream, long size, DateTime? modified, CancellationToken cancellationToken)
{
return await AddEntryAsync(key, source, closeStream, size, modified, cancellationToken);
}
protected abstract ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IEnumerable<TEntry> oldEntries,
IEnumerable<TEntry> newEntries,
CancellationToken cancellationToken = default
);
public async ValueTask<TEntry> AddEntryAsync(string key, Stream source, bool closeStream,
long size = 0, DateTime? modified = null, CancellationToken cancellationToken = default)
{
if (key.Length > 0 && key[0] is '/' or '\\')
{
key = key.Substring(1);
}
if (await DoesKeyMatchExisting(key))
{
throw new ArchiveException("Cannot add entry with duplicate key: " + key);
}
var entry = await CreateEntry(key, source, size, modified, closeStream, cancellationToken);
newEntries.Add(entry);
await RebuildModifiedCollection();
return entry;
}
public override void Dispose()
{
base.Dispose();
newEntries.Cast<Entry>().ForEach(x => x.Close());
removedEntries.Cast<Entry>().ForEach(x => x.Close());
modifiedEntries.Cast<Entry>().ForEach(x => x.Close());
private async ValueTask<bool> DoesKeyMatchExisting(string key)
{
await foreach (var path in Entries.Select(x => x.Key))
{
var p = path.Replace('/', '\\');
if (p.Length > 0 && p[0] == '\\')
{
p = p.Substring(1);
}
return string.Equals(p, key, StringComparison.OrdinalIgnoreCase);
}
return false;
}
public async ValueTask SaveToAsync(Stream stream, WriterOptions options, CancellationToken cancellationToken = default)
{
//reset streams of new entries
newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
await SaveToAsync(stream, options, OldEntries, newEntries.ToAsyncEnumerable(), cancellationToken);
}
protected ValueTask<TEntry> CreateEntry(string key, Stream source, long size, DateTime? modified,
bool closeStream, CancellationToken cancellationToken)
{
if (!source.CanRead || !source.CanSeek)
{
throw new ArgumentException("Streams must be readable and seekable to use the Writing Archive API");
}
return CreateEntryInternal(key, source, size, modified, closeStream, cancellationToken);
}
protected abstract ValueTask<TEntry> CreateEntryInternal(string key, Stream source, long size, DateTime? modified,
bool closeStream, CancellationToken cancellationToken);
protected abstract ValueTask SaveToAsync(Stream stream, WriterOptions options, IAsyncEnumerable<TEntry> oldEntries, IAsyncEnumerable<TEntry> newEntries,
CancellationToken cancellationToken = default);
public override async ValueTask DisposeAsync()
{
await base.DisposeAsync();
await newEntries.Cast<Entry>().ForEachAsync(async x => await x.CloseAsync());
await removedEntries.Cast<Entry>().ForEachAsync(async x => await x.CloseAsync());
await modifiedEntries.Cast<Entry>().ForEachAsync(async x => await x.CloseAsync());
}
}
}

View File

@@ -1,347 +1,138 @@
using System;
using System.Collections.Generic;
using System;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives.GZip;
//using SharpCompress.Archives.Rar;
//using SharpCompress.Archives.SevenZip;
using SharpCompress.Archives.Tar;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Factories;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
public static class ArchiveFactory
namespace SharpCompress.Archives
{
public static IArchive Open(Stream stream, ReaderOptions? readerOptions = null)
public static class ArchiveFactory
{
readerOptions ??= new ReaderOptions();
stream = SharpCompressStream.Create(stream, bufferSize: readerOptions.BufferSize);
return FindFactory<IArchiveFactory>(stream).Open(stream, readerOptions);
}
public static async ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
readerOptions ??= new ReaderOptions();
stream = SharpCompressStream.Create(stream, bufferSize: readerOptions.BufferSize);
var factory = await FindFactoryAsync<IArchiveFactory>(stream, cancellationToken);
return factory.OpenAsync(stream, readerOptions);
}
public static IWritableArchive Create(ArchiveType type)
{
var factory = Factory
.Factories.OfType<IWriteableArchiveFactory>()
.FirstOrDefault(item => item.KnownArchiveType == type);
if (factory != null)
/// <summary>
/// Opens an Archive for random access
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <returns></returns>
public static async ValueTask<IArchive> OpenAsync(Stream stream, ReaderOptions? readerOptions = null, CancellationToken cancellationToken = default)
{
return factory.CreateWriteableArchive();
}
throw new NotSupportedException("Cannot create Archives of type: " + type);
}
public static IArchive Open(string filePath, ReaderOptions? options = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), options);
}
public static ValueTask<IAsyncArchive> OpenAsync(
string filePath,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
filePath.NotNullOrEmpty(nameof(filePath));
return OpenAsync(new FileInfo(filePath), options, cancellationToken);
}
public static IArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
return FindFactory<IArchiveFactory>(fileInfo).Open(fileInfo, options);
}
public static async ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
var factory = await FindFactoryAsync<IArchiveFactory>(fileInfo, cancellationToken);
return factory.OpenAsync(fileInfo, options, cancellationToken);
}
public static IArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? options = null)
{
fileInfos.NotNull(nameof(fileInfos));
var filesArray = fileInfos.ToArray();
if (filesArray.Length == 0)
{
throw new InvalidOperationException("No files to open");
}
var fileInfo = filesArray[0];
if (filesArray.Length == 1)
{
return Open(fileInfo, options);
}
fileInfo.NotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
return FindFactory<IMultiArchiveFactory>(fileInfo).Open(filesArray, options);
}
public static async ValueTask<IAsyncArchive> OpenAsync(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
fileInfos.NotNull(nameof(fileInfos));
var filesArray = fileInfos.ToArray();
if (filesArray.Length == 0)
{
throw new InvalidOperationException("No files to open");
}
var fileInfo = filesArray[0];
if (filesArray.Length == 1)
{
return await OpenAsync(fileInfo, options, cancellationToken);
}
fileInfo.NotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
var factory = await FindFactoryAsync<IMultiArchiveFactory>(fileInfo, cancellationToken);
return factory.OpenAsync(filesArray, options, cancellationToken);
}
public static IArchive Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
{
streams.NotNull(nameof(streams));
var streamsArray = streams.ToArray();
if (streamsArray.Length == 0)
{
throw new InvalidOperationException("No streams");
}
var firstStream = streamsArray[0];
if (streamsArray.Length == 1)
{
return Open(firstStream, options);
}
firstStream.NotNull(nameof(firstStream));
options ??= new ReaderOptions();
return FindFactory<IMultiArchiveFactory>(firstStream).Open(streamsArray, options);
}
public static async ValueTask<IAsyncArchive> OpenAsync(
IEnumerable<Stream> streams,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
streams.NotNull(nameof(streams));
var streamsArray = streams.ToArray();
if (streamsArray.Length == 0)
{
throw new InvalidOperationException("No streams");
}
var firstStream = streamsArray[0];
if (streamsArray.Length == 1)
{
return await OpenAsync(firstStream, options, cancellationToken);
}
firstStream.NotNull(nameof(firstStream));
options ??= new ReaderOptions();
var factory = FindFactory<IMultiArchiveFactory>(firstStream);
return factory.OpenAsync(streamsArray, options);
}
public static void WriteToDirectory(
string sourceArchive,
string destinationDirectory,
ExtractionOptions? options = null
)
{
using var archive = Open(sourceArchive);
archive.WriteToDirectory(destinationDirectory, options);
}
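
Note: a minimal usage sketch of the helper above, assuming the synchronous API shape shown in this diff; the archive and output paths are placeholders.

using SharpCompress.Archives;
using SharpCompress.Common;

// Extract every entry of an archive into "out", keeping relative paths.
ArchiveFactory.WriteToDirectory(
    "backup.zip",
    "out",
    new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);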
private static T FindFactory<T>(FileInfo finfo)
where T : IFactory
{
finfo.NotNull(nameof(finfo));
using Stream stream = finfo.OpenRead();
return FindFactory<T>(stream);
}
private static T FindFactory<T>(Stream stream)
where T : IFactory
{
stream.NotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException("Stream should be readable and seekable");
}
var factories = Factory.Factories.OfType<T>();
var startPosition = stream.Position;
foreach (var factory in factories)
{
stream.Seek(startPosition, SeekOrigin.Begin);
if (factory.IsArchive(stream))
{
stream.Seek(startPosition, SeekOrigin.Begin);
return factory;
}
}
var extensions = string.Join(", ", factories.Select(item => item.Name));
throw new InvalidOperationException(
$"Cannot determine compressed stream type. Supported Archive Formats: {extensions}"
);
}
private static async ValueTask<T> FindFactoryAsync<T>(
FileInfo finfo,
CancellationToken cancellationToken
)
where T : IFactory
{
finfo.NotNull(nameof(finfo));
using Stream stream = finfo.OpenRead();
return await FindFactoryAsync<T>(stream, cancellationToken);
}
private static async ValueTask<T> FindFactoryAsync<T>(
Stream stream,
CancellationToken cancellationToken
)
where T : IFactory
{
stream.NotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException("Stream should be readable and seekable");
}
var factories = Factory.Factories.OfType<T>();
var startPosition = stream.Position;
foreach (var factory in factories)
{
stream.Seek(startPosition, SeekOrigin.Begin);
if (await factory.IsArchiveAsync(stream, cancellationToken: cancellationToken))
{
stream.Seek(startPosition, SeekOrigin.Begin);
return factory;
}
}
var extensions = string.Join(", ", factories.Select(item => item.Name));
throw new InvalidOperationException(
$"Cannot determine compressed stream type. Supported Archive Formats: {extensions}"
);
}
public static bool IsArchive(
string filePath,
out ArchiveType? type,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
filePath.NotNullOrEmpty(nameof(filePath));
using Stream s = File.OpenRead(filePath);
return IsArchive(s, out type, bufferSize);
}
public static bool IsArchive(
Stream stream,
out ArchiveType? type,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
type = null;
stream.NotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException("Stream should be readable and seekable");
}
var startPosition = stream.Position;
foreach (var factory in Factory.Factories)
{
var isArchive = factory.IsArchive(stream);
stream.Position = startPosition;
if (isArchive)
{
type = factory.KnownArchiveType;
return true;
}
}
return false;
}
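
A short sketch of calling the detection helper above; this is a fragment, and the file name is illustrative.

if (ArchiveFactory.IsArchive("download.bin", out ArchiveType? detected))
{
    Console.WriteLine($"Detected: {detected}"); // e.g. ArchiveType.Zip
}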
public static IEnumerable<string> GetFileParts(string part1)
{
part1.NotNullOrEmpty(nameof(part1));
return GetFileParts(new FileInfo(part1)).Select(a => a.FullName);
}
public static IEnumerable<FileInfo> GetFileParts(FileInfo part1)
{
part1.NotNull(nameof(part1));
yield return part1;
foreach (var factory in Factory.Factories.OfType<IFactory>())
{
var i = 1;
var part = factory.GetFilePart(i++, part1);
if (part != null)
{
yield return part;
while ((part = factory.GetFilePart(i++, part1)) != null)
{
yield return part;
}
yield break;
}
}
}
public static IWritableArchive Create(ArchiveType type)
{
return type switch
{
ArchiveType.Zip => ZipArchive.Create(),
//ArchiveType.Tar => TarArchive.Create(),
ArchiveType.GZip => GZipArchive.Create(),
_ => throw new NotSupportedException("Cannot create Archives of type: " + type)
};
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
public static ValueTask<IArchive> OpenAsync(string filePath, ReaderOptions? options = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
return OpenAsync(new FileInfo(filePath), options);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
public static async ValueTask<IArchive> OpenAsync(FileInfo fileInfo, ReaderOptions? options = null, CancellationToken cancellationToken = default)
{
fileInfo.CheckNotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
await using var stream = fileInfo.OpenRead();
if (await ZipArchive.IsZipFileAsync(stream, null, cancellationToken))
{
return ZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
/*if (SevenZipArchive.IsSevenZipFile(stream))
{
return SevenZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin); */
if (await GZipArchive.IsGZipFileAsync(stream, cancellationToken))
{
return GZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
/*if (RarArchive.IsRarFile(stream, options))
{
return RarArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (TarArchive.IsTarFile(stream))
{
return TarArchive.Open(fileInfo, options);
} */
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static async ValueTask WriteToDirectory(string sourceArchive,
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default)
{
await using IArchive archive = await OpenAsync(sourceArchive);
await foreach (IArchiveEntry entry in archive.Entries.WithCancellation(cancellationToken))
{
await entry.WriteEntryToDirectoryAsync(destinationDirectory, options, cancellationToken);
}
}
}
public static IArchiveFactory AutoFactory { get; } = new AutoArchiveFactory();
}

View File

@@ -1,30 +0,0 @@
using System;
using System.IO;
using System.Text.RegularExpressions;
namespace SharpCompress.Archives;
internal abstract class ArchiveVolumeFactory
{
internal static FileInfo? GetFilePart(int index, FileInfo part1) //base the name on the first part
{
FileInfo? item = null;
//split 001, 002 ...
var m = Regex.Match(part1.Name, @"^(.*\.)([0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(
Path.Combine(
part1.DirectoryName!,
String.Concat(
m.Groups[1].Value,
(index + 1).ToString().PadLeft(m.Groups[2].Value.Length, '0')
)
)
);
if (item != null && item.Exists)
return item;
return null;
}
}
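
For reference, the regex above implements the `name.001`, `name.002`, ... split-volume convention: given the first part's name, each subsequent index is zero-padded to the original suffix width. A standalone sketch of the same rule:

using System.Text.RegularExpressions;

static string? NextPartName(string firstPartName, int index)
{
    // "archive.001" with index 1 yields "archive.002"; suffix width is preserved.
    var m = Regex.Match(firstPartName, @"^(.*\.)([0-9]+)$", RegexOptions.IgnoreCase);
    return m.Success
        ? m.Groups[1].Value + (index + 1).ToString().PadLeft(m.Groups[2].Value.Length, '0')
        : null;
}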

View File

@@ -1,52 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
internal class AutoArchiveFactory : IArchiveFactory
{
public string Name => nameof(AutoArchiveFactory);
public ArchiveType? KnownArchiveType => null;
public IEnumerable<string> GetSupportedExtensions() => throw new NotSupportedException();
public bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => throw new NotSupportedException();
public ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
) => throw new NotSupportedException();
public FileInfo? GetFilePart(int index, FileInfo part1) => throw new NotSupportedException();
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
ArchiveFactory.Open(stream, readerOptions);
public IAsyncArchive OpenAsync(Stream stream, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)Open(stream, readerOptions);
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
ArchiveFactory.Open(fileInfo, readerOptions);
public IAsyncArchive OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)Open(fileInfo, readerOptions);
}
}

View File

@@ -1,192 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
using SharpCompress.Writers;
using SharpCompress.Writers.GZip;
namespace SharpCompress.Archives.GZip;
public partial class GZipArchive
#if NET8_0_OR_GREATER
: IArchiveOpenable<IWritableArchive, IWritableAsyncArchive>,
IMultiArchiveOpenable<IWritableArchive, IWritableAsyncArchive>
#endif
{
public static IWritableAsyncArchive OpenAsync(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return (IWritableAsyncArchive)Open(
new FileInfo(path),
readerOptions ?? new ReaderOptions()
);
}
public static IWritableArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
public static IWritableArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.NotNull(nameof(fileInfo));
return new GZipArchive(
new SourceStream(
fileInfo,
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
)
);
}
public static IWritableArchive Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new GZipArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IWritableArchive Open(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null
)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new GZipArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IWritableArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new GZipArchive(
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
);
}
public static IWritableAsyncArchive OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(stream, readerOptions);
}
public static IWritableAsyncArchive OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(fileInfo, readerOptions);
}
public static IWritableAsyncArchive OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(streams, readerOptions);
}
public static IWritableAsyncArchive OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(fileInfos, readerOptions);
}
public static GZipArchive Create() => new();
public static bool IsGZipFile(string filePath) => IsGZipFile(new FileInfo(filePath));
public static bool IsGZipFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsGZipFile(stream);
}
public static bool IsGZipFile(Stream stream)
{
Span<byte> header = stackalloc byte[10];
if (!stream.ReadFully(header))
{
return false;
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
return false;
}
return true;
}
public static async ValueTask<bool> IsGZipFileAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
byte[] header = new byte[10];
if (!await stream.ReadFullyAsync(header, cancellationToken).ConfigureAwait(false))
{
return false;
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
return false;
}
return true;
}
}
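
Both IsGZipFile variants above test the same three magic bytes (ID1 = 0x1F, ID2 = 0x8B, CM = 8 for deflate, per RFC 1952); a standalone restatement of that check:

// Anything that fails this header test is not a gzip member.
static bool LooksLikeGZip(ReadOnlySpan<byte> header) =>
    header.Length >= 3 && header[0] == 0x1F && header[1] == 0x8B && header[2] == 8;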

View File

@@ -1,150 +1,192 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
using SharpCompress.Writers;
using SharpCompress.Writers.GZip;
namespace SharpCompress.Archives.GZip;
public partial class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
{
private GZipArchive(SourceStream sourceStream)
: base(ArchiveType.GZip, sourceStream) { }
internal GZipArchive()
: base(ArchiveType.GZip) { }
protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream sourceStream)
{
sourceStream.LoadAllParts();
return sourceStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, 0));
}
namespace SharpCompress.Archives.GZip
{
public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
{
public void SaveTo(string filePath) => SaveTo(new FileInfo(filePath));
public void SaveTo(FileInfo fileInfo)
{
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
SaveTo(stream, new WriterOptions(CompressionType.GZip));
}
public ValueTask SaveToAsync(string filePath, CancellationToken cancellationToken = default) =>
SaveToAsync(new FileInfo(filePath), cancellationToken);
public async ValueTask SaveToAsync(
FileInfo fileInfo,
CancellationToken cancellationToken = default
)
{
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
await SaveToAsync(stream, new WriterOptions(CompressionType.GZip), cancellationToken)
.ConfigureAwait(false);
}
protected override GZipArchiveEntry CreateEntryInternal(
string filePath,
Stream source,
long size,
DateTime? modified,
bool closeStream
)
{
if (Entries.Any())
{
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
}
return new GZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
protected override GZipArchiveEntry CreateDirectoryEntry(
string directoryPath,
DateTime? modified
) => throw new NotSupportedException("GZip archives do not support directory entries.");
protected override void SaveTo(
Stream stream,
WriterOptions options,
IEnumerable<GZipArchiveEntry> oldEntries,
IEnumerable<GZipArchiveEntry> newEntries
)
{
if (Entries.Count > 1)
{
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
}
using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
writer.Write(
entry.Key.NotNull("Entry Key is null"),
entryStream,
entry.LastModifiedTime
);
}
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new GZipArchive(fileInfo, readerOptions ?? new ReaderOptions(), cancellationToken);
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(Stream stream, ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default)
{
stream.CheckNotNull(nameof(stream));
return new GZipArchive(stream, readerOptions ?? new ReaderOptions(), cancellationToken);
}
protected override async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IEnumerable<GZipArchiveEntry> oldEntries,
IEnumerable<GZipArchiveEntry> newEntries,
CancellationToken cancellationToken = default
)
{
if (Entries.Count > 1)
{
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
}
using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
await writer
.WriteAsync(entry.Key.NotNull("Entry Key is null"), entryStream, cancellationToken)
.ConfigureAwait(false);
}
}
public static GZipArchive Create()
{
return new GZipArchive();
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
internal GZipArchive(FileInfo fileInfo, ReaderOptions options,
CancellationToken cancellationToken)
: base(ArchiveType.GZip, fileInfo, options, cancellationToken)
{
}
protected override IEnumerable<GZipArchiveEntry> LoadEntries(IEnumerable<GZipVolume> volumes)
{
var stream = volumes.Single().Stream;
yield return new GZipArchiveEntry(
this,
GZipFilePart.Create(stream, ReaderOptions.ArchiveEncoding)
);
}
protected override IAsyncEnumerable<GZipVolume> LoadVolumes(FileInfo file,
CancellationToken cancellationToken)
{
return new GZipVolume(file, ReaderOptions).AsAsyncEnumerable();
}
protected override async IAsyncEnumerable<GZipArchiveEntry> LoadEntriesAsync(
IAsyncEnumerable<GZipVolume> volumes
)
{
var stream = (await volumes.SingleAsync()).Stream;
yield return new GZipArchiveEntry(
this,
await GZipFilePart.CreateAsync(stream, ReaderOptions.ArchiveEncoding)
);
}
public static ValueTask<bool> IsGZipFileAsync(string filePath, CancellationToken cancellationToken = default)
{
return IsGZipFileAsync(new FileInfo(filePath), cancellationToken);
}
protected override IReader CreateReaderForSolidExtraction()
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return GZipReader.Open(stream);
}
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync()
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return new((IAsyncReader)GZipReader.Open(stream));
}
public static async ValueTask<bool> IsGZipFileAsync(FileInfo fileInfo, CancellationToken cancellationToken = default)
{
if (!fileInfo.Exists)
{
return false;
}
await using Stream stream = fileInfo.OpenRead();
return await IsGZipFileAsync(stream, cancellationToken);
}
public Task SaveToAsync(string filePath, CancellationToken cancellationToken = default)
{
return SaveToAsync(new FileInfo(filePath), cancellationToken);
}
public async Task SaveToAsync(FileInfo fileInfo, CancellationToken cancellationToken = default)
{
await using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
await SaveToAsync(stream, new WriterOptions(CompressionType.GZip), cancellationToken);
}
public static async ValueTask<bool> IsGZipFileAsync(Stream stream, CancellationToken cancellationToken = default)
{
// read the header on the first read
using var header = MemoryPool<byte>.Shared.Rent(10);
var slice = header.Memory.Slice(0, 10);
// workitem 8501: handle edge case (decompress empty stream)
if (await stream.ReadAsync(slice, cancellationToken) != 10)
{
return false;
}
if (slice.Span[0] != 0x1F || slice.Span[1] != 0x8B || slice.Span[2] != 8)
{
return false;
}
return true;
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
internal GZipArchive(Stream stream, ReaderOptions options,
CancellationToken cancellationToken)
: base(ArchiveType.GZip, stream, options, cancellationToken)
{
}
internal GZipArchive()
: base(ArchiveType.GZip)
{
}
protected override async ValueTask<GZipArchiveEntry> CreateEntryInternal(string filePath, Stream source, long size, DateTime? modified,
bool closeStream, CancellationToken cancellationToken = default)
{
if (await Entries.AnyAsync(cancellationToken: cancellationToken))
{
throw new InvalidOperationException("Only one entry is allowed in a GZip Archive");
}
return new GZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
}
protected override async ValueTask SaveToAsync(Stream stream, WriterOptions options,
IAsyncEnumerable<GZipArchiveEntry> oldEntries,
IAsyncEnumerable<GZipArchiveEntry> newEntries,
CancellationToken cancellationToken = default)
{
if (await Entries.CountAsync(cancellationToken: cancellationToken) > 1)
{
throw new InvalidOperationException("Only one entry is allowed in a GZip Archive");
}
await using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
await foreach (var entry in oldEntries.Concat(newEntries)
.Where(x => !x.IsDirectory)
.WithCancellation(cancellationToken))
{
await using var entryStream = await entry.OpenEntryStreamAsync(cancellationToken);
await writer.WriteAsync(entry.Key, entryStream, entry.LastModifiedTime, cancellationToken);
}
}
protected override async IAsyncEnumerable<GZipVolume> LoadVolumes(IAsyncEnumerable<Stream> streams,
[EnumeratorCancellation]CancellationToken cancellationToken)
{
yield return new GZipVolume(await streams.FirstAsync(cancellationToken: cancellationToken), ReaderOptions);
}
protected override async IAsyncEnumerable<GZipArchiveEntry> LoadEntries(IAsyncEnumerable<GZipVolume> volumes,
[EnumeratorCancellation]CancellationToken cancellationToken)
{
Stream stream = (await volumes.SingleAsync(cancellationToken: cancellationToken)).Stream;
var part = new GZipFilePart(ReaderOptions.ArchiveEncoding);
await part.Initialize(stream, cancellationToken);
yield return new GZipArchiveEntry(this, part);
}
protected override async ValueTask<IReader> CreateReaderForSolidExtraction()
{
var stream = (await Volumes.SingleAsync()).Stream;
stream.Position = 0;
return GZipReader.Open(stream);
}
}
}

View File

@@ -4,36 +4,33 @@ using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.GZip;
namespace SharpCompress.Archives.GZip;
public class GZipArchiveEntry : GZipEntry, IArchiveEntry
{
internal GZipArchiveEntry(GZipArchive archive, GZipFilePart? part)
: base(part) => Archive = archive;
public virtual Stream OpenEntryStream()
{
//this is to reset the stream to be read multiple times
var part = (GZipFilePart)Parts.Single();
var rawStream = part.GetRawStream();
if (rawStream.CanSeek && rawStream.Position != part.EntryStartPosition)
{
rawStream.Position = part.EntryStartPosition;
}
return Parts.Single().GetCompressedStream().NotNull();
}
public ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
{
// GZip synchronous implementation is fast enough, just wrap it
return new(OpenEntryStream());
}
#region IArchiveEntry Members
public IArchive Archive { get; }
public bool IsComplete => true;
#endregion
}
namespace SharpCompress.Archives.GZip
{
public class GZipArchiveEntry : GZipEntry, IArchiveEntry
{
internal GZipArchiveEntry(GZipArchive archive, GZipFilePart part)
: base(part)
{
Archive = archive;
}
public virtual async ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
{
//this is to reset the stream to be read multiple times
var part = (GZipFilePart)Parts.Single();
if (part.GetRawStream().Position != part.EntryStartPosition)
{
part.GetRawStream().Position = part.EntryStartPosition;
}
return await Parts.Single().GetCompressedStreamAsync(cancellationToken);
}
#region IArchiveEntry Members
public IArchive Archive { get; }
public bool IsComplete => true;
#endregion
}
}

View File

@@ -1,71 +1,70 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Archives.GZip;
internal sealed class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArchiveEntry
{
private readonly bool closeStream;
private readonly Stream stream;
internal GZipWritableArchiveEntry(
GZipArchive archive,
Stream stream,
string path,
long size,
DateTime? lastModified,
bool closeStream
)
: base(archive, null)
{
this.stream = stream;
Key = path;
Size = size;
LastModifiedTime = lastModified;
this.closeStream = closeStream;
}
public override long Crc => 0;
public override string? Key { get; }
public override long CompressedSize => 0;
public override long Size { get; }
public override DateTime? LastModifiedTime { get; }
public override DateTime? CreatedTime => null;
public override DateTime? LastAccessedTime => null;
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted => false;
public override bool IsDirectory => false;
public override bool IsSplitAfter => false;
internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();
Stream IWritableArchiveEntry.Stream => stream;
public override Stream OpenEntryStream()
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return SharpCompressStream.Create(stream, leaveOpen: true);
}
internal override void Close()
{
if (closeStream)
{
stream.Dispose();
}
}
}
namespace SharpCompress.Archives.GZip
{
internal sealed class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArchiveEntry
{
private readonly bool closeStream;
private readonly Stream stream;
internal GZipWritableArchiveEntry(GZipArchive archive, Stream stream,
string path, long size, DateTime? lastModified, bool closeStream)
: base(archive, null)
{
this.stream = stream;
Key = path;
Size = size;
LastModifiedTime = lastModified;
this.closeStream = closeStream;
}
public override long Crc => 0;
public override string Key { get; }
public override long CompressedSize => 0;
public override long Size { get; }
public override DateTime? LastModifiedTime { get; }
public override DateTime? CreatedTime => null;
public override DateTime? LastAccessedTime => null;
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted => false;
public override bool IsDirectory => false;
public override bool IsSplitAfter => false;
internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();
Stream IWritableArchiveEntry.Stream => stream;
public override ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return new(new NonDisposingStream(stream));
}
internal override async ValueTask CloseAsync()
{
if (closeStream)
{
await stream.DisposeAsync();
}
}
}
}

View File

@@ -1,47 +1,44 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
public interface IArchive : IDisposable
{
IEnumerable<IArchiveEntry> Entries { get; }
IEnumerable<IVolume> Volumes { get; }
ArchiveType Type { get; }
/// <summary>
/// Use this method to extract all entries in an archive in order.
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
/// extracted sequentially for the best performance.
/// </summary>
IReader ExtractAllEntries();
/// <summary>
/// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
/// Rar Archives can be SOLID while all 7Zip archives are considered SOLID.
/// </summary>
bool IsSolid { get; }
/// <summary>
/// This checks to see if all the known entries have IsComplete = true
/// </summary>
bool IsComplete { get; }
/// <summary>
/// The total size of the files compressed in the archive.
/// </summary>
long TotalSize { get; }
/// <summary>
/// The total size of the files as uncompressed in the archive.
/// </summary>
long TotalUncompressedSize { get; }
/// <summary>
/// Returns whether the archive is encrypted.
/// </summary>
bool IsEncrypted { get; }
}
namespace SharpCompress.Archives
{
public interface IArchive : IAsyncDisposable
{
IAsyncEnumerable<IArchiveEntry> Entries { get; }
IAsyncEnumerable<IVolume> Volumes { get; }
ArchiveType Type { get; }
ValueTask EnsureEntriesLoaded();
/// <summary>
/// Use this method to extract all entries in an archive in order.
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
/// extracted sequentially for the best performance.
/// </summary>
ValueTask<IReader> ExtractAllEntries();
/// <summary>
/// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
/// Rar Archives can be SOLID while all 7Zip archives are considered SOLID.
/// </summary>
ValueTask<bool> IsSolidAsync();
/// <summary>
/// This checks to see if all the known entries have IsComplete = true
/// </summary>
ValueTask<bool> IsCompleteAsync();
/// <summary>
/// The total size of the files compressed in the archive.
/// </summary>
ValueTask<long> TotalSizeAsync();
/// <summary>
/// The total size of the files as uncompressed in the archive.
/// </summary>
ValueTask<long> TotalUncompressedSizeAsync();
}
}
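
A consumption sketch against the async interface shape above (the added side of this hunk); the OpenAsync call is taken from the ArchiveFactory signatures earlier in this diff, and the file name is a placeholder.

await using IArchive archive = await ArchiveFactory.OpenAsync("example.zip");
await foreach (IArchiveEntry entry in archive.Entries)
{
    Console.WriteLine($"{entry.Key}: {entry.Size} bytes");
}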

View File

@@ -1,31 +1,26 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Archives;
public interface IArchiveEntry : IEntry
{
/// <summary>
/// Opens the current entry as a stream that will decompress as it is read.
/// Read the entire stream or use SkipEntry on EntryStream.
/// </summary>
Stream OpenEntryStream();
/// <summary>
/// Opens the current entry as a stream that will decompress as it is read asynchronously.
/// Read the entire stream or use SkipEntry on EntryStream.
/// </summary>
ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default);
/// <summary>
/// The archive can find all the parts of the archive needed to extract this entry.
/// </summary>
bool IsComplete { get; }
/// <summary>
/// The archive instance this entry belongs to
/// </summary>
IArchive Archive { get; }
}
namespace SharpCompress.Archives
{
public interface IArchiveEntry : IEntry
{
/// <summary>
/// Opens the current entry as a stream that will decompress as it is read.
/// Read the entire stream or use SkipEntry on EntryStream.
/// </summary>
ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default);
/// <summary>
/// The archive can find all the parts of the archive needed to extract this entry.
/// </summary>
bool IsComplete { get; }
/// <summary>
/// The archive instance this entry belongs to
/// </summary>
IArchive Archive { get; }
}
}
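
A fragment sketch of draining one entry via the async open method above; `entry` and `cancellationToken` are assumed to be in scope.

await using Stream decompressed = await entry.OpenEntryStreamAsync(cancellationToken);
using var buffer = new MemoryStream();
await decompressed.CopyToAsync(buffer, cancellationToken);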

View File

@@ -1,165 +1,60 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Archives;
public static class IArchiveEntryExtensions
{
private const int BufferSize = 81920;
/// <param name="archiveEntry">The archive entry to extract.</param>
extension(IArchiveEntry archiveEntry)
{
namespace SharpCompress.Archives
{
public static class IArchiveEntryExtensions
{
/// <summary>
/// Extract entry to the specified stream.
/// </summary>
/// <param name="streamToWriteTo">The stream to write the entry content to.</param>
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
public void WriteTo(Stream streamToWriteTo, IProgress<ProgressReport>? progress = null)
public static async ValueTask WriteToAsync(this IArchiveEntry archiveEntry, Stream streamToWriteTo, CancellationToken cancellationToken = default)
{
if (archiveEntry.IsDirectory)
{
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
}
using var entryStream = archiveEntry.OpenEntryStream();
var sourceStream = WrapWithProgress(entryStream, archiveEntry, progress);
sourceStream.CopyTo(streamToWriteTo, BufferSize);
}
/// <summary>
/// Extract entry to the specified stream asynchronously.
/// </summary>
/// <param name="streamToWriteTo">The stream to write the entry content to.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
public async ValueTask WriteToAsync(
Stream streamToWriteTo,
IProgress<ProgressReport>? progress = null,
CancellationToken cancellationToken = default
)
{
if (archiveEntry.IsDirectory)
{
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
}
using var entryStream = await archiveEntry.OpenEntryStreamAsync(cancellationToken);
var sourceStream = WrapWithProgress(entryStream, archiveEntry, progress);
await sourceStream
.CopyToAsync(streamToWriteTo, BufferSize, cancellationToken)
.ConfigureAwait(false);
}
var archive = archiveEntry.Archive;
await archive.EnsureEntriesLoaded();
var entryStream = await archiveEntry.OpenEntryStreamAsync(cancellationToken);
if (entryStream is null)
{
return;
}
await using (entryStream)
{
await entryStream.TransferToAsync(streamToWriteTo, cancellationToken);
}
}
private static Stream WrapWithProgress(
Stream source,
IArchiveEntry entry,
IProgress<ProgressReport>? progress
)
{
if (progress is null)
{
return source;
}
var entryPath = entry.Key ?? string.Empty;
var totalBytes = GetEntrySizeSafe(entry);
return new ProgressReportingStream(
source,
progress,
entryPath,
totalBytes,
leaveOpen: true
);
}
private static long? GetEntrySizeSafe(IArchiveEntry entry)
{
try
{
var size = entry.Size;
return size >= 0 ? size : null;
}
catch (NotImplementedException)
{
return null;
}
}
extension(IArchiveEntry entry)
{
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public void WriteToDirectory(
string destinationDirectory,
ExtractionOptions? options = null
) =>
ExtractionMethods.WriteEntryToDirectory(
entry,
destinationDirectory,
options,
entry.WriteToFile
);
/// <summary>
/// Extract to specific directory asynchronously, retaining filename
/// </summary>
public async ValueTask WriteToDirectoryAsync(
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
) =>
await ExtractionMethods
.WriteEntryToDirectoryAsync(
entry,
destinationDirectory,
options,
entry.WriteToFileAsync,
cancellationToken
)
.ConfigureAwait(false);
public static ValueTask WriteEntryToDirectoryAsync(this IArchiveEntry entry,
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default)
{
return ExtractionMethods.WriteEntryToDirectoryAsync(entry, destinationDirectory, options,
entry.WriteToFileAsync, cancellationToken);
}
/// <summary>
/// Extract to specific file
/// </summary>
public void WriteToFile(string destinationFileName, ExtractionOptions? options = null) =>
ExtractionMethods.WriteEntryToFile(
entry,
destinationFileName,
options,
(x, fm) =>
{
using var fs = File.Open(destinationFileName, fm);
entry.WriteTo(fs);
}
);
public static ValueTask WriteToFileAsync(this IArchiveEntry entry,
string destinationFileName,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default)
{
/// <summary>
/// Extract to specific file asynchronously
/// </summary>
public async ValueTask WriteToFileAsync(
string destinationFileName,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
) =>
await ExtractionMethods
.WriteEntryToFileAsync(
entry,
destinationFileName,
options,
async (x, fm, ct) =>
{
using var fs = File.Open(destinationFileName, fm);
await entry.WriteToAsync(fs, null, ct).ConfigureAwait(false);
},
cancellationToken
)
.ConfigureAwait(false);
return ExtractionMethods.WriteEntryToFileAsync(entry, destinationFileName, options,
async (x, fm, ct) =>
{
await using FileStream fs = File.Open(x, fm);
await entry.WriteToAsync(fs, ct);
}, cancellationToken);
}
}
}
}
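
Usage sketch for the async extension above; the destination path is illustrative and `entry` is an IArchiveEntry already in hand.

await entry.WriteToFileAsync(
    "out/readme.txt",
    new ExtractionOptions { Overwrite = true },
    cancellationToken
);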

View File

@@ -1,73 +1,24 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
public static class IArchiveExtensions
{
extension(IArchive archive)
{
/// <summary>
/// Extract to specific directory with progress reporting
/// </summary>
/// <param name="destinationDirectory">The folder to extract into.</param>
/// <param name="options">Extraction options.</param>
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
public void WriteToDirectory(
string destinationDirectory,
ExtractionOptions? options = null,
IProgress<ProgressReport>? progress = null
)
{
if (archive.IsSolid || archive.Type == ArchiveType.SevenZip)
{
using var reader = archive.ExtractAllEntries();
reader.WriteAllToDirectory(destinationDirectory, options);
}
else
{
archive.WriteToDirectoryInternal(destinationDirectory, options, progress);
}
}
private void WriteToDirectoryInternal(
string destinationDirectory,
ExtractionOptions? options,
IProgress<ProgressReport>? progress
)
{
var totalBytes = archive.TotalUncompressedSize;
var bytesRead = 0L;
var seenDirectories = new HashSet<string>();
foreach (var entry in archive.Entries)
{
if (entry.IsDirectory)
{
var dirPath = Path.Combine(
destinationDirectory,
entry.Key.NotNull("Entry Key is null")
);
if (
Path.GetDirectoryName(dirPath + "/") is { } parentDirectory
&& seenDirectories.Add(dirPath)
)
{
Directory.CreateDirectory(parentDirectory);
}
continue;
}
entry.WriteToDirectory(destinationDirectory, options);
bytesRead += entry.Size;
progress?.Report(
new ProgressReport(entry.Key ?? string.Empty, bytesRead, totalBytes)
);
}
}
}
}
namespace SharpCompress.Archives
{
public static class IArchiveExtensions
{
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static async ValueTask WriteToDirectoryAsync(this IArchive archive,
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default)
{
await foreach (IArchiveEntry entry in archive.Entries.Where(x => !x.IsDirectory).WithCancellation(cancellationToken))
{
await entry.WriteEntryToDirectoryAsync(destinationDirectory, options, cancellationToken);
}
}
}
}
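
A usage sketch of WriteToDirectoryAsync as declared on the added side above, with a timeout-backed token; names and paths are illustrative.

using var cts = new CancellationTokenSource(TimeSpan.FromMinutes(5));
await archive.WriteToDirectoryAsync(
    "out",
    new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
    cts.Token
);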

View File

@@ -1,55 +0,0 @@
using System.IO;
using System.Threading;
using SharpCompress.Factories;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
/// <summary>
/// Represents a factory used to identify and open archives.
/// </summary>
/// <remarks>
/// Currently implemented by:<br/>
/// <list type="table">
/// <item><see cref="TarFactory"/></item>
/// <item><see cref="RarFactory"/></item>
/// <item><see cref="ZipFactory"/></item>
/// <item><see cref="GZipFactory"/></item>
/// <item><see cref="SevenZipFactory"/></item>
/// </list>
/// </remarks>
public interface IArchiveFactory : IFactory
{
/// <summary>
/// Opens an Archive for random access.
/// </summary>
/// <param name="stream">An open, readable and seekable stream.</param>
/// <param name="readerOptions">reading options.</param>
IArchive Open(Stream stream, ReaderOptions? readerOptions = null);
/// <summary>
/// Opens an Archive for random access asynchronously.
/// </summary>
/// <param name="stream">An open, readable and seekable stream.</param>
/// <param name="readerOptions">reading options.</param>
IAsyncArchive OpenAsync(Stream stream, ReaderOptions? readerOptions = null);
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo">the file to open.</param>
/// <param name="readerOptions">reading options.</param>
IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null);
/// <summary>
/// Opens an Archive from a FileInfo object asynchronously.
/// </summary>
/// <param name="fileInfo">the file to open.</param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
IAsyncArchive OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
}

View File

@@ -1,36 +0,0 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
public interface IArchiveOpenable<TSync, TASync>
where TSync : IArchive
where TASync : IAsyncArchive
{
public static abstract TSync Open(string filePath, ReaderOptions? readerOptions = null);
public static abstract TSync Open(FileInfo fileInfo, ReaderOptions? readerOptions = null);
public static abstract TSync Open(Stream stream, ReaderOptions? readerOptions = null);
public static abstract TASync OpenAsync(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
public static abstract TASync OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
public static abstract TASync OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
}
#endif

View File

@@ -1,48 +0,0 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
public interface IAsyncArchive : IAsyncDisposable
{
IAsyncEnumerable<IArchiveEntry> EntriesAsync { get; }
IAsyncEnumerable<IVolume> VolumesAsync { get; }
ArchiveType Type { get; }
/// <summary>
/// Use this method to extract all entries in an archive in order.
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
/// extracted sequentially for the best performance.
/// </summary>
ValueTask<IAsyncReader> ExtractAllEntriesAsync();
/// <summary>
/// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
/// Rar Archives can be SOLID while all 7Zip archives are considered SOLID.
/// </summary>
ValueTask<bool> IsSolidAsync();
/// <summary>
/// This checks to see if all the known entries have IsComplete = true
/// </summary>
ValueTask<bool> IsCompleteAsync();
/// <summary>
/// The total size of the files compressed in the archive.
/// </summary>
ValueTask<long> TotalSizeAsync();
/// <summary>
/// The total size of the files as uncompressed in the archive.
/// </summary>
ValueTask<long> TotalUncompressedSizeAsync();
/// <summary>
/// Returns whether the archive is encrypted.
/// </summary>
ValueTask<bool> IsEncryptedAsync();
}

View File

@@ -1,92 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
public static class IAsyncArchiveExtensions
{
extension(IAsyncArchive archive)
{
/// <summary>
/// Extract to specific directory asynchronously with progress reporting and cancellation support
/// </summary>
/// <param name="archive">The archive to extract.</param>
/// <param name="destinationDirectory">The folder to extract into.</param>
/// <param name="options">Extraction options.</param>
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
/// <param name="cancellationToken">Optional cancellation token.</param>
public async Task WriteToDirectoryAsync(
string destinationDirectory,
ExtractionOptions? options = null,
IProgress<ProgressReport>? progress = null,
CancellationToken cancellationToken = default
)
{
if (await archive.IsSolidAsync() || archive.Type == ArchiveType.SevenZip)
{
await using var reader = await archive.ExtractAllEntriesAsync();
await reader.WriteAllToDirectoryAsync(
destinationDirectory,
options,
cancellationToken
);
}
else
{
await archive.WriteToDirectoryAsyncInternal(
destinationDirectory,
options,
progress,
cancellationToken
);
}
}
private async Task WriteToDirectoryAsyncInternal(
string destinationDirectory,
ExtractionOptions? options,
IProgress<ProgressReport>? progress,
CancellationToken cancellationToken
)
{
var totalBytes = await archive.TotalUncompressedSizeAsync();
var bytesRead = 0L;
var seenDirectories = new HashSet<string>();
await foreach (var entry in archive.EntriesAsync.WithCancellation(cancellationToken))
{
cancellationToken.ThrowIfCancellationRequested();
if (entry.IsDirectory)
{
var dirPath = Path.Combine(
destinationDirectory,
entry.Key.NotNull("Entry Key is null")
);
if (
Path.GetDirectoryName(dirPath + "/") is { } parentDirectory
&& seenDirectories.Add(dirPath)
)
{
Directory.CreateDirectory(parentDirectory);
}
continue;
}
await entry
.WriteToDirectoryAsync(destinationDirectory, options, cancellationToken)
.ConfigureAwait(false);
bytesRead += entry.Size;
progress?.Report(
new ProgressReport(entry.Key ?? string.Empty, bytesRead, totalBytes)
);
}
}
}
}
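
A sketch wiring IProgress<ProgressReport> into the extension above; ProgressReport's member names are not shown in this hunk, so the callback only counts reports rather than reading properties.

var reports = 0;
var progress = new Progress<ProgressReport>(_ => reports++);
await archive.WriteToDirectoryAsync("out", null, progress, CancellationToken.None);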

View File

@@ -1,56 +0,0 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using SharpCompress.Factories;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
/// <summary>
/// Represents a factory used to identify and open archives.
/// </summary>
/// <remarks>
/// Implemented by:<br/>
/// <list type="table">
/// <item><see cref="TarFactory"/></item>
/// <item><see cref="RarFactory"/></item>
/// <item><see cref="ZipFactory"/></item>
/// <item><see cref="GZipFactory"/></item>
/// <item><see cref="SevenZipFactory"/></item>
/// </list>
/// </remarks>
public interface IMultiArchiveFactory : IFactory
{
/// <summary>
/// Constructor with IEnumerable Stream objects, multi and split support.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions">reading options.</param>
IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null);
/// <summary>
/// Opens a multi-part archive from streams asynchronously.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions">reading options.</param>
IAsyncArchive OpenAsync(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null);
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions">reading options.</param>
IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null);
/// <summary>
/// Opens a multi-part archive from files asynchronously.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
IAsyncArchive OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
}

View File

@@ -1,35 +0,0 @@
#if NET8_0_OR_GREATER
using System.Collections.Generic;
using System.IO;
using System.Threading;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
public interface IMultiArchiveOpenable<TSync, TASync>
where TSync : IArchive
where TASync : IAsyncArchive
{
public static abstract TSync Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
);
public static abstract TSync Open(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null
);
public static abstract TASync OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
public static abstract TASync OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
}
#endif

View File

@@ -4,48 +4,20 @@ using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Writers;
namespace SharpCompress.Archives;
public interface IWritableArchiveCommon
{
/// <summary>
/// Use this to pause entry rebuilding when adding large collections of entries. Dispose when complete. A using statement is recommended.
/// </summary>
/// <returns>IDisposable to resume entry rebuilding</returns>
IDisposable PauseEntryRebuilding();
/// <summary>
/// Removes the specified entry from the archive.
/// </summary>
void RemoveEntry(IArchiveEntry entry);
IArchiveEntry AddEntry(
string key,
Stream source,
bool closeStream,
long size = 0,
DateTime? modified = null
);
IArchiveEntry AddDirectoryEntry(string key, DateTime? modified = null);
}
public interface IWritableArchive : IArchive, IWritableArchiveCommon
{
/// <summary>
/// Saves the archive to the specified stream using the given writer options.
/// </summary>
void SaveTo(Stream stream, WriterOptions options);
}
public interface IWritableAsyncArchive : IAsyncArchive, IWritableArchiveCommon
{
/// <summary>
/// Asynchronously saves the archive to the specified stream using the given writer options.
/// </summary>
ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
CancellationToken cancellationToken = default
);
}
namespace SharpCompress.Archives
{
public interface IWritableArchive : IArchive
{
ValueTask RemoveEntryAsync(IArchiveEntry entry, CancellationToken cancellationToken = default);
ValueTask<IArchiveEntry> AddEntryAsync(string key, Stream source, bool closeStream, long size = 0, DateTime? modified = null, CancellationToken cancellationToken = default);
ValueTask SaveToAsync(Stream stream, WriterOptions options, CancellationToken cancellationToken = default);
/// <summary>
/// Use this to pause entry rebuilding when adding large collections of entries. Dispose when complete. A using statement is recommended.
/// </summary>
/// <returns>IAsyncDisposable to resume entry rebuilding</returns>
IAsyncDisposable PauseEntryRebuilding();
}
}

View File

@@ -1,52 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Archives;
public static class IWritableArchiveCommonExtensions
{
extension(IWritableArchiveCommon writableArchive)
{
public void AddAllFromDirectory(
string filePath,
string searchPattern = "*.*",
SearchOption searchOption = SearchOption.AllDirectories
)
{
using (writableArchive.PauseEntryRebuilding())
{
foreach (
var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption)
)
{
var fileInfo = new FileInfo(path);
writableArchive.AddEntry(
path.Substring(filePath.Length),
fileInfo.OpenRead(),
true,
fileInfo.Length,
fileInfo.LastWriteTime
);
}
}
}
public IArchiveEntry AddEntry(string key, string file) =>
writableArchive.AddEntry(key, new FileInfo(file));
public IArchiveEntry AddEntry(string key, FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
throw new ArgumentException("FileInfo does not exist.");
}
return writableArchive.AddEntry(
key,
fileInfo.OpenRead(),
true,
fileInfo.Length,
fileInfo.LastWriteTime
);
}
}
}
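
For reference, a sketch combining the helpers above with the writable-archive API from this diff; the source folder and output name are illustrative.

using var archive = ZipArchive.Create();
archive.AddAllFromDirectory(@"C:\data");
archive.SaveTo("data.zip", new WriterOptions(CompressionType.Deflate));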

View File

@@ -1,8 +1,9 @@
using System.IO;
namespace SharpCompress.Archives;
internal interface IWritableArchiveEntry
{
Stream Stream { get; }
}
namespace SharpCompress.Archives
{
internal interface IWritableArchiveEntry
{
Stream Stream { get; }
}
}

View File

@@ -1,20 +1,62 @@
using System;
using System.IO;
using SharpCompress.Common;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Writers;
namespace SharpCompress.Archives;
public static class IWritableArchiveExtensions
{
extension(IWritableArchive writableArchive)
{
public void SaveTo(string filePath, WriterOptions? options = null) =>
writableArchive.SaveTo(new FileInfo(filePath), options ?? new(CompressionType.Deflate));
public void SaveTo(FileInfo fileInfo, WriterOptions? options = null)
{
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
writableArchive.SaveTo(stream, options ?? new(CompressionType.Deflate));
}
}
}
namespace SharpCompress.Archives
{
public static class IWritableArchiveExtensions
{
public static async ValueTask AddEntryAsync(this IWritableArchive writableArchive,
string entryPath, string filePath,
CancellationToken cancellationToken = default)
{
var fileInfo = new FileInfo(filePath);
if (!fileInfo.Exists)
{
throw new FileNotFoundException("Could not AddEntry: " + filePath);
}
await writableArchive.AddEntryAsync(entryPath, new FileInfo(filePath).OpenRead(), true, fileInfo.Length,
fileInfo.LastWriteTime, cancellationToken);
}
public static Task SaveToAsync(this IWritableArchive writableArchive, string filePath, WriterOptions options, CancellationToken cancellationToken = default)
{
return writableArchive.SaveToAsync(new FileInfo(filePath), options, cancellationToken);
}
public static async Task SaveToAsync(this IWritableArchive writableArchive, FileInfo fileInfo, WriterOptions options, CancellationToken cancellationToken = default)
{
await using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
await writableArchive.SaveToAsync(stream, options, cancellationToken);
}
public static async ValueTask AddAllFromDirectoryAsync(
this IWritableArchive writableArchive,
string filePath, string searchPattern = "*.*",
SearchOption searchOption = SearchOption.AllDirectories,
CancellationToken cancellationToken = default)
{
await using (writableArchive.PauseEntryRebuilding())
{
foreach (var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption))
{
var fileInfo = new FileInfo(path);
await writableArchive.AddEntryAsync(path.Substring(filePath.Length), fileInfo.OpenRead(), true, fileInfo.Length,
fileInfo.LastWriteTime,
cancellationToken);
}
}
}
public static ValueTask<IArchiveEntry> AddEntryAsync(this IWritableArchive writableArchive, string key, FileInfo fileInfo,
CancellationToken cancellationToken = default)
{
if (!fileInfo.Exists)
{
throw new ArgumentException("FileInfo does not exist.");
}
return writableArchive.AddEntryAsync(key, fileInfo.OpenRead(), true, fileInfo.Length, fileInfo.LastWriteTime, cancellationToken);
}
}
}
}
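
The async-branch equivalent, sketched from the extension signatures above; the entry and file names are illustrative.

var archive = ZipArchive.Create();
await archive.AddEntryAsync("docs/readme.txt", "readme.txt", cancellationToken);
await archive.SaveToAsync("out.zip", new WriterOptions(CompressionType.Deflate), cancellationToken);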

View File

@@ -1,36 +0,0 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Writers;
namespace SharpCompress.Archives;
public static class IWritableAsyncArchiveExtensions
{
extension(IWritableAsyncArchive writableArchive)
{
public ValueTask SaveToAsync(
string filePath,
WriterOptions? options = null,
CancellationToken cancellationToken = default
) =>
writableArchive.SaveToAsync(
new FileInfo(filePath),
options ?? new(CompressionType.Deflate),
cancellationToken
);
public async ValueTask SaveToAsync(
FileInfo fileInfo,
WriterOptions? options = null,
CancellationToken cancellationToken = default
)
{
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
await writableArchive
.SaveToAsync(stream, options ?? new(CompressionType.Deflate), cancellationToken)
.ConfigureAwait(false);
}
}
}

View File

@@ -1,20 +0,0 @@
namespace SharpCompress.Archives;
/// <summary>
/// Decorator for <see cref="Factories.Factory"/> used to declare an archive format as able to create writeable archives
/// </summary>
/// <remarks>
/// Implemented by:<br/>
/// <list type="table">
/// <item><see cref="Factories.TarFactory"/></item>
/// <item><see cref="Factories.ZipFactory"/></item>
/// <item><see cref="Factories.GZipFactory"/></item>
/// </list>
/// </remarks>
public interface IWriteableArchiveFactory : Factories.IFactory
{
/// <summary>
/// Creates a new, empty archive, ready to be written.
/// </summary>
/// <returns></returns>
IWritableArchive CreateWriteableArchive();
}
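
As a sketch of how this decorator is meant to be consumed, the snippet below filters a set of registered factories down to the writeable ones and asks the first for an empty archive; how the factory set is obtained is an assumption here, not something this interface prescribes.

using System.Collections.Generic;
using System.Linq;
using SharpCompress.Archives;
using SharpCompress.Factories;

internal static class WriteableArchiveLookup
{
    // 'factories' stands in for however the registered IFactory instances
    // are enumerated in the host application.
    public static IWritableArchive CreateFirstWriteable(IEnumerable<IFactory> factories) =>
        factories.OfType<IWriteableArchiveFactory>()
                 .First()
                 .CreateWriteableArchive();
}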

View File

@@ -1,5 +1,4 @@
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common.Rar;
@@ -7,33 +6,39 @@ using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives.Rar;
/// <summary>
/// A rar part based on a FileInfo object
/// </summary>
internal class FileInfoRarArchiveVolume : RarVolume
namespace SharpCompress.Archives.Rar
{
internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options, int index)
: base(StreamingMode.Seekable, fileInfo.OpenRead(), FixOptions(options), index)
/// <summary>
/// A rar part based on a FileInfo object
/// </summary>
internal class FileInfoRarArchiveVolume : RarVolume
{
FileInfo = fileInfo;
FileParts = GetVolumeFileParts().ToArray().ToReadOnly();
internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options)
: base(StreamingMode.Seekable, fileInfo.OpenRead(), FixOptions(options))
{
FileInfo = fileInfo;
FileParts = GetVolumeFileParts().ToArray().ToReadOnly();
}
private static ReaderOptions FixOptions(ReaderOptions options)
{
//make sure we're closing streams with fileinfo
options.LeaveStreamOpen = false;
return options;
}
internal ReadOnlyCollection<RarFilePart> FileParts { get; }
internal FileInfo FileInfo { get; }
internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader)
{
return new FileInfoRarFilePart(this, ReaderOptions.Password, markHeader, fileHeader, FileInfo);
}
internal override IEnumerable<RarFilePart> ReadFileParts()
{
return FileParts;
}
}
private static ReaderOptions FixOptions(ReaderOptions options)
{
//make sure we're closing streams with fileinfo
options.LeaveStreamOpen = false;
return options;
}
internal ReadOnlyCollection<RarFilePart> FileParts { get; }
internal FileInfo FileInfo { get; }
internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader) =>
new FileInfoRarFilePart(this, ReaderOptions.Password, markHeader, fileHeader, FileInfo);
internal override IEnumerable<RarFilePart> ReadFileParts() => FileParts;
}

View File

@@ -1,21 +1,25 @@
using System.IO;
using System.IO;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Archives.Rar;
internal sealed class FileInfoRarFilePart : SeekableFilePart
namespace SharpCompress.Archives.Rar
{
internal FileInfoRarFilePart(
FileInfoRarArchiveVolume volume,
string? password,
MarkHeader mh,
FileHeader fh,
FileInfo fi
)
: base(mh, fh, volume.Index, volume.Stream, password) => FileInfo = fi;
internal sealed class FileInfoRarFilePart : SeekableFilePart
{
internal FileInfoRarFilePart(FileInfoRarArchiveVolume volume, string? password, MarkHeader mh, FileHeader fh, FileInfo fi)
: base(mh, fh, volume.Stream, password)
{
FileInfo = fi;
}
internal FileInfo FileInfo { get; }
internal FileInfo FileInfo { get; }
internal override string FilePartName =>
"Rar File: " + FileInfo.FullName + " File Entry: " + FileHeader.FileName;
internal override string FilePartName
{
get
{
return "Rar File: " + FileInfo.FullName
+ " File Entry: " + FileHeader.FileName;
}
}
}
}

View File

@@ -1,36 +1,23 @@
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common.Rar;
using System.Linq;
namespace SharpCompress.Archives.Rar;
public static class RarArchiveExtensions
namespace SharpCompress.Archives.Rar
{
extension(IRarArchive archive)
public static class RarArchiveExtensions
{
/// <summary>
/// RarArchive is the first volume of a multi-part archive. If MultipartVolume is true and IsFirstVolume is false then the first volume file must be missing.
/// </summary>
public bool IsFirstVolume() => archive.Volumes.Cast<RarVolume>().First().IsFirstVolume;
public static bool IsFirstVolume(this RarArchive archive)
{
return archive.Volumes.First().IsFirstVolume;
}
/// <summary>
/// RarArchive is part of a multi-part archive.
/// </summary>
public bool IsMultipartVolume() => archive.Volumes.Cast<RarVolume>().First().IsMultiVolume;
public static bool IsMultipartVolume(this RarArchive archive)
{
return archive.Volumes.First().IsMultiVolume;
}
}
extension(IRarAsyncArchive archive)
{
/// <summary>
/// RarArchive is the first volume of a multi-part archive. If MultipartVolume is true and IsFirstVolume is false then the first volume file must be missing.
/// </summary>
public async ValueTask<bool> IsFirstVolumeAsync() =>
(await archive.VolumesAsync.CastAsync<RarVolume>().FirstAsync()).IsFirstVolume;
/// <summary>
/// RarArchive is part of a multi-part archive.
/// </summary>
public async ValueTask<bool> IsMultipartVolumeAsync() =>
(await archive.VolumesAsync.CastAsync<RarVolume>().FirstAsync()).IsMultiVolume;
}
}
}
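
A short sketch of the intended use: confirm that a multi-part set was opened at its first volume before trusting its entries. The path is hypothetical; on the newer side of this diff Open returns IRarArchive and on the older side RarArchive, but the call shape is the same.

using System;
using SharpCompress.Archives.Rar;

using var archive = RarArchive.Open(@"C:\backups\set.part01.rar");
if (archive.IsMultipartVolume() && !archive.IsFirstVolume())
{
    // A middle volume was supplied, so entries would be incomplete.
    throw new InvalidOperationException("Open the first volume of the set.");
}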

View File

@@ -1,163 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressors.Rar;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Rar;
namespace SharpCompress.Archives.Rar;
public partial class RarArchive
#if NET8_0_OR_GREATER
: IArchiveOpenable<IRarArchive, IRarAsyncArchive>,
IMultiArchiveOpenable<IRarArchive, IRarAsyncArchive>
#endif
{
public static IRarAsyncArchive OpenAsync(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return (IRarAsyncArchive)Open(new FileInfo(path), readerOptions);
}
public static IRarArchive Open(string filePath, ReaderOptions? options = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
var fileInfo = new FileInfo(filePath);
return new RarArchive(
new SourceStream(
fileInfo,
i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo),
options ?? new ReaderOptions()
)
);
}
public static IRarArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
{
fileInfo.NotNull(nameof(fileInfo));
return new RarArchive(
new SourceStream(
fileInfo,
i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo),
options ?? new ReaderOptions()
)
);
}
public static IRarArchive Open(Stream stream, ReaderOptions? options = null)
{
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new RarArchive(new SourceStream(stream, _ => null, options ?? new ReaderOptions()));
}
public static IRarArchive Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new RarArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IRarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new RarArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IRarAsyncArchive OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IRarAsyncArchive)Open(stream, readerOptions);
}
public static IRarAsyncArchive OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IRarAsyncArchive)Open(fileInfo, readerOptions);
}
public static IRarAsyncArchive OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IRarAsyncArchive)Open(streams, readerOptions);
}
public static IRarAsyncArchive OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IRarAsyncArchive)Open(fileInfos, readerOptions);
}
public static bool IsRarFile(string filePath) => IsRarFile(new FileInfo(filePath));
public static bool IsRarFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsRarFile(stream);
}
public static bool IsRarFile(Stream stream, ReaderOptions? options = null)
{
try
{
MarkHeader.Read(stream, true, false);
return true;
}
catch
{
return false;
}
}
}
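
All of the overloads above funnel into a SourceStream with a part-resolver delegate (i => files[i], returning null when the parts run out). Passing an explicit volume list looks like this sketch, with hypothetical file names:

using System;
using System.IO;
using System.Linq;
using SharpCompress.Archives.Rar;

var parts = new[] { "set.part01.rar", "set.part02.rar", "set.part03.rar" }
    .Select(name => new FileInfo(name));

using var archive = RarArchive.Open(parts);
foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
{
    Console.WriteLine($"{entry.Key} ({entry.Size} bytes)");
}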

View File

@@ -1,118 +1,140 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressors.Rar;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Rar;
namespace SharpCompress.Archives.Rar;
public interface IRarArchiveCommon
namespace SharpCompress.Archives.Rar
{
int MinVersion { get; }
int MaxVersion { get; }
}
public interface IRarArchive : IArchive, IRarArchiveCommon { }
public interface IRarAsyncArchive : IAsyncArchive, IRarArchiveCommon { }
public partial class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>, IRarArchive
{
private bool _disposed;
internal Lazy<IRarUnpack> UnpackV2017 { get; } =
new(() => new Compressors.Rar.UnpackV2017.Unpack());
internal Lazy<IRarUnpack> UnpackV1 { get; } = new(() => new Compressors.Rar.UnpackV1.Unpack());
private RarArchive(SourceStream sourceStream)
: base(ArchiveType.Rar, sourceStream) { }
public override void Dispose()
public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
{
if (!_disposed)
internal Lazy<IRarUnpack> UnpackV2017 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV2017.Unpack());
internal Lazy<IRarUnpack> UnpackV1 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV1.Unpack());
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
internal RarArchive(FileInfo fileInfo, ReaderOptions options)
: base(ArchiveType.Rar, fileInfo, options)
{
if (UnpackV1.IsValueCreated && UnpackV1.Value is IDisposable unpackV1)
}
protected override IEnumerable<RarVolume> LoadVolumes(FileInfo file)
{
return RarArchiveVolumeFactory.GetParts(file, ReaderOptions);
}
/// <summary>
/// Takes multiple seekable Streams for a multi-part archive
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
internal RarArchive(IEnumerable<Stream> streams, ReaderOptions options)
: base(ArchiveType.Rar, streams, options)
{
}
protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes)
{
return RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions);
}
protected override IEnumerable<RarVolume> LoadVolumes(IEnumerable<Stream> streams)
{
return RarArchiveVolumeFactory.GetParts(streams, ReaderOptions);
}
protected override IReader CreateReaderForSolidExtraction()
{
var stream = Volumes.First().Stream;
stream.Position = 0;
return RarReader.Open(stream, ReaderOptions);
}
public override bool IsSolid => Volumes.First().IsSolidArchive;
#region Creation
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
public static RarArchive Open(string filePath, ReaderOptions? options = null)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
return new RarArchive(new FileInfo(filePath), options ?? new ReaderOptions());
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
public static RarArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new RarArchive(fileInfo, options ?? new ReaderOptions());
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
public static RarArchive Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
return Open(stream.AsEnumerable(), options ?? new ReaderOptions());
}
/// <summary>
/// Takes multiple seekable Streams for a multi-part archive
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
{
streams.CheckNotNull(nameof(streams));
return new RarArchive(streams, options ?? new ReaderOptions());
}
public static bool IsRarFile(string filePath)
{
return IsRarFile(new FileInfo(filePath));
}
public static bool IsRarFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
unpackV1.Dispose();
return false;
}
_disposed = true;
base.Dispose();
}
}
public override async ValueTask DisposeAsync()
{
if (!_disposed)
{
if (UnpackV1.IsValueCreated && UnpackV1.Value is IDisposable unpackV1)
using (Stream stream = fileInfo.OpenRead())
{
unpackV1.Dispose();
return IsRarFile(stream);
}
_disposed = true;
await base.DisposeAsync();
}
}
protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes) =>
RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions);
protected override IEnumerable<RarVolume> LoadVolumes(SourceStream sourceStream)
{
sourceStream.LoadAllParts();
var streams = sourceStream.Streams.ToArray();
var i = 0;
if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions))
{
sourceStream.IsVolumes = true;
streams[1].Position = 0;
sourceStream.Position = 0;
return sourceStream.Streams.Select(a => new StreamRarArchiveVolume(
a,
ReaderOptions,
i++
));
}
return new StreamRarArchiveVolume(sourceStream, ReaderOptions, i++).AsEnumerable();
}
protected override IReader CreateReaderForSolidExtraction() =>
CreateReaderForSolidExtractionInternal();
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync() =>
new(CreateReaderForSolidExtractionInternal());
private RarReader CreateReaderForSolidExtractionInternal()
{
if (this.IsMultipartVolume())
public static bool IsRarFile(Stream stream, ReaderOptions? options = null)
{
var streams = Volumes.Select(volume =>
try
{
volume.Stream.Position = 0;
return volume.Stream;
});
return (RarReader)RarReader.Open(streams, ReaderOptions);
MarkHeader.Read(stream, true, false);
return true;
}
catch
{
return false;
}
}
var stream = Volumes.First().Stream;
stream.Position = 0;
return (RarReader)RarReader.Open(stream, ReaderOptions);
#endregion
}
public override bool IsSolid => Volumes.First().IsSolidArchive;
public override bool IsEncrypted => Entries.First(x => !x.IsDirectory).IsEncrypted;
public virtual int MinVersion => Volumes.First().MinVersion;
public virtual int MaxVersion => Volumes.First().MaxVersion;
}

View File

@@ -2,138 +2,87 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressors.Rar;
using SharpCompress.Readers;
namespace SharpCompress.Archives.Rar;
public class RarArchiveEntry : RarEntry, IArchiveEntry
namespace SharpCompress.Archives.Rar
{
private readonly ICollection<RarFilePart> parts;
private readonly RarArchive archive;
private readonly ReaderOptions readerOptions;
internal RarArchiveEntry(
RarArchive archive,
IEnumerable<RarFilePart> parts,
ReaderOptions readerOptions
)
public class RarArchiveEntry : RarEntry, IArchiveEntry
{
this.parts = parts.ToList();
this.archive = archive;
this.readerOptions = readerOptions;
IsSolid = FileHeader.IsSolid;
}
private readonly ICollection<RarFilePart> parts;
private readonly RarArchive archive;
private readonly ReaderOptions readerOptions;
public override CompressionType CompressionType => CompressionType.Rar;
public IArchive Archive => archive;
internal override IEnumerable<FilePart> Parts => parts.Cast<FilePart>();
internal override FileHeader FileHeader => parts.First().FileHeader;
public override long Crc
{
get
internal RarArchiveEntry(RarArchive archive, IEnumerable<RarFilePart> parts, ReaderOptions readerOptions)
{
CheckIncomplete();
return BitConverter.ToUInt32(
parts.Select(fp => fp.FileHeader).Single(fh => !fh.IsSplitAfter).FileCrc.NotNull(),
0
);
this.parts = parts.ToList();
this.archive = archive;
this.readerOptions = readerOptions;
}
public override CompressionType CompressionType => CompressionType.Rar;
public IArchive Archive => archive;
internal override IEnumerable<FilePart> Parts => parts.Cast<FilePart>();
internal override FileHeader FileHeader => parts.First().FileHeader;
public override long Crc
{
get
{
CheckIncomplete();
return parts.Select(fp => fp.FileHeader).Single(fh => !fh.IsSplitAfter).FileCrc;
}
}
public override long Size
{
get
{
CheckIncomplete();
return parts.First().FileHeader.UncompressedSize;
}
}
public override long CompressedSize
{
get
{
CheckIncomplete();
return parts.Aggregate(0L, (total, fp) => total + fp.FileHeader.CompressedSize);
}
}
public Stream OpenEntryStream()
{
if (IsRarV3)
{
return new RarStream(archive.UnpackV1.Value, FileHeader, new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive));
}
return new RarStream(archive.UnpackV2017.Value, FileHeader, new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive));
}
public bool IsComplete
{
get
{
var headers = parts.Select(x => x.FileHeader);
return !headers.First().IsSplitBefore && !headers.Last().IsSplitAfter;
}
}
private void CheckIncomplete()
{
if (!readerOptions.DisableCheckIncomplete && !IsComplete)
{
throw new IncompleteArchiveException("ArchiveEntry is incomplete and cannot perform this operation.");
}
}
}
public override long Size
{
get
{
CheckIncomplete();
return parts.First().FileHeader.UncompressedSize;
}
}
public override long CompressedSize
{
get
{
CheckIncomplete();
return parts.Aggregate(0L, (total, fp) => total + fp.FileHeader.CompressedSize);
}
}
public Stream OpenEntryStream()
{
RarStream stream;
if (IsRarV3)
{
stream = new RarStream(
archive.UnpackV1.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>())
);
}
else
{
stream = new RarStream(
archive.UnpackV2017.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>())
);
}
stream.Initialize();
return stream;
}
public async ValueTask<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
)
{
RarStream stream;
if (IsRarV3)
{
stream = new RarStream(
archive.UnpackV1.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>())
);
}
else
{
stream = new RarStream(
archive.UnpackV2017.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>())
);
}
await stream.InitializeAsync(cancellationToken);
return stream;
}
public bool IsComplete
{
get
{
var headers = parts.Select(x => x.FileHeader);
return !headers.First().IsSplitBefore && !headers.Last().IsSplitAfter;
}
}
private void CheckIncomplete()
{
if (!readerOptions.DisableCheckIncomplete && !IsComplete)
{
throw new IncompleteArchiveException(
"ArchiveEntry is incomplete and cannot perform this operation."
);
}
}
}
}
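
Because an entry's data can span volume boundaries, OpenEntryStream stitches the parts together behind MultiVolumeReadOnlyStream; consuming it is then an ordinary stream copy. A sketch with hypothetical paths:

using System.IO;
using System.Linq;
using SharpCompress.Archives.Rar;

using var archive = RarArchive.Open("set.part01.rar");
var entry = archive.Entries.First(e => !e.IsDirectory);

using var input = entry.OpenEntryStream();
using var output = File.Create("extracted.bin");
input.CopyTo(output); // crosses volume boundaries transparently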

View File

@@ -1,52 +1,49 @@
using System.Collections.Generic;
using System.Collections.Generic;
using SharpCompress.Common.Rar;
using SharpCompress.Readers;
namespace SharpCompress.Archives.Rar;
internal static class RarArchiveEntryFactory
namespace SharpCompress.Archives.Rar
{
private static IEnumerable<RarFilePart> GetFileParts(IEnumerable<RarVolume> parts)
internal static class RarArchiveEntryFactory
{
foreach (var rarPart in parts)
private static IEnumerable<RarFilePart> GetFileParts(IEnumerable<RarVolume> parts)
{
foreach (var fp in rarPart.ReadFileParts())
foreach (RarVolume rarPart in parts)
{
yield return fp;
foreach (RarFilePart fp in rarPart.ReadFileParts())
{
yield return fp;
}
}
}
}
private static IEnumerable<IEnumerable<RarFilePart>> GetMatchedFileParts(
IEnumerable<RarVolume> parts
)
{
var groupedParts = new List<RarFilePart>();
foreach (var fp in GetFileParts(parts))
private static IEnumerable<IEnumerable<RarFilePart>> GetMatchedFileParts(IEnumerable<RarVolume> parts)
{
groupedParts.Add(fp);
var groupedParts = new List<RarFilePart>();
foreach (RarFilePart fp in GetFileParts(parts))
{
groupedParts.Add(fp);
if (!fp.FileHeader.IsSplitAfter)
if (!fp.FileHeader.IsSplitAfter)
{
yield return groupedParts;
groupedParts = new List<RarFilePart>();
}
}
if (groupedParts.Count > 0)
{
yield return groupedParts;
groupedParts = new List<RarFilePart>();
}
}
if (groupedParts.Count > 0)
{
yield return groupedParts;
}
}
internal static IEnumerable<RarArchiveEntry> GetEntries(
RarArchive archive,
IEnumerable<RarVolume> rarParts,
ReaderOptions readerOptions
)
{
foreach (var groupedParts in GetMatchedFileParts(rarParts))
internal static IEnumerable<RarArchiveEntry> GetEntries(RarArchive archive,
IEnumerable<RarVolume> rarParts,
ReaderOptions readerOptions)
{
yield return new RarArchiveEntry(archive, groupedParts, readerOptions);
foreach (var groupedParts in GetMatchedFileParts(rarParts))
{
yield return new RarArchiveEntry(archive, groupedParts, readerOptions);
}
}
}
}
}

View File

@@ -1,52 +1,140 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Text.RegularExpressions;
using SharpCompress.Common.Rar;
using SharpCompress.Readers;
using System.Linq;
using System.Text;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Archives.Rar;
internal static class RarArchiveVolumeFactory
namespace SharpCompress.Archives.Rar
{
internal static FileInfo? GetFilePart(int index, FileInfo part1) //base the name on the first part
internal static class RarArchiveVolumeFactory
{
FileInfo? item = null;
//new style rar - ..part1 | /part01 | part001 ....
var m = Regex.Match(part1.Name, @"^(.*\.part)([0-9]+)(\.rar)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(
Path.Combine(
part1.DirectoryName!,
String.Concat(
m.Groups[1].Value,
(index + 1).ToString().PadLeft(m.Groups[2].Value.Length, '0'),
m.Groups[3].Value
)
)
);
else
internal static IEnumerable<RarVolume> GetParts(IEnumerable<Stream> streams, ReaderOptions options)
{
//old style - ...rar, .r00, .r01 ...
m = Regex.Match(part1.Name, @"^(.*\.)([r-z{])(ar|[0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(
Path.Combine(
part1.DirectoryName!,
String.Concat(
m.Groups[1].Value,
index == 0
? m.Groups[2].Value + m.Groups[3].Value
: (char)(m.Groups[2].Value[0] + ((index - 1) / 100))
+ (index - 1).ToString("D4").Substring(2)
)
)
);
else //split .001, .002 ....
return ArchiveVolumeFactory.GetFilePart(index, part1);
foreach (Stream s in streams)
{
if (!s.CanRead || !s.CanSeek)
{
throw new ArgumentException("Stream is not readable and seekable");
}
StreamRarArchiveVolume part = new StreamRarArchiveVolume(s, options);
yield return part;
}
}
if (item != null && item.Exists)
return item;
internal static IEnumerable<RarVolume> GetParts(FileInfo fileInfo, ReaderOptions options)
{
FileInfoRarArchiveVolume part = new FileInfoRarArchiveVolume(fileInfo, options);
yield return part;
return null; //no more items
ArchiveHeader ah = part.ArchiveHeader;
if (!ah.IsVolume)
{
yield break; //if file isn't volume then there is no reason to look
}
fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart)!;
//we use fileinfo because rar is dumb and looks at file names rather than archive info for another volume
while (fileInfo != null && fileInfo.Exists)
{
part = new FileInfoRarArchiveVolume(fileInfo, options);
fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart)!;
yield return part;
}
}
private static FileInfo? GetNextFileInfo(ArchiveHeader ah, FileInfoRarFilePart? currentFilePart)
{
if (currentFilePart is null)
{
return null;
}
bool oldNumbering = ah.OldNumberingFormat
|| currentFilePart.MarkHeader.OldNumberingFormat;
if (oldNumbering)
{
return FindNextFileWithOldNumbering(currentFilePart.FileInfo);
}
else
{
return FindNextFileWithNewNumbering(currentFilePart.FileInfo);
}
}
private static FileInfo FindNextFileWithOldNumbering(FileInfo currentFileInfo)
{
// .rar, .r00, .r01, ...
string extension = currentFileInfo.Extension;
var buffer = new StringBuilder(currentFileInfo.FullName.Length);
buffer.Append(currentFileInfo.FullName.Substring(0,
currentFileInfo.FullName.Length - extension.Length));
if (string.Compare(extension, ".rar", StringComparison.OrdinalIgnoreCase) == 0)
{
buffer.Append(".r00");
}
else
{
if (int.TryParse(extension.Substring(2, 2), out int num))
{
num++;
buffer.Append(".r");
if (num < 10)
{
buffer.Append('0');
}
buffer.Append(num);
}
else
{
ThrowInvalidFileName(currentFileInfo);
}
}
return new FileInfo(buffer.ToString());
}
private static FileInfo FindNextFileWithNewNumbering(FileInfo currentFileInfo)
{
// part1.rar, part2.rar, ...
string extension = currentFileInfo.Extension;
if (string.Compare(extension, ".rar", StringComparison.OrdinalIgnoreCase) != 0)
{
throw new ArgumentException("Invalid extension, expected 'rar': " + currentFileInfo.FullName);
}
int startIndex = currentFileInfo.FullName.LastIndexOf(".part");
if (startIndex < 0)
{
ThrowInvalidFileName(currentFileInfo);
}
StringBuilder buffer = new StringBuilder(currentFileInfo.FullName.Length);
buffer.Append(currentFileInfo.FullName, 0, startIndex);
string numString = currentFileInfo.FullName.Substring(startIndex + 5,
currentFileInfo.FullName.IndexOf('.', startIndex + 5) -
startIndex - 5);
buffer.Append(".part");
if (int.TryParse(numString, out int num))
{
num++;
for (int i = 0; i < numString.Length - num.ToString().Length; i++)
{
buffer.Append('0');
}
buffer.Append(num);
}
else
{
ThrowInvalidFileName(currentFileInfo);
}
buffer.Append(".rar");
return new FileInfo(buffer.ToString());
}
private static void ThrowInvalidFileName(FileInfo fileInfo)
{
throw new ArgumentException("Filename invalid or next archive could not be found:"
+ fileInfo.FullName);
}
}
}
}
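
To make the two numbering schemes concrete, here is a tiny standalone sketch of the successor-name logic; it simplifies the regex and string handling above (for instance, it ignores the .r99 -> .s00 overflow that the real code handles) and is not the library implementation.

using System;
using System.Text.RegularExpressions;

static string NextVolumeName(string name)
{
    // New style: set.part01.rar -> set.part02.rar (digit width preserved).
    var m = Regex.Match(name, @"^(.*\.part)([0-9]+)(\.rar)$", RegexOptions.IgnoreCase);
    if (m.Success)
    {
        var next = int.Parse(m.Groups[2].Value) + 1;
        return m.Groups[1].Value
            + next.ToString().PadLeft(m.Groups[2].Value.Length, '0')
            + m.Groups[3].Value;
    }

    // Old style: set.rar -> set.r00 -> set.r01 -> ...
    if (name.EndsWith(".rar", StringComparison.OrdinalIgnoreCase))
    {
        return name.Substring(0, name.Length - 4) + ".r00";
    }
    var number = int.Parse(name.Substring(name.Length - 2)) + 1;
    return name.Substring(0, name.Length - 2) + number.ToString("D2");
}

Console.WriteLine(NextVolumeName("set.part01.rar")); // set.part02.rar
Console.WriteLine(NextVolumeName("set.rar"));        // set.r00
Console.WriteLine(NextVolumeName("set.r09"));        // set.r10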

View File

@@ -1,45 +1,31 @@
using System.IO;
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Archives.Rar;
internal class SeekableFilePart : RarFilePart
namespace SharpCompress.Archives.Rar
{
private readonly Stream _stream;
private readonly string? _password;
internal SeekableFilePart(
MarkHeader mh,
FileHeader fh,
int index,
Stream stream,
string? password
)
: base(mh, fh, index)
internal class SeekableFilePart : RarFilePart
{
_stream = stream;
_password = password;
}
private readonly Stream stream;
private readonly string? password;
internal override Stream GetCompressedStream()
{
_stream.Position = FileHeader.DataStartPosition;
if (FileHeader.R4Salt != null)
internal SeekableFilePart(MarkHeader mh, FileHeader fh, Stream stream, string? password)
: base(mh, fh)
{
var cryptKey = new CryptKey3(_password!);
return new RarCryptoWrapper(_stream, FileHeader.R4Salt, cryptKey);
this.stream = stream;
this.password = password;
}
if (FileHeader.Rar5CryptoInfo != null)
internal override Stream GetCompressedStream()
{
var cryptKey = new CryptKey5(_password!, FileHeader.Rar5CryptoInfo);
return new RarCryptoWrapper(_stream, FileHeader.Rar5CryptoInfo.Salt, cryptKey);
stream.Position = FileHeader.DataStartPosition;
if (FileHeader.R4Salt != null)
{
return new RarCryptoWrapper(stream, password!, FileHeader.R4Salt);
}
return stream;
}
return _stream;
internal override string FilePartName => "Unknown Stream - File Entry: " + FileHeader.FileName;
}
internal override string FilePartName => "Unknown Stream - File Entry: " + FileHeader.FileName;
}
}

View File

@@ -1,19 +1,27 @@
using System.Collections.Generic;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives.Rar;
internal class StreamRarArchiveVolume : RarVolume
namespace SharpCompress.Archives.Rar
{
internal StreamRarArchiveVolume(Stream stream, ReaderOptions options, int index)
: base(StreamingMode.Seekable, stream, options, index) { }
internal class StreamRarArchiveVolume : RarVolume
{
internal StreamRarArchiveVolume(Stream stream, ReaderOptions options)
: base(StreamingMode.Seekable, stream, options)
{
}
internal override IEnumerable<RarFilePart> ReadFileParts() => GetVolumeFileParts();
internal override IEnumerable<RarFilePart> ReadFileParts()
{
return GetVolumeFileParts();
}
internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader) =>
new SeekableFilePart(markHeader, fileHeader, Index, Stream, ReaderOptions.Password);
}
internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader)
{
return new SeekableFilePart(markHeader, fileHeader, Stream, ReaderOptions.Password);
}
}
}

View File

@@ -1,166 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.SevenZip;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives.SevenZip;
public partial class SevenZipArchive
#if NET8_0_OR_GREATER
: IArchiveOpenable<IArchive, IAsyncArchive>,
IMultiArchiveOpenable<IArchive, IAsyncArchive>
#endif
{
public static IAsyncArchive OpenAsync(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty("path");
return (IAsyncArchive)Open(new FileInfo(path), readerOptions ?? new ReaderOptions());
}
public static IArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty("filePath");
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
public static IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.NotNull("fileInfo");
return new SevenZipArchive(
new SourceStream(
fileInfo,
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
)
);
}
public static IArchive Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new SevenZipArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new SevenZipArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.NotNull("stream");
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new SevenZipArchive(
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
);
}
public static IAsyncArchive OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)Open(stream, readerOptions);
}
public static IAsyncArchive OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)Open(fileInfo, readerOptions);
}
public static IAsyncArchive OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)Open(streams, readerOptions);
}
public static IAsyncArchive OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)Open(fileInfos, readerOptions);
}
public static bool IsSevenZipFile(string filePath) => IsSevenZipFile(new FileInfo(filePath));
public static bool IsSevenZipFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsSevenZipFile(stream);
}
public static bool IsSevenZipFile(Stream stream)
{
try
{
return SignatureMatch(stream);
}
catch
{
return false;
}
}
private static ReadOnlySpan<byte> Signature =>
new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };
private static bool SignatureMatch(Stream stream)
{
var reader = new BinaryReader(stream);
ReadOnlySpan<byte> signatureBytes = reader.ReadBytes(6);
return signatureBytes.SequenceEqual(Signature);
}
}
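
The detection above boils down to comparing the first six bytes of the stream against the fixed 7z signature ('7', 'z', 0xBC, 0xAF, 0x27, 0x1C). A standalone sketch with a hypothetical file name:

using System;
using System.IO;

static bool LooksLikeSevenZip(string path)
{
    ReadOnlySpan<byte> signature = stackalloc byte[] { 0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C };
    using var stream = File.OpenRead(path);
    Span<byte> header = stackalloc byte[6];
    return stream.Read(header) == 6 && header.SequenceEqual(signature);
}

Console.WriteLine(LooksLikeSevenZip("example.7z"));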

View File

@@ -1,226 +1,222 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.SevenZip;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives.SevenZip;
public partial class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVolume>
namespace SharpCompress.Archives.SevenZip
{
private ArchiveDatabase? _database;
private SevenZipArchive(SourceStream sourceStream)
: base(ArchiveType.SevenZip, sourceStream) { }
internal SevenZipArchive()
: base(ArchiveType.SevenZip) { }
protected override IEnumerable<SevenZipVolume> LoadVolumes(SourceStream sourceStream)
public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVolume>
{
sourceStream.NotNull("SourceStream is null").LoadAllParts();
return new SevenZipVolume(sourceStream, ReaderOptions, 0).AsEnumerable();
}
private ArchiveDatabase database;
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(string filePath, ReaderOptions readerOptions = null)
{
filePath.CheckNotNullOrEmpty("filePath");
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
protected override IEnumerable<SevenZipArchiveEntry> LoadEntries(
IEnumerable<SevenZipVolume> volumes
)
{
var stream = volumes.Single().Stream;
LoadFactory(stream);
if (_database is null)
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
{
return Enumerable.Empty<SevenZipArchiveEntry>();
fileInfo.CheckNotNull("fileInfo");
return new SevenZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
var entries = new SevenZipArchiveEntry[_database._files.Count];
for (var i = 0; i < _database._files.Count; i++)
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
{
var file = _database._files[i];
entries[i] = new SevenZipArchiveEntry(
this,
new SevenZipFilePart(stream, _database, i, file, ReaderOptions.ArchiveEncoding)
);
stream.CheckNotNull("stream");
return new SevenZipArchive(stream, readerOptions ?? new ReaderOptions());
}
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
internal SevenZipArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.SevenZip, fileInfo, readerOptions)
{
var isSolid = false;
foreach (var entry in group)
}
protected override IEnumerable<SevenZipVolume> LoadVolumes(FileInfo file)
{
return new SevenZipVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
}
public static bool IsSevenZipFile(string filePath)
{
return IsSevenZipFile(new FileInfo(filePath));
}
public static bool IsSevenZipFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
entry.IsSolid = isSolid;
isSolid = true;
return false;
}
using (Stream stream = fileInfo.OpenRead())
{
return IsSevenZipFile(stream);
}
}
return entries;
}
private void LoadFactory(Stream stream)
{
if (_database is null)
internal SevenZipArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.SevenZip, stream.AsEnumerable(), readerOptions)
{
stream.Position = 0;
var reader = new ArchiveReader();
reader.Open(stream, lookForHeader: ReaderOptions.LookForHeader);
_database = reader.ReadDatabase(new PasswordProvider(ReaderOptions.Password));
}
}
protected override IReader CreateReaderForSolidExtraction() =>
new SevenZipReader(ReaderOptions, this);
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync() =>
new(new SevenZipReader(ReaderOptions, this));
public override bool IsSolid =>
Entries
.Where(x => !x.IsDirectory)
.GroupBy(x => x.FilePart.Folder)
.Any(folder => folder.Count() > 1);
public override bool IsEncrypted => Entries.First(x => !x.IsDirectory).IsEncrypted;
public override long TotalSize =>
_database?._packSizes.Aggregate(0L, (total, packSize) => total + packSize) ?? 0;
private sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
{
private readonly SevenZipArchive _archive;
private SevenZipEntry? _currentEntry;
internal SevenZipReader(ReaderOptions readerOptions, SevenZipArchive archive)
: base(readerOptions, ArchiveType.SevenZip) => this._archive = archive;
public override SevenZipVolume Volume => _archive.Volumes.Single();
protected override IEnumerable<SevenZipEntry> GetEntries(Stream stream)
internal SevenZipArchive()
: base(ArchiveType.SevenZip)
{
var entries = _archive.Entries.ToList();
stream.Position = 0;
foreach (var dir in entries.Where(x => x.IsDirectory))
}
protected override IEnumerable<SevenZipVolume> LoadVolumes(IEnumerable<Stream> streams)
{
foreach (Stream s in streams)
{
_currentEntry = dir;
yield return dir;
}
foreach (var entry in entries.Where(x => !x.IsDirectory))
{
_currentEntry = entry;
yield return entry;
if (!s.CanRead || !s.CanSeek)
{
throw new ArgumentException("Stream is not readable and seekable");
}
SevenZipVolume volume = new SevenZipVolume(s, ReaderOptions);
yield return volume;
}
}
protected override EntryStream GetEntryStream()
protected override IEnumerable<SevenZipArchiveEntry> LoadEntries(IEnumerable<SevenZipVolume> volumes)
{
var entry = _currentEntry.NotNull("currentEntry is null");
if (entry.IsDirectory)
var stream = volumes.Single().Stream;
LoadFactory(stream);
for (int i = 0; i < database._files.Count; i++)
{
return CreateEntryStream(Stream.Null);
var file = database._files[i];
yield return new SevenZipArchiveEntry(this, new SevenZipFilePart(stream, database, i, file, ReaderOptions.ArchiveEncoding));
}
return CreateEntryStream(new SyncOnlyStream(entry.FilePart.GetCompressedStream()));
}
}
private sealed class SyncOnlyStream : Stream
{
private readonly Stream _baseStream;
public SyncOnlyStream(Stream baseStream) => _baseStream = baseStream;
public override bool CanRead => _baseStream.CanRead;
public override bool CanSeek => _baseStream.CanSeek;
public override bool CanWrite => _baseStream.CanWrite;
public override long Length => _baseStream.Length;
public override long Position
{
get => _baseStream.Position;
set => _baseStream.Position = value;
}
public override void Flush() => _baseStream.Flush();
public override int Read(byte[] buffer, int offset, int count) =>
_baseStream.Read(buffer, offset, count);
public override long Seek(long offset, SeekOrigin origin) =>
_baseStream.Seek(offset, origin);
public override void SetLength(long value) => _baseStream.SetLength(value);
public override void Write(byte[] buffer, int offset, int count) =>
_baseStream.Write(buffer, offset, count);
public override Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
private void LoadFactory(Stream stream)
{
cancellationToken.ThrowIfCancellationRequested();
return Task.FromResult(_baseStream.Read(buffer, offset, count));
}
public override Task WriteAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
cancellationToken.ThrowIfCancellationRequested();
_baseStream.Write(buffer, offset, count);
return Task.CompletedTask;
}
public override Task FlushAsync(CancellationToken cancellationToken)
{
cancellationToken.ThrowIfCancellationRequested();
_baseStream.Flush();
return Task.CompletedTask;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new ValueTask<int>(_baseStream.Read(buffer.Span));
}
public override ValueTask WriteAsync(
ReadOnlyMemory<byte> buffer,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
_baseStream.Write(buffer.Span);
return ValueTask.CompletedTask;
}
#endif
protected override void Dispose(bool disposing)
{
if (disposing)
if (database is null)
{
_baseStream.Dispose();
stream.Position = 0;
var reader = new ArchiveReader();
reader.Open(stream);
database = reader.ReadDatabase(new PasswordProvider(ReaderOptions.Password));
}
base.Dispose(disposing);
}
}
private class PasswordProvider : IPasswordProvider
{
private readonly string? _password;
public static bool IsSevenZipFile(Stream stream)
{
try
{
return SignatureMatch(stream);
}
catch
{
return false;
}
}
public PasswordProvider(string? password) => _password = password;
private static ReadOnlySpan<byte> SIGNATURE => new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };
public string? CryptoGetTextPassword() => _password;
private static bool SignatureMatch(Stream stream)
{
BinaryReader reader = new BinaryReader(stream);
ReadOnlySpan<byte> signatureBytes = reader.ReadBytes(6);
return signatureBytes.SequenceEqual(SIGNATURE);
}
protected override IReader CreateReaderForSolidExtraction()
{
return new SevenZipReader(ReaderOptions, this);
}
public override bool IsSolid { get { return Entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder).Count() > 1; } }
public override long TotalSize
{
get
{
int i = Entries.Count;
return database._packSizes.Aggregate(0L, (total, packSize) => total + packSize);
}
}
private sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
{
private readonly SevenZipArchive archive;
private CFolder currentFolder;
private Stream currentStream;
private CFileItem currentItem;
internal SevenZipReader(ReaderOptions readerOptions, SevenZipArchive archive)
: base(readerOptions, ArchiveType.SevenZip)
{
this.archive = archive;
}
public override SevenZipVolume Volume => archive.Volumes.Single();
protected override IEnumerable<SevenZipEntry> GetEntries(Stream stream)
{
List<SevenZipArchiveEntry> entries = archive.Entries.ToList();
stream.Position = 0;
foreach (var dir in entries.Where(x => x.IsDirectory))
{
yield return dir;
}
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
{
currentFolder = group.Key;
if (group.Key is null)
{
currentStream = Stream.Null;
}
else
{
currentStream = archive.database.GetFolderStream(stream, currentFolder, new PasswordProvider(Options.Password));
}
foreach (var entry in group)
{
currentItem = entry.FilePart.Header;
yield return entry;
}
}
}
protected override EntryStream GetEntryStream()
{
return CreateEntryStream(new ReadOnlySubStream(currentStream, currentItem.Size));
}
}
private class PasswordProvider : IPasswordProvider
{
private readonly string _password;
public PasswordProvider(string password)
{
_password = password;
}
public string CryptoGetTextPassword()
{
return _password;
}
}
}
}
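
Because 7z folders are often solid, calling OpenEntryStream per entry can decompress the same folder repeatedly; the private SevenZipReader above exists so that ExtractAllEntries can stream entries in order and decompress each folder once. A usage sketch (file and output paths hypothetical):

using SharpCompress.Archives;
using SharpCompress.Archives.SevenZip;
using SharpCompress.Common;
using SharpCompress.Readers;

using var archive = SevenZipArchive.Open("example.7z");

// ExtractAllEntries routes through CreateReaderForSolidExtraction.
using var reader = archive.ExtractAllEntries();
while (reader.MoveToNextEntry())
{
    if (!reader.Entry.IsDirectory)
    {
        reader.WriteEntryToDirectory(
            "out",
            new ExtractionOptions { ExtractFullPath = true, Overwrite = true });
    }
}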

View File

@@ -1,26 +1,28 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.SevenZip;
namespace SharpCompress.Archives.SevenZip;
public class SevenZipArchiveEntry : SevenZipEntry, IArchiveEntry
namespace SharpCompress.Archives.SevenZip
{
internal SevenZipArchiveEntry(SevenZipArchive archive, SevenZipFilePart part)
: base(part) => Archive = archive;
public class SevenZipArchiveEntry : SevenZipEntry, IArchiveEntry
{
internal SevenZipArchiveEntry(SevenZipArchive archive, SevenZipFilePart part)
: base(part)
{
Archive = archive;
}
public Stream OpenEntryStream() => FilePart.GetCompressedStream();
public Stream OpenEntryStream()
{
return FilePart.GetCompressedStream();
}
public ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default) =>
new(OpenEntryStream());
public IArchive Archive { get; }
public IArchive Archive { get; }
public bool IsComplete => true;
public bool IsComplete => true;
/// <summary>
/// This is a 7Zip Anti item
/// </summary>
public bool IsAnti => FilePart.Header.IsAnti;
}
/// <summary>
/// This is a 7Zip Anti item
/// </summary>
public bool IsAnti => FilePart.Header.IsAnti;
}
}

View File

@@ -1,166 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Writers;
using SharpCompress.Writers.Tar;
namespace SharpCompress.Archives.Tar;
public partial class TarArchive
#if NET8_0_OR_GREATER
: IArchiveOpenable<IWritableArchive, IWritableAsyncArchive>,
IMultiArchiveOpenable<IWritableArchive, IWritableAsyncArchive>
#endif
{
public static IWritableArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
public static IWritableArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.NotNull(nameof(fileInfo));
return new TarArchive(
new SourceStream(
fileInfo,
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
)
);
}
public static IWritableArchive Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new TarArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IWritableArchive Open(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null
)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new TarArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IWritableArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new TarArchive(
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
);
}
public static IWritableAsyncArchive OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(stream, readerOptions);
}
public static IWritableAsyncArchive OpenAsync(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(new FileInfo(path), readerOptions);
}
public static IWritableAsyncArchive OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(fileInfo, readerOptions);
}
public static IWritableAsyncArchive OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(streams, readerOptions);
}
public static IWritableAsyncArchive OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(fileInfos, readerOptions);
}
public static bool IsTarFile(string filePath) => IsTarFile(new FileInfo(filePath));
public static bool IsTarFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsTarFile(stream);
}
public static bool IsTarFile(Stream stream)
{
try
{
var tarHeader = new TarHeader(new ArchiveEncoding());
var readSucceeded = tarHeader.Read(new BinaryReader(stream));
var isEmptyArchive =
tarHeader.Name?.Length == 0
&& tarHeader.Size == 0
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;
}
catch { }
return false;
}
public static TarArchive Create() => new();
}
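
Unlike RAR or 7z, tar has no leading magic number, which is why IsTarFile attempts to parse a full header block and also accepts an all-zero block as an empty archive. A usage sketch with hypothetical paths:

using System;
using SharpCompress.Archives.Tar;

foreach (var path in new[] { "backup.tar", "notes.txt" })
{
    Console.WriteLine($"{path}: {TarArchive.IsTarFile(path)}");
}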

View File

@@ -1,7 +1,8 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
@@ -13,180 +14,191 @@ using SharpCompress.Readers.Tar;
using SharpCompress.Writers;
using SharpCompress.Writers.Tar;
namespace SharpCompress.Archives.Tar;
public partial class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
namespace SharpCompress.Archives.Tar
{
protected override IEnumerable<TarVolume> LoadVolumes(SourceStream sourceStream)
public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
{
sourceStream.NotNull("SourceStream is null").LoadAllParts();
return new TarVolume(sourceStream, ReaderOptions, 1).AsEnumerable();
}
private TarArchive(SourceStream sourceStream)
: base(ArchiveType.Tar, sourceStream) { }
private TarArchive()
: base(ArchiveType.Tar) { }
protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
{
var stream = volumes.Single().Stream;
TarHeader? previousHeader = null;
foreach (
var header in TarHeaderFactory.ReadHeader(
StreamingMode.Seekable,
stream,
ReaderOptions.ArchiveEncoding
)
)
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
if (header != null)
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default)
{
fileInfo.CheckNotNull(nameof(fileInfo));
return new TarArchive(fileInfo, readerOptions ?? new ReaderOptions(), cancellationToken);
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(Stream stream, ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default)
{
stream.CheckNotNull(nameof(stream));
return new TarArchive(stream, readerOptions ?? new ReaderOptions(), cancellationToken);
}
public static ValueTask<bool> IsTarFileAsync(string filePath, CancellationToken cancellationToken = default)
{
return IsTarFileAsync(new FileInfo(filePath), cancellationToken);
}
public static async ValueTask<bool> IsTarFileAsync(FileInfo fileInfo, CancellationToken cancellationToken = default)
{
if (!fileInfo.Exists)
{
if (header.EntryType == EntryType.LongName)
return false;
}
await using Stream stream = fileInfo.OpenRead();
return await IsTarFileAsync(stream, cancellationToken);
}
public static async ValueTask<bool> IsTarFileAsync(Stream stream, CancellationToken cancellationToken = default)
{
try
{
TarHeader tarHeader = new(new ArchiveEncoding());
bool readSucceeded = await tarHeader.Read(stream, cancellationToken);
bool isEmptyArchive = tarHeader.Name.Length == 0 && tarHeader.Size == 0 && Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;
}
catch
{
}
return false;
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
internal TarArchive(FileInfo fileInfo, ReaderOptions readerOptions,
CancellationToken cancellationToken)
: base(ArchiveType.Tar, fileInfo, readerOptions, cancellationToken)
{
}
protected override IAsyncEnumerable<TarVolume> LoadVolumes(FileInfo file, CancellationToken cancellationToken)
{
return new TarVolume(file.OpenRead(), ReaderOptions).AsAsyncEnumerable();
}
/// <summary>
/// Takes multiple seekable Streams for a multi-part archive
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
internal TarArchive(Stream stream, ReaderOptions readerOptions,
CancellationToken cancellationToken)
: base(ArchiveType.Tar, stream, readerOptions, cancellationToken)
{
}
internal TarArchive()
: base(ArchiveType.Tar)
{
}
protected override async IAsyncEnumerable<TarVolume> LoadVolumes(IAsyncEnumerable<Stream> streams,
[EnumeratorCancellation]CancellationToken cancellationToken)
{
yield return new TarVolume(await streams.FirstAsync(cancellationToken: cancellationToken), ReaderOptions);
}
protected override async IAsyncEnumerable<TarArchiveEntry> LoadEntries(IAsyncEnumerable<TarVolume> volumes,
[EnumeratorCancellation]CancellationToken cancellationToken)
{
Stream stream = (await volumes.SingleAsync(cancellationToken: cancellationToken)).Stream;
TarHeader? previousHeader = null;
await foreach (TarHeader? header in TarHeaderFactory.ReadHeader(StreamingMode.Seekable, stream, ReaderOptions.ArchiveEncoding, cancellationToken))
{
if (header != null)
{
previousHeader = header;
}
else
{
if (previousHeader != null)
if (header.EntryType == EntryType.LongName)
{
var entry = new TarArchiveEntry(
this,
new TarFilePart(previousHeader, stream),
CompressionType.None
);
var oldStreamPos = stream.Position;
using (var entryStream = entry.OpenEntryStream())
{
using var memoryStream = new MemoryStream();
entryStream.CopyTo(memoryStream);
memoryStream.Position = 0;
var bytes = memoryStream.ToArray();
header.Name = ReaderOptions.ArchiveEncoding.Decode(bytes).TrimNulls();
}
stream.Position = oldStreamPos;
previousHeader = null;
previousHeader = header;
}
else
{
if (previousHeader != null)
{
var entry = new TarArchiveEntry(this, new TarFilePart(previousHeader, stream),
CompressionType.None);
var oldStreamPos = stream.Position;
await using (var entryStream = await entry.OpenEntryStreamAsync(cancellationToken))
{
await using (var memoryStream = new MemoryStream())
{
await entryStream.TransferToAsync(memoryStream, cancellationToken);
memoryStream.Position = 0;
var bytes = memoryStream.ToArray();
header.Name = ReaderOptions.ArchiveEncoding.Decode(bytes).TrimNulls();
}
}
stream.Position = oldStreamPos;
previousHeader = null;
}
yield return new TarArchiveEntry(this, new TarFilePart(header, stream), CompressionType.None);
}
yield return new TarArchiveEntry(
this,
new TarFilePart(header, stream),
CompressionType.None
);
}
}
else
{
throw new IncompleteArchiveException("Failed to read TAR header");
}
}
}
protected override TarArchiveEntry CreateEntryInternal(
string filePath,
Stream source,
long size,
DateTime? modified,
bool closeStream
) =>
new TarWritableArchiveEntry(
this,
source,
CompressionType.Unknown,
filePath,
size,
modified,
closeStream
);
protected override TarArchiveEntry CreateDirectoryEntry(
string directoryPath,
DateTime? modified
) => new TarWritableArchiveEntry(this, directoryPath, modified);
protected override void SaveTo(
Stream stream,
WriterOptions options,
IEnumerable<TarArchiveEntry> oldEntries,
IEnumerable<TarArchiveEntry> newEntries
)
{
using var writer = new TarWriter(stream, new TarWriterOptions(options));
foreach (var entry in oldEntries.Concat(newEntries))
public static TarArchive Create()
{
if (entry.IsDirectory)
{
writer.WriteDirectory(
entry.Key.NotNull("Entry Key is null"),
entry.LastModifiedTime
);
}
else
{
using var entryStream = entry.OpenEntryStream();
writer.Write(
entry.Key.NotNull("Entry Key is null"),
entryStream,
entry.LastModifiedTime,
entry.Size
);
}
return new();
}
}
    protected override async ValueTask SaveToAsync(
        Stream stream,
        WriterOptions options,
        IEnumerable<TarArchiveEntry> oldEntries,
        IEnumerable<TarArchiveEntry> newEntries,
        CancellationToken cancellationToken = default
    )
    {
        using var writer = new TarWriter(stream, new TarWriterOptions(options));
        foreach (var entry in oldEntries.Concat(newEntries))
        {
            if (entry.IsDirectory)
            {
                await writer
                    .WriteDirectoryAsync(
                        entry.Key.NotNull("Entry Key is null"),
                        entry.LastModifiedTime,
                        cancellationToken
                    )
                    .ConfigureAwait(false);
            }
            else
            {
                using var entryStream = entry.OpenEntryStream();
                await writer
                    .WriteAsync(
                        entry.Key.NotNull("Entry Key is null"),
                        entryStream,
                        entry.LastModifiedTime,
                        entry.Size,
                        cancellationToken
                    )
                    .ConfigureAwait(false);
            }
        }
    }

    protected override ValueTask<TarArchiveEntry> CreateEntryInternal(string filePath, Stream source,
        long size, DateTime? modified, bool closeStream,
        CancellationToken cancellationToken)
    {
        return new(new TarWritableArchiveEntry(this, source, CompressionType.Unknown, filePath, size, modified,
            closeStream));
    }

    protected override async ValueTask SaveToAsync(Stream stream, WriterOptions options,
        IAsyncEnumerable<TarArchiveEntry> oldEntries,
        IAsyncEnumerable<TarArchiveEntry> newEntries,
        CancellationToken cancellationToken = default)
    {
        await using var writer = await TarWriter.CreateAsync(stream, new TarWriterOptions(options), cancellationToken);
        await foreach (var entry in oldEntries.Concat(newEntries)
            .Where(x => !x.IsDirectory)
            .WithCancellation(cancellationToken))
        {
            await using var entryStream = await entry.OpenEntryStreamAsync(cancellationToken);
            await writer.WriteAsync(entry.Key, entryStream, entry.LastModifiedTime, entry.Size, cancellationToken);
        }
    }
protected override IReader CreateReaderForSolidExtraction()
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return TarReader.Open(stream);
}
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync()
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
        return new((IAsyncReader)TarReader.Open(stream));
    }

    protected override async ValueTask<IReader> CreateReaderForSolidExtraction()
{
var stream = (await Volumes.SingleAsync()).Stream;
stream.Position = 0;
return await TarReader.OpenAsync(stream);
}
}
}

@@ -5,23 +5,27 @@ using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar;
namespace SharpCompress.Archives.Tar
{
    public class TarArchiveEntry : TarEntry, IArchiveEntry
    {
        internal TarArchiveEntry(TarArchive archive, TarFilePart part, CompressionType compressionType)
            : base(part, compressionType)
        {
            Archive = archive;
        }

        public virtual async ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
        {
            return await Parts.Single().GetCompressedStreamAsync(cancellationToken);
        }

        #region IArchiveEntry Members

        public IArchive Archive { get; }

        public bool IsComplete => true;

        #endregion
    }
}

@@ -1,92 +1,69 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;

namespace SharpCompress.Archives.Tar
{
    internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiveEntry
    {
        private readonly bool closeStream;
        private readonly Stream stream;

        internal TarWritableArchiveEntry(TarArchive archive, Stream stream, CompressionType compressionType,
            string path, long size, DateTime? lastModified, bool closeStream)
            : base(archive, null, compressionType)
        {
            this.stream = stream;
            Key = path;
            Size = size;
            LastModifiedTime = lastModified;
            this.closeStream = closeStream;
        }

        public override long Crc => 0;

        public override string Key { get; }

        public override long CompressedSize => 0;

        public override long Size { get; }

        public override DateTime? LastModifiedTime { get; }

        public override DateTime? CreatedTime => null;

        public override DateTime? LastAccessedTime => null;

        public override DateTime? ArchivedTime => null;

        public override bool IsEncrypted => false;

        public override bool IsDirectory => false;

        public override bool IsSplitAfter => false;

        internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();

        Stream IWritableArchiveEntry.Stream => stream;

        public override ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
        {
            //ensure new stream is at the start, this could be reset
            stream.Seek(0, SeekOrigin.Begin);
            return new(new NonDisposingStream(stream));
        }

        internal override async ValueTask CloseAsync()
        {
            if (closeStream)
            {
                await stream.DisposeAsync();
            }
        }
    }
}

@@ -1,319 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives.Zip;
public partial class ZipArchive
#if NET8_0_OR_GREATER
: IArchiveOpenable<IWritableArchive, IWritableAsyncArchive>,
IMultiArchiveOpenable<IWritableArchive, IWritableAsyncArchive>
#endif
{
public static IWritableArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
public static IWritableArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.NotNull(nameof(fileInfo));
return new ZipArchive(
new SourceStream(
fileInfo,
i => ZipArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
)
);
}
public static IWritableArchive Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new ZipArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IWritableArchive Open(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null
)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new ZipArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IWritableArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new ZipArchive(
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
);
}
public static IWritableAsyncArchive OpenAsync(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(path, readerOptions);
}
public static IWritableAsyncArchive OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(stream, readerOptions);
}
public static IWritableAsyncArchive OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(fileInfo, readerOptions);
}
public static IWritableAsyncArchive OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(streams, readerOptions);
}
public static IWritableAsyncArchive OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(fileInfos, readerOptions);
}
public static bool IsZipFile(
string filePath,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => IsZipFile(new FileInfo(filePath), password, bufferSize);
public static bool IsZipFile(
FileInfo fileInfo,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsZipFile(stream, password, bufferSize);
}
public static bool IsZipFile(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = headerFactory
.ReadStreamHeader(stream)
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
return false;
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
public static bool IsZipMulti(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = headerFactory
.ReadStreamHeader(stream)
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
if (stream.CanSeek)
{
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
var x = z.ReadSeekableHeader(stream, useSync: true).FirstOrDefault();
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
}
else
{
return false;
}
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
public static async ValueTask<bool> IsZipFileAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = await headerFactory
.ReadStreamHeaderAsync(stream)
.Where(x => x.ZipHeaderType != ZipHeaderType.Split)
.FirstOrDefaultAsync(cancellationToken);
if (header is null)
{
return false;
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
public static ZipArchive Create() => new();
public static async ValueTask<bool> IsZipMultiAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = headerFactory
.ReadStreamHeader(stream)
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
if (stream.CanSeek)
{
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
ZipHeader? x = null;
await foreach (
var h in z.ReadSeekableHeaderAsync(stream)
.WithCancellation(cancellationToken)
)
{
x = h;
break;
}
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
}
else
{
return false;
}
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
}
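
A minimal usage sketch for the open/detect overloads above (illustrative: the archive name and the IArchive.Entries/Dispose members it leans on are assumptions, not shown in this diff):

// Detect, open, and list a zip archive via the static helpers shown above.
if (ZipArchive.IsZipFile("parts.zip"))
{
    using var archive = ZipArchive.Open("parts.zip");
    foreach (var entry in archive.Entries)
    {
        Console.WriteLine($"{entry.Key}: {entry.Size} bytes");
    }
}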

@@ -1,7 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
@@ -14,243 +15,213 @@ using SharpCompress.Readers.Zip;
using SharpCompress.Writers;
using SharpCompress.Writers.Zip;
namespace SharpCompress.Archives.Zip
{
    public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
    {
#nullable disable
        private readonly SeekableZipHeaderFactory headerFactory;
#nullable enable

        /// <summary>
        /// Gets or sets the compression level applied to files added to the archive,
        /// if the compression method is set to deflate
        /// </summary>
        public CompressionLevel DeflateCompressionLevel { get; set; }

        /// <summary>
        /// Constructor expects a filepath to an existing file.
        /// </summary>
        /// <param name="filePath"></param>
        /// <param name="readerOptions"></param>
        public static ZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
        {
            filePath.CheckNotNullOrEmpty(nameof(filePath));
            return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
        }

        /// <summary>
        /// Constructor with a FileInfo object to an existing file.
        /// </summary>
        /// <param name="fileInfo"></param>
        /// <param name="readerOptions"></param>
        public static ZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null,
            CancellationToken cancellationToken = default)
        {
            fileInfo.CheckNotNull(nameof(fileInfo));
            return new ZipArchive(fileInfo, readerOptions ?? new ReaderOptions(), cancellationToken);
        }

        /// <summary>
        /// Takes a seekable Stream as a source
        /// </summary>
        /// <param name="stream"></param>
        /// <param name="readerOptions"></param>
        public static ZipArchive Open(Stream stream, ReaderOptions? readerOptions = null,
            CancellationToken cancellationToken = default)
        {
            stream.CheckNotNull(nameof(stream));
            return new ZipArchive(stream, readerOptions ?? new ReaderOptions(), cancellationToken);
        }

        public static ValueTask<bool> IsZipFile(string filePath, string? password = null)
        {
            return IsZipFileAsync(new FileInfo(filePath), password);
        }

        public static async ValueTask<bool> IsZipFileAsync(FileInfo fileInfo, string? password = null)
        {
            if (!fileInfo.Exists)
            {
                return false;
            }
            await using Stream stream = fileInfo.OpenRead();
            return await IsZipFileAsync(stream, password);
        }

        public static async ValueTask<bool> IsZipFileAsync(Stream stream, string? password = null,
            CancellationToken cancellationToken = default)
        {
            StreamingZipHeaderFactory headerFactory = new(password, new ArchiveEncoding());
            try
            {
                RewindableStream rewindableStream;
                if (stream is RewindableStream rs)
                {
                    rewindableStream = rs;
                }
                else
                {
                    rewindableStream = new RewindableStream(stream);
                }
                ZipHeader? header = await headerFactory.ReadStreamHeader(rewindableStream, cancellationToken)
                    .FirstOrDefaultAsync(x => x.ZipHeaderType != ZipHeaderType.Split, cancellationToken: cancellationToken);
                if (header is null)
                {
                    return false;
                }
                return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
            }
            catch (CryptographicException)
            {
                return true;
            }
            catch
            {
                return false;
            }
        }

        /// <summary>
        /// Constructor with a FileInfo object to an existing file.
        /// </summary>
        /// <param name="fileInfo"></param>
        /// <param name="readerOptions"></param>
        internal ZipArchive(FileInfo fileInfo, ReaderOptions readerOptions,
            CancellationToken cancellationToken)
            : base(ArchiveType.Zip, fileInfo, readerOptions, cancellationToken)
        {
            headerFactory = new SeekableZipHeaderFactory(readerOptions.Password, readerOptions.ArchiveEncoding);
        }

        protected override IAsyncEnumerable<ZipVolume> LoadVolumes(FileInfo file,
            CancellationToken cancellationToken)
        {
            return new ZipVolume(file.OpenRead(), ReaderOptions).AsAsyncEnumerable();
        }

        internal ZipArchive()
            : base(ArchiveType.Zip)
        {
        }

        /// <summary>
        /// Takes multiple seekable Streams for a multi-part archive
        /// </summary>
        /// <param name="stream"></param>
        /// <param name="readerOptions"></param>
        internal ZipArchive(Stream stream, ReaderOptions readerOptions,
            CancellationToken cancellationToken)
            : base(ArchiveType.Zip, stream, readerOptions, cancellationToken)
        {
            headerFactory = new SeekableZipHeaderFactory(readerOptions.Password, readerOptions.ArchiveEncoding);
        }

        protected override async IAsyncEnumerable<ZipVolume> LoadVolumes(IAsyncEnumerable<Stream> streams,
            [EnumeratorCancellation] CancellationToken cancellationToken)
        {
            yield return new ZipVolume(await streams.FirstAsync(cancellationToken: cancellationToken), ReaderOptions);
        }

        protected override async IAsyncEnumerable<ZipArchiveEntry> LoadEntries(IAsyncEnumerable<ZipVolume> volumes,
            [EnumeratorCancellation] CancellationToken cancellationToken)
        {
            var volume = await volumes.SingleAsync(cancellationToken: cancellationToken);
            Stream stream = volume.Stream;
            await foreach (ZipHeader h in headerFactory.ReadSeekableHeader(stream, cancellationToken))
            {
                if (h != null)
                {
                    switch (h.ZipHeaderType)
                    {
                        case ZipHeaderType.DirectoryEntry:
                        {
                            yield return new ZipArchiveEntry(this,
                                new SeekableZipFilePart(headerFactory,
                                    (DirectoryEntryHeader)h,
                                    stream));
                        }
                        break;
                        case ZipHeaderType.DirectoryEnd:
                        {
                            byte[] bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
                            volume.Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
                            yield break;
                        }
                    }
                }
            }
        }

        public ValueTask SaveToAsync(Stream stream, CancellationToken cancellationToken = default)
        {
            return SaveToAsync(stream, new WriterOptions(CompressionType.Deflate), cancellationToken);
        }

        protected override async ValueTask SaveToAsync(Stream stream, WriterOptions options,
            IAsyncEnumerable<ZipArchiveEntry> oldEntries,
            IAsyncEnumerable<ZipArchiveEntry> newEntries,
            CancellationToken cancellationToken = default)
        {
            await using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
            await foreach (var entry in oldEntries.Concat(newEntries)
                .Where(x => !x.IsDirectory)
                .WithCancellation(cancellationToken))
            {
                await using (var entryStream = await entry.OpenEntryStreamAsync(cancellationToken))
                {
                    await writer.WriteAsync(entry.Key, entryStream, entry.LastModifiedTime, cancellationToken);
                }
            }
        }

        protected override ValueTask<ZipArchiveEntry> CreateEntryInternal(string filePath, Stream source, long size,
            DateTime? modified, bool closeStream, CancellationToken cancellationToken = default)
        {
            return new(new ZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream));
        }

        public static ZipArchive Create()
        {
            return new();
        }

        protected override async ValueTask<IReader> CreateReaderForSolidExtraction()
        {
            var stream = (await Volumes.SingleAsync()).Stream;
            stream.Position = 0;
            return ZipReader.Open(stream, ReaderOptions);
        }
    }
}

@@ -4,32 +4,29 @@ using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip;
namespace SharpCompress.Archives.Zip
{
    public class ZipArchiveEntry : ZipEntry, IArchiveEntry
    {
        internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart? part)
            : base(part)
        {
            Archive = archive;
        }

        public virtual ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
        {
            return Parts.Single().GetCompressedStreamAsync(cancellationToken);
        }

        public string? Comment => ((SeekableZipFilePart)Parts.Single()).Comment;

        #region IArchiveEntry Members

        public IArchive Archive { get; }

        public bool IsComplete => true;

        #endregion
    }
}

@@ -1,35 +0,0 @@
using System;
using System.IO;
using System.Text.RegularExpressions;
namespace SharpCompress.Archives.Zip;
internal static class ZipArchiveVolumeFactory
{
internal static FileInfo? GetFilePart(int index, FileInfo part1) //base the name on the first part
{
FileInfo? item = null;
//load files with zip/zipx first. Swapped to end once loaded in ZipArchive
//new style .zip, z01.. | .zipx, zx01 - if the numbers go beyond 99 then they use 100 ...1000 etc
var m = Regex.Match(part1.Name, @"^(.*\.)(zipx?|zx?[0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(
Path.Combine(
part1.DirectoryName!,
String.Concat(
m.Groups[1].Value,
Regex.Replace(m.Groups[2].Value, @"[^xz]", ""),
index.ToString().PadLeft(2, '0')
)
)
);
else //split - 001, 002 ...
return ArchiveVolumeFactory.GetFilePart(index, part1);
if (item != null && item.Exists)
return item;
return null; //no more items
}
}
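
To illustrate the naming scheme the regex above implements (a sketch; the inputs are hypothetical):

// "archive.zip" with index 1: group 1 = "archive.", group 2 = "zip";
// stripping every character except 'x'/'z' leaves "z", so the part becomes "archive.z01".
// "archive.zipx" yields "zx" and thus "archive.zx01"; indexes >= 100 simply grow ("archive.z100").
var m = Regex.Match("archive.zip", @"^(.*\.)(zipx?|zx?[0-9]+)$", RegexOptions.IgnoreCase);
var partName = String.Concat(
    m.Groups[1].Value,
    Regex.Replace(m.Groups[2].Value, @"[^xz]", ""),
    1.ToString().PadLeft(2, '0')
); // "archive.z01"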

@@ -1,94 +1,70 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;

namespace SharpCompress.Archives.Zip
{
    internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
    {
        private readonly bool closeStream;
        private readonly Stream stream;
        private bool isDisposed;

        internal ZipWritableArchiveEntry(ZipArchive archive, Stream stream, string path, long size,
            DateTime? lastModified, bool closeStream)
            : base(archive, null)
        {
            this.stream = stream;
            Key = path;
            Size = size;
            LastModifiedTime = lastModified;
            this.closeStream = closeStream;
        }

        public override long Crc => 0;

        public override string Key { get; }

        public override long CompressedSize => 0;

        public override long Size { get; }

        public override DateTime? LastModifiedTime { get; }

        public override DateTime? CreatedTime => null;

        public override DateTime? LastAccessedTime => null;

        public override DateTime? ArchivedTime => null;

        public override bool IsEncrypted => false;

        public override bool IsDirectory => false;

        public override bool IsSplitAfter => false;

        internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();

        Stream IWritableArchiveEntry.Stream => stream;

        public override ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
        {
            //ensure new stream is at the start, this could be reset
            stream.Seek(0, SeekOrigin.Begin);
            return new(new NonDisposingStream(stream));
        }

        internal override async ValueTask CloseAsync()
        {
            if (closeStream && !isDisposed)
            {
                await stream.DisposeAsync();
                isDisposed = true;
            }
        }
    }
}

@@ -1,8 +1,24 @@
using System;
using System.Reflection;
using System.Runtime.CompilerServices;

[assembly: AssemblyTitle("SharpCompress")]
[assembly: AssemblyProduct("SharpCompress")]
[assembly: InternalsVisibleTo("SharpCompress.Test" + SharpCompress.AssemblyInfo.PublicKeySuffix)]
[assembly: CLSCompliant(true)]

namespace SharpCompress
{
    /// <summary>
    /// Just a static class to house the public key, to avoid repetition.
    /// </summary>
    internal static class AssemblyInfo
    {
        internal const string PublicKeySuffix =
            ",PublicKey=002400000480000094000000060200000024000052534131000400000100010059acfa17d26c44" +
            "7a4d03f16eaa72c9187c04f16e6569dd168b080e39a6f5c9fd00f28c768cd8e9a089d5a0e1b34c" +
            "cd971488e7afe030ce5ce8df2053cf12ec89f6d38065c434c09ee6af3ee284c5dc08f44774b679" +
            "bf39298e57efe30d4b00aecf9e4f6f8448b2cb0146d8956dfcab606cc64a0ac38c60a7d78b0d65" +
            "d3b98dc0";
    }
}

@@ -0,0 +1,25 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress
{
public static class AsyncEnumerable
{
public static IAsyncEnumerable<T> Empty<T>() => EmptyAsyncEnumerable<T>.Instance;
private class EmptyAsyncEnumerable<T> : IAsyncEnumerator<T>, IAsyncEnumerable<T>
{
public static readonly EmptyAsyncEnumerable<T> Instance =
new();
public T Current => default!;
public ValueTask DisposeAsync() => default;
public ValueTask<bool> MoveNextAsync() => new(false);
public IAsyncEnumerator<T> GetAsyncEnumerator(CancellationToken cancellationToken = new CancellationToken())
{
return this;
}
}
}
}
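
A minimal consumption sketch (illustrative): because MoveNextAsync always reports false, iteration completes immediately, which is why a single shared Instance is safe to hand to every caller.

await foreach (var item in AsyncEnumerable.Empty<string>())
{
    Console.WriteLine(item); // never reached
}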

@@ -1,61 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SharpCompress.Common.Ace
{
public class AceCrc
{
// CRC-32 lookup table (standard polynomial 0xEDB88320, reflected)
private static readonly uint[] Crc32Table = GenerateTable();
private static uint[] GenerateTable()
{
var table = new uint[256];
for (int i = 0; i < 256; i++)
{
uint crc = (uint)i;
for (int j = 0; j < 8; j++)
{
if ((crc & 1) != 0)
crc = (crc >> 1) ^ 0xEDB88320u;
else
crc >>= 1;
}
table[i] = crc;
}
return table;
}
/// <summary>
/// Calculate ACE CRC-32 checksum.
/// ACE CRC-32 uses standard CRC-32 polynomial (0xEDB88320, reflected)
/// with init=0xFFFFFFFF but NO final XOR.
/// </summary>
public static uint AceCrc32(ReadOnlySpan<byte> data)
{
uint crc = 0xFFFFFFFFu;
foreach (byte b in data)
{
crc = (crc >> 8) ^ Crc32Table[(crc ^ b) & 0xFF];
}
return crc; // No final XOR for ACE
}
/// <summary>
/// ACE CRC-16 is the lower 16 bits of the ACE CRC-32.
/// </summary>
public static ushort AceCrc16(ReadOnlySpan<byte> data)
{
return (ushort)(AceCrc32(data) & 0xFFFF);
}
}
}
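
A quick spot check of the two routines above (values computed from the standard CRC-32 check string "123456789"; the byte source is illustrative):

byte[] data = System.Text.Encoding.ASCII.GetBytes("123456789");
uint crc32 = AceCrc.AceCrc32(data); // 0x340BC6D9: the complement of the usual 0xCBF43926, since the final XOR is skipped
ushort crc16 = AceCrc.AceCrc16(data); // 0xC6D9: the low 16 bits of the CRC-32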

@@ -1,68 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.Ace.Headers;
namespace SharpCompress.Common.Ace
{
public class AceEntry : Entry
{
private readonly AceFilePart _filePart;
internal AceEntry(AceFilePart filePart)
{
_filePart = filePart;
}
public override long Crc
{
get
{
if (_filePart == null)
{
return 0;
}
return _filePart.Header.Crc32;
}
}
public override string? Key => _filePart?.Header.Filename;
public override string? LinkTarget => null;
public override long CompressedSize => _filePart?.Header.PackedSize ?? 0;
public override CompressionType CompressionType
{
get
{
if (_filePart.Header.CompressionType == Headers.CompressionType.Stored)
{
return CompressionType.None;
}
return CompressionType.AceLZ77;
}
}
public override long Size => _filePart?.Header.OriginalSize ?? 0;
public override DateTime? LastModifiedTime => _filePart.Header.DateTime;
public override DateTime? CreatedTime => null;
public override DateTime? LastAccessedTime => null;
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted => _filePart.Header.IsFileEncrypted;
public override bool IsDirectory => _filePart.Header.IsDirectory;
public override bool IsSplitAfter => false;
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
}
}

@@ -1,52 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.Ace.Headers;
using SharpCompress.IO;
namespace SharpCompress.Common.Ace
{
public class AceFilePart : FilePart
{
private readonly Stream _stream;
internal AceFileHeader Header { get; set; }
internal AceFilePart(AceFileHeader localAceHeader, Stream seekableStream)
: base(localAceHeader.ArchiveEncoding)
{
_stream = seekableStream;
Header = localAceHeader;
}
internal override string? FilePartName => Header.Filename;
internal override Stream GetCompressedStream()
{
if (_stream != null)
{
Stream compressedStream;
switch (Header.CompressionType)
{
case Headers.CompressionType.Stored:
compressedStream = new ReadOnlySubStream(
_stream,
Header.DataStartPosition,
Header.PackedSize
);
break;
default:
throw new NotSupportedException(
"CompressionMethod: " + Header.CompressionQuality
);
}
return compressedStream;
}
return _stream.NotNull();
}
internal override Stream? GetRawStream() => _stream;
}
}

@@ -1,35 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.Arj;
using SharpCompress.Readers;
namespace SharpCompress.Common.Ace
{
public class AceVolume : Volume
{
public AceVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
: base(stream, readerOptions, index) { }
public override bool IsFirstVolume
{
get { return true; }
}
/// <summary>
/// AceVolume is part of a multi-part archive.
/// </summary>
public override bool IsMultiVolume
{
get { return false; }
}
internal IEnumerable<AceFilePart> GetVolumeFileParts()
{
return new List<AceFilePart>();
}
}
}

@@ -1,171 +0,0 @@
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Xml.Linq;
using SharpCompress.Common.Arc;
namespace SharpCompress.Common.Ace.Headers
{
/// <summary>
/// ACE file entry header
/// </summary>
public sealed class AceFileHeader : AceHeader
{
public long DataStartPosition { get; private set; }
public long PackedSize { get; set; }
public long OriginalSize { get; set; }
public DateTime DateTime { get; set; }
public int Attributes { get; set; }
public uint Crc32 { get; set; }
public CompressionType CompressionType { get; set; }
public CompressionQuality CompressionQuality { get; set; }
public ushort Parameters { get; set; }
public string Filename { get; set; } = string.Empty;
public List<byte> Comment { get; set; } = new();
/// <summary>
/// File data offset in the archive
/// </summary>
public ulong DataOffset { get; set; }
public bool IsDirectory => (Attributes & 0x10) != 0;
public bool IsContinuedFromPrev =>
(HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.CONTINUED_PREV) != 0;
public bool IsContinuedToNext =>
(HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.CONTINUED_NEXT) != 0;
public int DictionarySize
{
get
{
int bits = Parameters & 0x0F;
return bits < 10 ? 1024 : 1 << bits;
}
}
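// Worked example (illustrative): Parameters = 0x080C gives bits = 12,
// so DictionarySize = 1 << 12 = 4096; any bits value below 10 clamps
// to the 1024-byte minimum (1 << 10).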
public AceFileHeader(IArchiveEncoding archiveEncoding)
: base(archiveEncoding, AceHeaderType.FILE) { }
/// <summary>
/// Reads the next file entry header from the stream.
/// Returns null if no more entries or end of archive.
/// Supports both ACE 1.0 and ACE 2.0 formats.
/// </summary>
public override AceHeader? Read(Stream stream)
{
var headerData = ReadHeader(stream);
if (headerData.Length == 0)
{
return null;
}
int offset = 0;
// Header type (1 byte)
HeaderType = headerData[offset++];
// Skip recovery record headers (ACE 2.0 feature)
if (HeaderType == (byte)SharpCompress.Common.Ace.Headers.AceHeaderType.RECOVERY32)
{
// Skip to next header
return null;
}
if (HeaderType != (byte)SharpCompress.Common.Ace.Headers.AceHeaderType.FILE)
{
// Unknown header type - skip
return null;
}
// Header flags (2 bytes)
HeaderFlags = BitConverter.ToUInt16(headerData, offset);
offset += 2;
// Packed size (4 bytes)
PackedSize = BitConverter.ToUInt32(headerData, offset);
offset += 4;
// Original size (4 bytes)
OriginalSize = BitConverter.ToUInt32(headerData, offset);
offset += 4;
// File date/time in DOS format (4 bytes)
var dosDateTime = BitConverter.ToUInt32(headerData, offset);
DateTime = ConvertDosDateTime(dosDateTime);
offset += 4;
// File attributes (4 bytes)
Attributes = (int)BitConverter.ToUInt32(headerData, offset);
offset += 4;
// CRC32 (4 bytes)
Crc32 = BitConverter.ToUInt32(headerData, offset);
offset += 4;
// Compression type (1 byte)
byte compressionType = headerData[offset++];
CompressionType = GetCompressionType(compressionType);
// Compression quality/parameter (1 byte)
byte compressionQuality = headerData[offset++];
CompressionQuality = GetCompressionQuality(compressionQuality);
// Parameters (2 bytes)
Parameters = BitConverter.ToUInt16(headerData, offset);
offset += 2;
// Reserved (2 bytes) - skip
offset += 2;
// Filename length (2 bytes)
var filenameLength = BitConverter.ToUInt16(headerData, offset);
offset += 2;
// Filename
if (offset + filenameLength <= headerData.Length)
{
Filename = ArchiveEncoding.Decode(headerData, offset, filenameLength);
offset += filenameLength;
}
// Handle comment if present
if ((HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.COMMENT) != 0)
{
// Comment length (2 bytes)
if (offset + 2 <= headerData.Length)
{
ushort commentLength = BitConverter.ToUInt16(headerData, offset);
offset += 2 + commentLength; // Skip comment
}
}
// Store the data start position
DataStartPosition = stream.Position;
return this;
}
public CompressionType GetCompressionType(byte value) =>
value switch
{
0 => CompressionType.Stored,
1 => CompressionType.Lz77,
2 => CompressionType.Blocked,
_ => CompressionType.Unknown,
};
public CompressionQuality GetCompressionQuality(byte value) =>
value switch
{
0 => CompressionQuality.None,
1 => CompressionQuality.Fastest,
2 => CompressionQuality.Fast,
3 => CompressionQuality.Normal,
4 => CompressionQuality.Good,
5 => CompressionQuality.Best,
_ => CompressionQuality.Unknown,
};
}
}

@@ -1,153 +0,0 @@
using System;
using System.IO;
using SharpCompress.Common.Arj.Headers;
using SharpCompress.Crypto;
namespace SharpCompress.Common.Ace.Headers
{
/// <summary>
/// Header type constants
/// </summary>
public enum AceHeaderType
{
MAIN = 0,
FILE = 1,
RECOVERY32 = 2,
RECOVERY64A = 3,
RECOVERY64B = 4,
}
public abstract class AceHeader
{
// ACE signature: bytes at offset 7 should be "**ACE**"
private static readonly byte[] AceSignature =
[
(byte)'*',
(byte)'*',
(byte)'A',
(byte)'C',
(byte)'E',
(byte)'*',
(byte)'*',
];
public AceHeader(IArchiveEncoding archiveEncoding, AceHeaderType type)
{
AceHeaderType = type;
ArchiveEncoding = archiveEncoding;
}
public IArchiveEncoding ArchiveEncoding { get; }
public AceHeaderType AceHeaderType { get; }
public ushort HeaderFlags { get; set; }
public ushort HeaderCrc { get; set; }
public ushort HeaderSize { get; set; }
public byte HeaderType { get; set; }
public bool IsFileEncrypted =>
(HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.FILE_ENCRYPTED) != 0;
public bool Is64Bit =>
(HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.MEMORY_64BIT) != 0;
public bool IsSolid =>
(HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.SOLID_MAIN) != 0;
public bool IsMultiVolume =>
(HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.MULTIVOLUME) != 0;
public abstract AceHeader? Read(Stream reader);
public byte[] ReadHeader(Stream stream)
{
// Read header CRC (2 bytes) and header size (2 bytes)
var headerBytes = new byte[4];
if (stream.Read(headerBytes, 0, 4) != 4)
{
return Array.Empty<byte>();
}
HeaderCrc = BitConverter.ToUInt16(headerBytes, 0); // CRC for validation
HeaderSize = BitConverter.ToUInt16(headerBytes, 2);
if (HeaderSize == 0)
{
return Array.Empty<byte>();
}
// Read the header data
var body = new byte[HeaderSize];
if (stream.Read(body, 0, HeaderSize) != HeaderSize)
{
return Array.Empty<byte>();
}
// Verify crc
var checksum = AceCrc.AceCrc16(body);
if (checksum != HeaderCrc)
{
throw new InvalidDataException("Header checksum is invalid");
}
return body;
}
public static bool IsArchive(Stream stream)
{
// ACE files have a specific signature
// First two bytes are typically 0x60 0xEA (signature bytes)
// At offset 7, there should be "**ACE**" (7 bytes)
var bytes = new byte[14];
if (stream.Read(bytes, 0, 14) != 14)
{
return false;
}
// Check for "**ACE**" at offset 7
return CheckMagicBytes(bytes, 7);
}
protected static bool CheckMagicBytes(byte[] headerBytes, int offset)
{
// Check for "**ACE**" at specified offset
for (int i = 0; i < AceSignature.Length; i++)
{
if (headerBytes[offset + i] != AceSignature[i])
{
return false;
}
}
return true;
}
protected DateTime ConvertDosDateTime(uint dosDateTime)
{
try
{
int second = (int)(dosDateTime & 0x1F) * 2;
int minute = (int)((dosDateTime >> 5) & 0x3F);
int hour = (int)((dosDateTime >> 11) & 0x1F);
int day = (int)((dosDateTime >> 16) & 0x1F);
int month = (int)((dosDateTime >> 21) & 0x0F);
int year = (int)((dosDateTime >> 25) & 0x7F) + 1980;
if (
day < 1
|| day > 31
|| month < 1
|| month > 12
|| hour > 23
|| minute > 59
|| second > 59
)
{
return DateTime.MinValue;
}
return new DateTime(year, month, day, hour, minute, second);
}
catch
{
return DateTime.MinValue;
}
}
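
// Worked example (illustrative): dosDateTime = 0x532B7D35.
// High word 0x532B: year = (0x532B >> 9) + 1980 = 2021, month = 9, day = 11.
// Low word 0x7D35: hour = 15, minute = 41, second = 0x15 * 2 = 42.
// Result: 2021-09-11 15:41:42.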
}
}

@@ -1,97 +0,0 @@
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Ace.Headers;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Crypto;
namespace SharpCompress.Common.Ace.Headers
{
/// <summary>
/// ACE main archive header
/// </summary>
public sealed class AceMainHeader : AceHeader
{
public byte ExtractVersion { get; set; }
public byte CreatorVersion { get; set; }
public HostOS HostOS { get; set; }
public byte VolumeNumber { get; set; }
public DateTime DateTime { get; set; }
public string Advert { get; set; } = string.Empty;
public List<byte> Comment { get; set; } = new();
public byte AceVersion { get; private set; }
public AceMainHeader(IArchiveEncoding archiveEncoding)
: base(archiveEncoding, AceHeaderType.MAIN) { }
/// <summary>
/// Reads the main archive header from the stream.
/// Returns header if this is a valid ACE archive.
/// Supports both ACE 1.0 and ACE 2.0 formats.
/// </summary>
public override AceHeader? Read(Stream stream)
{
var headerData = ReadHeader(stream);
if (headerData.Length == 0)
{
return null;
}
int offset = 0;
// Header type should be 0 for main header
HeaderType = headerData[offset++];
if (HeaderType != (byte)AceHeaderType.MAIN)
{
return null;
}
// Header flags (2 bytes)
HeaderFlags = BitConverter.ToUInt16(headerData, offset);
offset += 2;
// Skip signature "**ACE**" (7 bytes)
if (!CheckMagicBytes(headerData, offset))
{
throw new InvalidDataException("Invalid ACE archive signature.");
}
offset += 7;
// ACE version (1 byte) - 10 for ACE 1.0, 20 for ACE 2.0
AceVersion = headerData[offset++];
ExtractVersion = headerData[offset++];
// Host OS (1 byte)
if (offset < headerData.Length)
{
var hostOsByte = headerData[offset++];
HostOS = hostOsByte <= 11 ? (HostOS)hostOsByte : HostOS.Unknown;
}
// Volume number (1 byte)
VolumeNumber = headerData[offset++];
// Creation date/time (4 bytes)
var dosDateTime = BitConverter.ToUInt32(headerData, offset);
DateTime = ConvertDosDateTime(dosDateTime);
offset += 4;
// Reserved fields (8 bytes)
if (offset + 8 <= headerData.Length)
{
offset += 8;
}
// Skip additional fields based on flags
// Handle comment if present
if ((HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.COMMENT) != 0)
{
if (offset + 2 <= headerData.Length)
{
ushort commentLength = BitConverter.ToUInt16(headerData, offset);
offset += 2 + commentLength;
}
}
return this;
}
}
}

@@ -1,16 +0,0 @@
namespace SharpCompress.Common.Ace.Headers
{
/// <summary>
/// Compression quality
/// </summary>
public enum CompressionQuality
{
None,
Fastest,
Fast,
Normal,
Good,
Best,
Unknown,
}
}

@@ -1,13 +0,0 @@
namespace SharpCompress.Common.Ace.Headers
{
/// <summary>
/// Compression types
/// </summary>
public enum CompressionType
{
Stored,
Lz77,
Blocked,
Unknown,
}
}

@@ -1,33 +0,0 @@
namespace SharpCompress.Common.Ace.Headers
{
/// <summary>
/// Header flags (main + file, overlapping meanings)
/// </summary>
public static class HeaderFlags
{
// Shared / low bits
public const ushort ADDSIZE = 0x0001; // extra size field present
public const ushort COMMENT = 0x0002; // comment present
public const ushort MEMORY_64BIT = 0x0004;
public const ushort AV_STRING = 0x0008; // AV string present
public const ushort SOLID = 0x0010; // solid file
public const ushort LOCKED = 0x0020;
public const ushort PROTECTED = 0x0040;
// Main header specific
public const ushort V20FORMAT = 0x0100;
public const ushort SFX = 0x0200;
public const ushort LIMITSFXJR = 0x0400;
public const ushort MULTIVOLUME = 0x0800;
public const ushort ADVERT = 0x1000;
public const ushort RECOVERY = 0x2000;
public const ushort LOCKED_MAIN = 0x4000;
public const ushort SOLID_MAIN = 0x8000;
// File header specific (same bits, different meaning)
public const ushort NTSECURITY = 0x0400;
public const ushort CONTINUED_PREV = 0x1000;
public const ushort CONTINUED_NEXT = 0x2000;
public const ushort FILE_ENCRYPTED = 0x4000; // file encrypted (file header)
}
}

@@ -1,22 +0,0 @@
namespace SharpCompress.Common.Ace.Headers
{
/// <summary>
/// Host OS type
/// </summary>
public enum HostOS
{
MsDos = 0,
Os2,
Windows,
Unix,
MacOs,
WinNt,
Primos,
AppleGs,
Atari,
Vax,
Amiga,
Next,
Unknown,
}
}

@@ -1,60 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.GZip;
using SharpCompress.Common.Tar;
namespace SharpCompress.Common.Arc
{
public class ArcEntry : Entry
{
private readonly ArcFilePart? _filePart;
internal ArcEntry(ArcFilePart? filePart)
{
_filePart = filePart;
}
public override long Crc
{
get
{
if (_filePart == null)
{
return 0;
}
return _filePart.Header.Crc16;
}
}
public override string? Key => _filePart?.Header.Name;
public override string? LinkTarget => null;
public override long CompressedSize => _filePart?.Header.CompressedSize ?? 0;
public override CompressionType CompressionType =>
_filePart?.Header.CompressionMethod ?? CompressionType.Unknown;
public override long Size => _filePart?.Header.OriginalSize ?? 0;
public override DateTime? LastModifiedTime => null;
public override DateTime? CreatedTime => null;
public override DateTime? LastAccessedTime => null;
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted => false;
public override bool IsDirectory => false;
public override bool IsSplitAfter => false;
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
}
}

@@ -1,76 +0,0 @@
using System;
using System.IO;
using System.Linq;
using System.Text;
namespace SharpCompress.Common.Arc
{
public class ArcEntryHeader
{
public IArchiveEncoding ArchiveEncoding { get; }
public CompressionType CompressionMethod { get; private set; }
public string? Name { get; private set; }
public long CompressedSize { get; private set; }
public DateTime DateTime { get; private set; }
public int Crc16 { get; private set; }
public long OriginalSize { get; private set; }
public long DataStartPosition { get; private set; }
public ArcEntryHeader(IArchiveEncoding archiveEncoding)
{
this.ArchiveEncoding = archiveEncoding;
}
public ArcEntryHeader? ReadHeader(Stream stream)
{
byte[] headerBytes = new byte[29];
if (stream.Read(headerBytes, 0, headerBytes.Length) != headerBytes.Length)
{
return null;
}
DataStartPosition = stream.Position;
return LoadFrom(headerBytes);
}
public ArcEntryHeader LoadFrom(byte[] headerBytes)
{
CompressionMethod = GetCompressionType(headerBytes[1]);
// Read name
int nameEnd = Array.IndexOf(headerBytes, (byte)0, 2); // Find null terminator (the name field starts at offset 2)
Name = Encoding.UTF8.GetString(headerBytes, 2, nameEnd > 0 ? nameEnd - 2 : 12);
int offset = 15;
CompressedSize = BitConverter.ToUInt32(headerBytes, offset);
offset += 4;
uint rawDateTime = BitConverter.ToUInt32(headerBytes, offset);
DateTime = ConvertToDateTime(rawDateTime);
offset += 4;
Crc16 = BitConverter.ToUInt16(headerBytes, offset);
offset += 2;
OriginalSize = BitConverter.ToUInt32(headerBytes, offset);
return this;
}
private CompressionType GetCompressionType(byte value)
{
return value switch
{
1 or 2 => CompressionType.None,
3 => CompressionType.Packed,
4 => CompressionType.Squeezed,
5 or 6 or 7 or 8 => CompressionType.Crunched,
9 => CompressionType.Squashed,
10 => CompressionType.Crushed,
11 => CompressionType.Distilled,
_ => CompressionType.Unknown,
};
}
public static DateTime ConvertToDateTime(long rawDateTime)
{
// Convert Unix timestamp to DateTime (UTC)
return DateTimeOffset.FromUnixTimeSeconds(rawDateTime).UtcDateTime;
}
}
}
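
The 29-byte entry header layout, as LoadFrom consumes it (offsets inferred from the code above; byte 0 is assumed to be the 0x1A entry marker that precedes the fields being read):

// offset  size  field
//   1      1    compression method byte -> GetCompressionType
//   2     13    file name, NUL-terminated
//  15      4    compressed size (little-endian)
//  19      4    raw date/time -> ConvertToDateTime
//  23      2    CRC-16
//  25      4    original size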

@@ -1,83 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.GZip;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Lzw;
using SharpCompress.Compressors.RLE90;
using SharpCompress.Compressors.Squeezed;
using SharpCompress.IO;
namespace SharpCompress.Common.Arc
{
public class ArcFilePart : FilePart
{
private readonly Stream? _stream;
internal ArcFilePart(ArcEntryHeader localArcHeader, Stream? seekableStream)
: base(localArcHeader.ArchiveEncoding)
{
_stream = seekableStream;
Header = localArcHeader;
}
internal ArcEntryHeader Header { get; set; }
internal override string? FilePartName => Header.Name;
internal override Stream GetCompressedStream()
{
if (_stream != null)
{
Stream compressedStream;
switch (Header.CompressionMethod)
{
case CompressionType.None:
compressedStream = new ReadOnlySubStream(
_stream,
Header.DataStartPosition,
Header.CompressedSize
);
break;
case CompressionType.Packed:
compressedStream = new RunLength90Stream(
_stream,
(int)Header.CompressedSize
);
break;
case CompressionType.Squeezed:
compressedStream = new SqueezeStream(_stream, (int)Header.CompressedSize);
break;
case CompressionType.Crunched:
if (Header.OriginalSize > 128 * 1024)
{
throw new NotSupportedException(
"CompressionMethod: "
+ Header.CompressionMethod
+ " with size > 128KB"
);
}
compressedStream = new ArcLzwStream(
_stream,
(int)Header.CompressedSize,
true
);
break;
default:
throw new NotSupportedException(
"CompressionMethod: " + Header.CompressionMethod
);
}
return compressedStream;
}
return _stream.NotNull();
}
internal override Stream? GetRawStream() => _stream;
}
}

Some files were not shown because too many files have changed in this diff.