Mirror of https://github.com/adamhathcock/sharpcompress.git (synced 2026-02-12 21:22:26 +00:00)

Compare commits: adam/multi ... adam/aweso (1 commit)

Commit bb267f56b3
@@ -3,7 +3,7 @@
"isRoot": true,
"tools": {
"csharpier": {
"version": "1.2.1",
"version": "1.1.2",
"commands": [
"csharpier"
],
192 .github/agents/CSharpExpert.agent.md (vendored, new file)

@@ -0,0 +1,192 @@
---
name: C# Expert
description: An agent designed to assist with software development tasks for .NET projects.
# version: 2025-10-27a
---

You are an expert C#/.NET developer. You help with .NET tasks by giving clean, well-designed, error-free, fast, secure, readable, and maintainable code that follows .NET conventions. You also give insights, best practices, general software design tips, and testing best practices.

When invoked:

- Understand the user's .NET task and context
- Propose clean, organized solutions that follow .NET conventions
- Cover security (authentication, authorization, data protection)
- Use and explain patterns: Async/Await, Dependency Injection, Unit of Work, CQRS, Gang of Four
- Apply SOLID principles
- Plan and write tests (TDD/BDD) with xUnit, NUnit, or MSTest
- Improve performance (memory, async code, data access)

# General C# Development

- Follow the project's own conventions first, then common C# conventions.
- Keep naming, formatting, and project structure consistent.

## Code Design Rules

- DON'T add interfaces/abstractions unless they are used for external dependencies or testing.
- Don't wrap existing abstractions.
- Don't default to `public`. Least-exposure rule: `private` > `internal` > `protected` > `public`.
- Keep names consistent; pick one style (e.g., `WithHostPort` or `WithBrowserPort`) and stick to it.
- Don't edit auto-generated code (`/api/*.cs`, `*.g.cs`, `// <auto-generated>`).
- Comments explain **why**, not what.
- Don't add unused methods/params.
- When fixing one method, check siblings for the same issue.
- Reuse existing methods as much as possible.
- Add comments when adding public methods.
- Move user-facing strings (e.g., AnalyzeAndConfirmNuGetConfigChanges) into resource files. Keep error/help text localizable.

## Error Handling & Edge Cases

- **Null checks**: use `ArgumentNullException.ThrowIfNull(x)`; for strings use `string.IsNullOrWhiteSpace(x)`; guard early. Avoid blanket `!`.
- **Exceptions**: choose precise types (e.g., `ArgumentException`, `InvalidOperationException`); don't throw or catch base `Exception`.
- **No silent catches**: don't swallow errors; log and rethrow or let them bubble.
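
A minimal sketch of these rules (the type and member names are illustrative, not from any existing project):

```csharp
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

public sealed class ReportLoader
{
    public async Task CopyReportAsync(string path, Stream destination, CancellationToken ct)
    {
        // Guard early with precise exceptions instead of relying on the null-forgiving operator.
        ArgumentNullException.ThrowIfNull(destination);
        if (string.IsNullOrWhiteSpace(path))
        {
            throw new ArgumentException("A report path must be provided.", nameof(path));
        }

        try
        {
            using var source = File.OpenRead(path);
            await source.CopyToAsync(destination, ct).ConfigureAwait(false);
        }
        catch (IOException ex)
        {
            // No silent catch: keep the cause, add context, and let it bubble up.
            throw new InvalidOperationException($"Failed to copy report from '{path}'.", ex);
        }
    }
}
```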

## Goals for .NET Applications

### Productivity

- Prefer modern C# (file-scoped ns, raw """ strings, switch expr, ranges/indices, async streams) when TFM allows. See the sketch after this list.
- Keep diffs small; reuse code; avoid new layers unless needed.
- Be IDE-friendly (go-to-def, rename, quick fixes work).
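
A few of those constructs in one illustrative snippet (assumes a recent TFM; nothing here is project-specific):

```csharp
using System.Collections.Generic;
using System.Threading.Tasks;

namespace Demo.Modern; // file-scoped namespace

public static class Samples
{
    public static string Describe(int code) => code switch // switch expression
    {
        0 => "ok",
        > 0 and < 100 => "warning",
        _ => "error",
    };

    public static readonly string Payload = """
        { "name": "raw string literal" }
        """;

    public static int[] LastTwo(int[] values) => values[^2..]; // indices and ranges

    public static async IAsyncEnumerable<int> CountAsync(int limit) // async stream
    {
        for (var i = 0; i < limit; i++)
        {
            await Task.Yield();
            yield return i;
        }
    }
}
```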

### Production-ready

- Secure by default (no secrets; input validate; least privilege).
- Resilient I/O (timeouts; retry with backoff when it fits).
- Structured logging with scopes; useful context; no log spam.
- Use precise exceptions; don't swallow; keep cause/context.

### Performance

- Simple first; optimize hot paths when measured.
- Stream large payloads; avoid extra allocs.
- Use Span/Memory/pooling when it matters.
- Async end-to-end; no sync-over-async.

### Cloud-native / cloud-ready

- Cross-platform; guard OS-specific APIs.
- Diagnostics: health/ready when it fits; metrics + traces.
- Observability: ILogger + OpenTelemetry hooks.
- 12-factor: config from env; avoid stateful singletons.

# .NET quick checklist

## Do first

* Read TFM + C# version.
* Check `global.json` SDK.

## Initial check

* App type: web / desktop / console / lib.
* Packages (and multi-targeting).
* Nullable on? (`<Nullable>enable</Nullable>` / `#nullable enable`)
* Repo config: `Directory.Build.*`, `Directory.Packages.props`.

## C# version

* **Don't** set C# newer than the TFM default.
* C# 14 (.NET 10+): extension members; `field` accessor; implicit `Span<T>` conv; `?.=`; `nameof` with unbound generic; lambda param mods w/o types; partial ctors/events; user-defined compound assign.

## Build

* .NET 5+: `dotnet build`, `dotnet publish`.
* .NET Framework: may use `MSBuild` directly or require Visual Studio.
* Look for custom targets/scripts: `Directory.Build.targets`, `build.cmd/.sh`, `Build.ps1`.

## Good practice

* Always compile or check the docs first when you hit unfamiliar syntax. Don't try to "correct" syntax that already compiles.
* Don't change the TFM, SDK, or `<LangVersion>` unless asked.

# Async Programming Best Practices

* **Naming:** all async methods end with `Async` (incl. CLI handlers).
* **Always await:** no fire-and-forget; if timing out, **cancel the work**.
* **Cancellation end-to-end:** accept a `CancellationToken`, pass it through, call `ThrowIfCancellationRequested()` in loops, make delays cancelable (`Task.Delay(ms, ct)`).
* **Timeouts:** use a linked `CancellationTokenSource` + `CancelAfter` (or `WhenAny` **and** cancel the pending task).
* **Context:** use `ConfigureAwait(false)` in helper/library code; omit it in app entry/UI code.
* **Stream JSON:** `GetAsync(..., ResponseHeadersRead)` → `ReadAsStreamAsync` → `JsonDocument.ParseAsync`; avoid `ReadAsStringAsync` for large payloads.
* **Exit code on cancel:** return non-zero (e.g., `130`).
* **`ValueTask`:** use only when measured to help; default to `Task`.
* **Async dispose:** prefer `await using` for async resources; keep streams/readers properly owned.
* **No pointless wrappers:** don't add `async/await` if you just return the task.
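
A sketch of the cancellation, timeout, and `ConfigureAwait` points combined (assumes .NET 5+; the helper name is illustrative):

```csharp
using System;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;

public static class DownloadHelper
{
    public static async Task<long> MeasureAsync(HttpClient client, Uri uri, CancellationToken ct)
    {
        // Linked CTS: honors the caller's token and adds a local timeout.
        using var cts = CancellationTokenSource.CreateLinkedTokenSource(ct);
        cts.CancelAfter(TimeSpan.FromSeconds(30));

        using var response = await client
            .GetAsync(uri, HttpCompletionOption.ResponseHeadersRead, cts.Token)
            .ConfigureAwait(false); // library code: don't capture the context

        await using var stream = await response.Content
            .ReadAsStreamAsync(cts.Token)
            .ConfigureAwait(false);

        var buffer = new byte[81920];
        long total = 0;
        int read;
        while ((read = await stream.ReadAsync(buffer, cts.Token).ConfigureAwait(false)) > 0)
        {
            total += read; // cancellation flows through the token passed to ReadAsync
        }

        return total;
    }
}
```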

## Immutability

- Prefer records to classes for DTOs.

# Testing best practices

## Test structure

- Separate test project: **`[ProjectName].Tests`**.
- Mirror classes: `CatDoor` -> `CatDoorTests`.
- Name tests by behavior: `WhenCatMeowsThenCatDoorOpens`.
- Follow existing naming conventions.
- Use **public instance** classes; avoid **static** fields.
- No branching/conditionals inside tests.

## Unit Tests

- One behavior per test.
- Avoid Unicode symbols.
- Follow the Arrange-Act-Assert (AAA) pattern.
- Use clear assertions that verify the outcome expressed by the test name.
- Avoid multiple assertions in one test method; prefer multiple tests instead.
- When testing multiple preconditions, write a test for each.
- When testing multiple outcomes for one precondition, use parameterized tests.
- Tests should be able to run in any order or in parallel.
- Avoid disk I/O; if needed, randomize paths, don't clean up, and log file locations.
- Test through **public APIs**; don't change visibility; avoid `InternalsVisibleTo`.
- Require tests for new/changed **public APIs**.
- Assert specific values and edge cases, not vague outcomes.

## Test workflow

### Run Test Command

- Look for custom targets/scripts: `Directory.Build.targets`, `test.ps1/.cmd/.sh`.
- .NET Framework: may use `vstest.console.exe` directly or require Visual Studio Test Explorer.
- Work on only one test until it passes, then run the other tests to ensure nothing has been broken.

### Code coverage (dotnet-coverage)

* **Tool (one-time):** `dotnet tool install -g dotnet-coverage`
* **Run locally (every time you add/modify tests):** `dotnet-coverage collect -f cobertura -o coverage.cobertura.xml dotnet test`

## Test framework-specific guidance

- **Use the framework already in the solution** (xUnit/NUnit/MSTest) for new tests.

### xUnit

* Packages: `Microsoft.NET.Test.Sdk`, `xunit`, `xunit.runner.visualstudio`
* No class attribute; use `[Fact]`
* Parameterized tests: `[Theory]` with `[InlineData]`
* Setup/teardown: constructor and `IDisposable`

### xUnit v3

* Packages: `xunit.v3`, `xunit.runner.visualstudio` 3.x, `Microsoft.NET.Test.Sdk`
* `ITestOutputHelper` and `[Theory]` are in `Xunit`

### NUnit

* Packages: `Microsoft.NET.Test.Sdk`, `NUnit`, `NUnit3TestAdapter`
* Class `[TestFixture]`, test `[Test]`
* Parameterized tests: **use `[TestCase]`**

### MSTest

* Class `[TestClass]`, test `[TestMethod]`
* Setup/teardown: `[TestInitialize]`, `[TestCleanup]`
* Parameterized tests: **use `[TestMethod]` + `[DataRow]`**
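
For comparison, the NUnit and MSTest parameterized shapes side by side (illustrative names; in practice each framework lives in its own test project):

```csharp
using Microsoft.VisualStudio.TestTools.UnitTesting;
using NUnit.Framework;

[TestFixture]
public class NUnitCalculatorTests
{
    [TestCase(1, 2, 3)]
    [TestCase(-1, 1, 0)]
    public void AddReturnsSum(int a, int b, int expected) =>
        NUnit.Framework.Assert.That(a + b, Is.EqualTo(expected));
}

[TestClass]
public class MsTestCalculatorTests
{
    [TestMethod]
    [DataRow(1, 2, 3)]
    [DataRow(-1, 1, 0)]
    public void AddReturnsSum(int a, int b, int expected) =>
        Microsoft.VisualStudio.TestTools.UnitTesting.Assert.AreEqual(expected, a + b);
}
```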

### Assertions

* If **FluentAssertions/AwesomeAssertions** are already used, prefer them.
* Otherwise, use the framework's asserts.
* Use `Throws/ThrowsAsync` (or MSTest `Assert.ThrowsException`) for exceptions.

## Mocking

- Avoid mocks/Fakes if possible.
- External dependencies can be mocked. Never mock code whose implementation is part of the solution under test.
- Try to verify that the outputs (e.g. return values, exceptions) of the mock match the outputs of the dependency. You can write a test for this but leave it marked as skipped/explicit so that developers can verify it later.

17 .github/agents/copilot-agent.yml (vendored)

@@ -1,17 +0,0 @@
enabled: true
agent:
  name: copilot-coding-agent
allow:
  - paths: ["src/**/*", "tests/**/*", "README.md", "AGENTS.md"]
    actions: ["create", "modify", "delete"]
require_review_before_merge: true
required_approvals: 1
allowed_merge_strategies:
  - squash
  - merge
auto_merge_on_green: false
run_workflows: true
notes: |
  - This manifest expresses the policy for the Copilot coding agent in this repository.
  - It does NOT install or authorize the agent; a repository admin must install the Copilot coding agent app and grant the repository the necessary permissions (contents: write, pull_requests: write, checks: write, actions: write/read, issues: write) to allow the agent to act.
  - Keep allow paths narrow and prefer require_review_before_merge during initial rollout.
114 .github/instructions/csharp.instructions.md (vendored, new file)

@@ -0,0 +1,114 @@
---
description: 'Guidelines for building C# applications'
applyTo: '**/*.cs'
---

# C# Development

## C# Instructions

- Always use the latest C# version (currently C# 14) and its features.
- Write clear and concise comments for each function.

## General Instructions

- Make only high-confidence suggestions when reviewing code changes.
- Write code with good maintainability practices, including comments on why certain design decisions were made.
- Handle edge cases and write clear exception handling.
- For libraries or external dependencies, mention their usage and purpose in comments.

## Naming Conventions

- Follow PascalCase for component names, method names, and public members.
- Use camelCase for private fields and local variables.
- Prefix interface names with "I" (e.g., IUserService).

## Formatting

- Apply the code-formatting style defined in `.editorconfig`.
- Prefer file-scoped namespace declarations and single-line using directives.
- Insert a newline before the opening curly brace of any code block (e.g., after `if`, `for`, `while`, `foreach`, `using`, `try`, etc.).
- Ensure that the final return statement of a method is on its own line.
- Use pattern matching and switch expressions wherever possible.
- Use `nameof` instead of string literals when referring to member names.
- Ensure that XML doc comments are created for any public APIs. When applicable, include `<example>` and `<code>` documentation in the comments.
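
A small sketch applying several of these formatting rules together (the type is made up for illustration):

```csharp
using System;

namespace Orders.Services;

/// <summary>Calculates order totals.</summary>
public class OrderCalculator
{
    /// <summary>Returns the total price for <paramref name="quantity"/> items.</summary>
    /// <param name="quantity">Number of items ordered.</param>
    /// <param name="unitPrice">Price of a single item.</param>
    /// <returns>The total price after volume discounts.</returns>
    /// <example>
    /// <code>var total = new OrderCalculator().Total(3, 9.99m);</code>
    /// </example>
    public decimal Total(int quantity, decimal unitPrice)
    {
        if (quantity < 0)
        {
            throw new ArgumentOutOfRangeException(nameof(quantity)); // nameof, not a string literal
        }

        var subtotal = quantity * unitPrice;
        var discounted = quantity switch // switch expression instead of an if/else chain
        {
            >= 100 => subtotal * 0.9m,
            >= 10 => subtotal * 0.95m,
            _ => subtotal,
        };

        return discounted; // final return on its own line
    }
}
```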

## Project Setup and Structure

- Guide users through creating a new .NET project with the appropriate templates.
- Explain the purpose of each generated file and folder to build understanding of the project structure.
- Demonstrate how to organize code using feature folders or domain-driven design principles.
- Show proper separation of concerns with models, services, and data access layers.
- Explain the Program.cs and configuration system in ASP.NET Core 10 including environment-specific settings.

## Nullable Reference Types

- Declare variables non-nullable, and check for `null` at entry points.
- Always use `is null` or `is not null` instead of `== null` or `!= null`.
- Trust the C# null annotations and don't add null checks when the type system says a value cannot be null.
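
A brief sketch of those nullable rules (illustrative types only):

```csharp
using System;
using System.Collections.Generic;

#nullable enable

public sealed record Customer(string Name);

public sealed class CustomerDirectory
{
    private readonly Dictionary<string, Customer> _customers = new();

    // Parameters are non-nullable; the return type is nullable only because "not found" is valid.
    public Customer? Find(string id)
    {
        ArgumentNullException.ThrowIfNull(id); // check at the entry point

        return _customers.TryGetValue(id, out var customer) ? customer : null;
    }

    public string DisplayName(Customer? customer) =>
        customer is not null ? customer.Name : "(unknown)"; // 'is not null', never '!= null'
}
```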

## Data Access Patterns

- Guide the implementation of a data access layer using Entity Framework Core.
- Explain different options (SQL Server, SQLite, In-Memory) for development and production.
- Demonstrate repository pattern implementation and when it's beneficial.
- Show how to implement database migrations and data seeding.
- Explain efficient query patterns to avoid common performance issues.

## Authentication and Authorization

- Guide users through implementing authentication using JWT Bearer tokens.
- Explain OAuth 2.0 and OpenID Connect concepts as they relate to ASP.NET Core.
- Show how to implement role-based and policy-based authorization.
- Demonstrate integration with Microsoft Entra ID (formerly Azure AD).
- Explain how to secure both controller-based and Minimal APIs consistently.

## Validation and Error Handling

- Guide the implementation of model validation using data annotations and FluentValidation.
- Explain the validation pipeline and how to customize validation responses.
- Demonstrate a global exception handling strategy using middleware.
- Show how to create consistent error responses across the API.
- Explain problem details (RFC 7807) implementation for standardized error responses.

## API Versioning and Documentation

- Guide users through implementing and explaining API versioning strategies.
- Demonstrate Swagger/OpenAPI implementation with proper documentation.
- Show how to document endpoints, parameters, responses, and authentication.
- Explain versioning in both controller-based and Minimal APIs.
- Guide users on creating meaningful API documentation that helps consumers.

## Logging and Monitoring

- Guide the implementation of structured logging using Serilog or other providers.
- Explain the logging levels and when to use each.
- Demonstrate integration with Application Insights for telemetry collection.
- Show how to implement custom telemetry and correlation IDs for request tracking.
- Explain how to monitor API performance, errors, and usage patterns.

## Testing

- Always include test cases for critical paths of the application.
- Guide users through creating unit tests.
- Do not emit "Act", "Arrange" or "Assert" comments.
- Copy the existing style in nearby files for test method names and capitalization.
- Explain integration testing approaches for API endpoints.
- Demonstrate how to mock dependencies for effective testing.
- Show how to test authentication and authorization logic.
- Explain test-driven development principles as applied to API development.

## Performance Optimization

- Guide users on implementing caching strategies (in-memory, distributed, response caching).
- Explain asynchronous programming patterns and why they matter for API performance.
- Demonstrate pagination, filtering, and sorting for large data sets.
- Show how to implement compression and other performance optimizations.
- Explain how to measure and benchmark API performance.

## Deployment and DevOps

- Guide users through containerizing their API using .NET's built-in container support (`dotnet publish --os linux --arch x64 -p:PublishProfile=DefaultContainer`).
- Explain the differences between manual Dockerfile creation and .NET's container publishing features.
- Explain CI/CD pipelines for .NET applications.
- Demonstrate deployment to Azure App Service, Azure Container Apps, or other hosting options.
- Show how to implement health checks and readiness probes.
- Explain environment-specific configurations for different deployment stages.

21 .github/prompts/create-readme.prompt.md (vendored, new file)

@@ -0,0 +1,21 @@
---
mode: 'agent'
description: 'Create a README.md file for the project'
---

## Role

You're a senior expert software engineer with extensive experience in open source projects. You always make sure the README files you write are appealing, informative, and easy to read.

## Task

1. Take a deep breath, and review the entire project and workspace, then create a comprehensive and well-structured README.md file for the project.
2. Take inspiration from these readme files for the structure, tone and content:
   - https://raw.githubusercontent.com/Azure-Samples/serverless-chat-langchainjs/refs/heads/main/README.md
   - https://raw.githubusercontent.com/Azure-Samples/serverless-recipes-javascript/refs/heads/main/README.md
   - https://raw.githubusercontent.com/sinedied/run-on-output/refs/heads/main/README.md
   - https://raw.githubusercontent.com/sinedied/smoke/refs/heads/main/README.md
3. Do not overuse emojis, and keep the readme concise and to the point.
4. Do not include sections like "LICENSE", "CONTRIBUTING", "CHANGELOG", etc. There are dedicated files for those sections.
5. Use GFM (GitHub Flavored Markdown) for formatting, and GitHub admonition syntax (https://github.com/orgs/community/discussions/16925) where appropriate.
6. If you find a logo or icon for the project, use it in the readme's header.
127 .github/prompts/create-specification.prompt.md (vendored, new file)

@@ -0,0 +1,127 @@
---
mode: 'agent'
description: 'Create a new specification file for the solution, optimized for Generative AI consumption.'
tools: ['changes', 'search/codebase', 'edit/editFiles', 'extensions', 'fetch', 'githubRepo', 'openSimpleBrowser', 'problems', 'runTasks', 'search', 'search/searchResults', 'runCommands/terminalLastCommand', 'runCommands/terminalSelection', 'testFailure', 'usages', 'vscodeAPI']
---

# Create Specification

Your goal is to create a new specification file for `${input:SpecPurpose}`.

The specification file must define the requirements, constraints, and interfaces for the solution components in a manner that is clear, unambiguous, and structured for effective use by Generative AIs. Follow established documentation standards and ensure the content is machine-readable and self-contained.

## Best Practices for AI-Ready Specifications

- Use precise, explicit, and unambiguous language.
- Clearly distinguish between requirements, constraints, and recommendations.
- Use structured formatting (headings, lists, tables) for easy parsing.
- Avoid idioms, metaphors, or context-dependent references.
- Define all acronyms and domain-specific terms.
- Include examples and edge cases where applicable.
- Ensure the document is self-contained and does not rely on external context.

The specification should be saved in the [/spec/](/spec/) directory and named according to the following convention: `spec-[a-z0-9-]+.md`, where the name is descriptive of the specification's content and starts with the high-level purpose, which is one of [schema, tool, data, infrastructure, process, architecture, or design].

The specification file must be formatted in well-formed Markdown.

Specification files must follow the template below, ensuring that all sections are filled out appropriately. The markdown front matter must be structured as in the following example:

```md
---
title: [Concise Title Describing the Specification's Focus]
version: [Optional: e.g., 1.0, Date]
date_created: [YYYY-MM-DD]
last_updated: [Optional: YYYY-MM-DD]
owner: [Optional: Team/Individual responsible for this spec]
tags: [Optional: List of relevant tags or categories, e.g., `infrastructure`, `process`, `design`, `app` etc]
---

# Introduction

[A short concise introduction to the specification and the goal it is intended to achieve.]

## 1. Purpose & Scope

[Provide a clear, concise description of the specification's purpose and the scope of its application. State the intended audience and any assumptions.]

## 2. Definitions

[List and define all acronyms, abbreviations, and domain-specific terms used in this specification.]

## 3. Requirements, Constraints & Guidelines

[Explicitly list all requirements, constraints, rules, and guidelines. Use bullet points or tables for clarity.]

- **REQ-001**: Requirement 1
- **SEC-001**: Security Requirement 1
- **[3 LETTERS]-001**: Other Requirement 1
- **CON-001**: Constraint 1
- **GUD-001**: Guideline 1
- **PAT-001**: Pattern to follow 1

## 4. Interfaces & Data Contracts

[Describe the interfaces, APIs, data contracts, or integration points. Use tables or code blocks for schemas and examples.]

## 5. Acceptance Criteria

[Define clear, testable acceptance criteria for each requirement using Given-When-Then format where appropriate.]

- **AC-001**: Given [context], When [action], Then [expected outcome]
- **AC-002**: The system shall [specific behavior] when [condition]
- **AC-003**: [Additional acceptance criteria as needed]

## 6. Test Automation Strategy

[Define the testing approach, frameworks, and automation requirements.]

- **Test Levels**: Unit, Integration, End-to-End
- **Frameworks**: MSTest, FluentAssertions, Moq (for .NET applications)
- **Test Data Management**: [approach for test data creation and cleanup]
- **CI/CD Integration**: [automated testing in GitHub Actions pipelines]
- **Coverage Requirements**: [minimum code coverage thresholds]
- **Performance Testing**: [approach for load and performance testing]

## 7. Rationale & Context

[Explain the reasoning behind the requirements, constraints, and guidelines. Provide context for design decisions.]

## 8. Dependencies & External Integrations

[Define the external systems, services, and architectural dependencies required for this specification. Focus on **what** is needed rather than **how** it's implemented. Avoid specific package or library versions unless they represent architectural constraints.]

### External Systems
- **EXT-001**: [External system name] - [Purpose and integration type]

### Third-Party Services
- **SVC-001**: [Service name] - [Required capabilities and SLA requirements]

### Infrastructure Dependencies
- **INF-001**: [Infrastructure component] - [Requirements and constraints]

### Data Dependencies
- **DAT-001**: [External data source] - [Format, frequency, and access requirements]

### Technology Platform Dependencies
- **PLT-001**: [Platform/runtime requirement] - [Version constraints and rationale]

### Compliance Dependencies
- **COM-001**: [Regulatory or compliance requirement] - [Impact on implementation]

**Note**: This section should focus on architectural and business dependencies, not specific package implementations. For example, specify "OAuth 2.0 authentication library" rather than "Microsoft.AspNetCore.Authentication.JwtBearer v6.0.1".

## 9. Examples & Edge Cases

```code
// Code snippet or data example demonstrating the correct application of the guidelines, including edge cases
```

## 10. Validation Criteria

[List the criteria or tests that must be satisfied for compliance with this specification.]

## 11. Related Specifications / Further Reading

[Link to related spec 1]
[Link to relevant external documentation]

```
50 .github/prompts/csharp-async.prompt.md (vendored, new file)

@@ -0,0 +1,50 @@
---
mode: 'agent'
tools: ['changes', 'search/codebase', 'edit/editFiles', 'problems']
description: 'Get best practices for C# async programming'
---

# C# Async Programming Best Practices

Your goal is to help me follow best practices for asynchronous programming in C#.

## Naming Conventions

- Use the 'Async' suffix for all async methods
- Match method names with their synchronous counterparts when applicable (e.g., `GetDataAsync()` for `GetData()`)

## Return Types

- Return `Task<T>` when the method returns a value
- Return `Task` when the method doesn't return a value
- Consider `ValueTask<T>` for high-performance scenarios to reduce allocations
- Avoid returning `void` from async methods except for event handlers

## Exception Handling

- Use try/catch blocks around await expressions
- Avoid swallowing exceptions in async methods
- Use `ConfigureAwait(false)` when appropriate to prevent deadlocks in library code
- Propagate exceptions with `Task.FromException()` instead of throwing synchronously in non-async, Task-returning methods

## Performance

- Use `Task.WhenAll()` for parallel execution of multiple tasks
- Use `Task.WhenAny()` for implementing timeouts or taking the first completed task
- Avoid unnecessary async/await when simply passing through task results
- Consider cancellation tokens for long-running operations
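
A sketch of the `Task.WhenAll` tip (the endpoints and record types are invented; assumes `System.Net.Http.Json` is available):

```csharp
using System;
using System.Net.Http;
using System.Net.Http.Json;
using System.Threading;
using System.Threading.Tasks;

public sealed record Product(string Id);
public sealed record Price(string ProductId, decimal Amount);

public static class CatalogClient
{
    public static async Task<(Product[] Products, Price[] Prices)> LoadAsync(
        HttpClient http, CancellationToken ct)
    {
        // Start both requests first, then await them together instead of sequentially.
        var productsTask = http.GetFromJsonAsync<Product[]>("/products", ct);
        var pricesTask = http.GetFromJsonAsync<Price[]>("/prices", ct);

        await Task.WhenAll(productsTask, pricesTask).ConfigureAwait(false);

        return (await productsTask ?? Array.Empty<Product>(),
                await pricesTask ?? Array.Empty<Price>());
    }
}
```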

## Common Pitfalls

- Never use `.Wait()`, `.Result`, or `.GetAwaiter().GetResult()` in async code
- Avoid mixing blocking and async code
- Don't create async void methods (except for event handlers)
- Always await Task-returning methods

## Implementation Patterns

- Implement the async command pattern for long-running operations
- Use async streams (`IAsyncEnumerable<T>`) for processing sequences asynchronously
- Consider the task-based asynchronous pattern (TAP) for public APIs

When reviewing my C# code, identify these issues and suggest improvements that follow these best practices.

69 .github/prompts/csharp-xunit.prompt.md (vendored, new file)

@@ -0,0 +1,69 @@
---
mode: 'agent'
tools: ['changes', 'search/codebase', 'edit/editFiles', 'problems', 'search']
description: 'Get best practices for XUnit unit testing, including data-driven tests'
---

# XUnit Best Practices

Your goal is to help me write effective unit tests with XUnit, covering both standard and data-driven testing approaches.

## Project Setup

- Use a separate test project with naming convention `[ProjectName].Tests`
- Reference Microsoft.NET.Test.Sdk, xunit, and xunit.runner.visualstudio packages
- Create test classes that match the classes being tested (e.g., `CalculatorTests` for `Calculator`)
- Use .NET SDK test commands: `dotnet test` for running tests

## Test Structure

- No test class attributes required (unlike MSTest/NUnit)
- Use fact-based tests with `[Fact]` attribute for simple tests
- Follow the Arrange-Act-Assert (AAA) pattern
- Name tests using the pattern `MethodName_Scenario_ExpectedBehavior`
- Use constructor for setup and `IDisposable.Dispose()` for teardown
- Use `IClassFixture<T>` for shared context between tests in a class
- Use `ICollectionFixture<T>` for shared context between multiple test classes
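
A compact sketch of constructor setup, `IDisposable` teardown, and a class fixture (the fixture and test names are illustrative):

```csharp
using System;
using Xunit;

public sealed class DatabaseFixture : IDisposable
{
    public DatabaseFixture() => ConnectionString = "Data Source=:memory:"; // created once per test class

    public string ConnectionString { get; }

    public void Dispose() { /* tear down shared resources once per class */ }
}

public class OrderRepositoryTests : IClassFixture<DatabaseFixture>, IDisposable
{
    private readonly DatabaseFixture _fixture;

    public OrderRepositoryTests(DatabaseFixture fixture) => _fixture = fixture; // per-test setup

    public void Dispose() { /* per-test teardown */ }

    [Fact]
    public void Fixture_IsAvailable_ToEveryTest()
    {
        Assert.NotNull(_fixture.ConnectionString);
    }
}
```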

## Standard Tests

- Keep tests focused on a single behavior
- Avoid testing multiple behaviors in one test method
- Use clear assertions that express intent
- Include only the assertions needed to verify the test case
- Make tests independent and idempotent (can run in any order)
- Avoid test interdependencies

## Data-Driven Tests

- Use `[Theory]` combined with data source attributes
- Use `[InlineData]` for inline test data
- Use `[MemberData]` for method-based test data
- Use `[ClassData]` for class-based test data
- Create custom data attributes by implementing `DataAttribute`
- Use meaningful parameter names in data-driven tests
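
For example, `[InlineData]` and `[MemberData]` together (the `Calculator` type is a stand-in, not an existing API):

```csharp
using System.Collections.Generic;
using Xunit;

internal static class Calculator
{
    public static int Add(int left, int right) => left + right;
}

public class CalculatorTests
{
    public static IEnumerable<object[]> EdgeCases =>
        new List<object[]>
        {
            new object[] { int.MaxValue, 0, int.MaxValue },
            new object[] { -5, 5, 0 },
        };

    [Theory]
    [InlineData(1, 2, 3)]
    [InlineData(2, 2, 4)]
    public void Add_TwoNumbers_ReturnsSum(int left, int right, int expected) =>
        Assert.Equal(expected, Calculator.Add(left, right));

    [Theory]
    [MemberData(nameof(EdgeCases))]
    public void Add_EdgeCases_ReturnsSum(int left, int right, int expected) =>
        Assert.Equal(expected, Calculator.Add(left, right));
}
```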

## Assertions

- Use `Assert.Equal` for value equality
- Use `Assert.Same` for reference equality
- Use `Assert.True`/`Assert.False` for boolean conditions
- Use `Assert.Contains`/`Assert.DoesNotContain` for collections
- Use `Assert.Matches`/`Assert.DoesNotMatch` for regex pattern matching
- Use `Assert.Throws<T>` or `await Assert.ThrowsAsync<T>` to test exceptions
- Use a fluent assertions library for more readable assertions

## Mocking and Isolation

- Consider using Moq or NSubstitute alongside XUnit
- Mock dependencies to isolate units under test
- Use interfaces to facilitate mocking
- Consider using a DI container for complex test setups

## Test Organization

- Group tests by feature or component
- Use `[Trait("Category", "CategoryName")]` for categorization
- Use collection fixtures to group tests with shared dependencies
- Consider output helpers (`ITestOutputHelper`) for test diagnostics
- Skip tests conditionally with `Skip = "reason"` in fact/theory attributes

84 .github/prompts/dotnet-best-practices.prompt.md (vendored, new file)

@@ -0,0 +1,84 @@
---
mode: 'agent'
description: 'Ensure .NET/C# code meets best practices for the solution/project.'
---

# .NET/C# Best Practices

Your task is to ensure the .NET/C# code in ${selection} meets the best practices specific to this solution/project. This includes:

## Documentation & Structure

- Create comprehensive XML documentation comments for all public classes, interfaces, methods, and properties
- Include parameter descriptions and return value descriptions in XML comments
- Follow the established namespace structure: {Core|Console|App|Service}.{Feature}

## Design Patterns & Architecture

- Use primary constructor syntax for dependency injection (e.g., `public class MyClass(IDependency dependency)`)
- Implement the Command Handler pattern with generic base classes (e.g., `CommandHandler<TOptions>`)
- Use interface segregation with clear naming conventions (prefix interfaces with 'I')
- Follow the Factory pattern for complex object creation.
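
A minimal sketch of the primary-constructor DI style (the interface, handler, and log message are illustrative; assumes the Microsoft.Extensions.Logging abstractions are referenced):

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;

public interface IReportStore
{
    Task SaveAsync(string name, CancellationToken ct);
}

// C# 12 primary constructor: parameters are captured and usable in members.
public class ExportHandler(IReportStore store, ILogger<ExportHandler> logger)
{
    public async Task HandleAsync(string name, CancellationToken ct)
    {
        ArgumentNullException.ThrowIfNull(name);

        logger.LogInformation("Exporting {Report}", name);
        await store.SaveAsync(name, ct).ConfigureAwait(false);
    }
}
```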

## Dependency Injection & Services

- Use constructor dependency injection with null checks via ArgumentNullException
- Register services with appropriate lifetimes (Singleton, Scoped, Transient)
- Use Microsoft.Extensions.DependencyInjection patterns
- Implement service interfaces for testability

## Resource Management & Localization

- Use ResourceManager for localized messages and error strings
- Separate LogMessages and ErrorMessages resource files
- Access resources via `_resourceManager.GetString("MessageKey")`

## Async/Await Patterns

- Use async/await for all I/O operations and long-running tasks
- Return Task or Task<T> from async methods
- Use ConfigureAwait(false) where appropriate
- Handle async exceptions properly

## Testing Standards

- Use MSTest framework with FluentAssertions for assertions
- Follow AAA pattern (Arrange, Act, Assert)
- Use Moq for mocking dependencies
- Test both success and failure scenarios
- Include null parameter validation tests

## Configuration & Settings

- Use strongly-typed configuration classes with data annotations
- Implement validation attributes (Required, NotEmptyOrWhitespace)
- Use IConfiguration binding for settings
- Support appsettings.json configuration files
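
One way this can look with the standard options APIs (the `ExportSettings` class and "Export" section are invented; `NotEmptyOrWhitespace` above is presumably a solution-specific attribute and would slot in alongside the standard annotations):

```csharp
using System.ComponentModel.DataAnnotations;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;

public sealed class ExportSettings
{
    [Required]
    public string OutputPath { get; set; } = string.Empty;

    [Range(1, 64)]
    public int MaxParallelism { get; set; } = 4;
}

public static class ExportSettingsRegistration
{
    public static IServiceCollection AddExportSettings(this IServiceCollection services, IConfiguration config)
    {
        // Bind a strongly-typed settings class and validate its data annotations at startup.
        services
            .AddOptions<ExportSettings>()
            .Bind(config.GetSection("Export"))
            .ValidateDataAnnotations()
            .ValidateOnStart();

        return services;
    }
}
```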

## Semantic Kernel & AI Integration

- Use Microsoft.SemanticKernel for AI operations
- Implement proper kernel configuration and service registration
- Handle AI model settings (ChatCompletion, Embedding, etc.)
- Use structured output patterns for reliable AI responses

## Error Handling & Logging

- Use structured logging with Microsoft.Extensions.Logging
- Include scoped logging with meaningful context
- Throw specific exceptions with descriptive messages
- Use try-catch blocks for expected failure scenarios

## Performance & Security

- Use C# 12+ features and .NET 8 optimizations where applicable
- Implement proper input validation and sanitization
- Use parameterized queries for database operations
- Follow secure coding practices for AI/ML operations

## Code Quality

- Ensure SOLID principles compliance
- Avoid code duplication through base classes and utilities
- Use meaningful names that reflect domain concepts
- Keep methods focused and cohesive
- Implement proper disposal patterns for resources

41 .github/prompts/dotnet-design-pattern-review.prompt.md (vendored, new file)

@@ -0,0 +1,41 @@
---
mode: 'agent'
description: 'Review the C#/.NET code for design pattern implementation and suggest improvements.'
---

# .NET/C# Design Pattern Review

Review the C#/.NET code in ${selection} for design pattern implementation and suggest improvements for the solution/project. Do not make any changes to the code, just provide a review.

## Required Design Patterns

- **Command Pattern**: Generic base classes (`CommandHandler<TOptions>`), `ICommandHandler<TOptions>` interface, `CommandHandlerOptions` inheritance, static `SetupCommand(IHost host)` methods
- **Factory Pattern**: Complex object creation with service provider integration
- **Dependency Injection**: Primary constructor syntax, `ArgumentNullException` null checks, interface abstractions, proper service lifetimes
- **Repository Pattern**: Async data access interfaces, provider abstractions for connections
- **Provider Pattern**: External service abstractions (database, AI), clear contracts, configuration handling
- **Resource Pattern**: ResourceManager for localized messages, separate .resx files (LogMessages, ErrorMessages)
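
One possible shape for the Command Handler pieces named above, purely as a reference point for reviews; the solution's actual base classes may differ, so treat every name here as an assumption:

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;

public abstract class CommandHandlerOptions { }

public interface ICommandHandler<in TOptions>
    where TOptions : CommandHandlerOptions
{
    Task<int> HandleAsync(TOptions options, CancellationToken ct);
}

public abstract class CommandHandler<TOptions> : ICommandHandler<TOptions>
    where TOptions : CommandHandlerOptions
{
    public async Task<int> HandleAsync(TOptions options, CancellationToken ct)
    {
        // Shared validation lives in the base class; concrete handlers implement ExecuteAsync.
        ArgumentNullException.ThrowIfNull(options);
        return await ExecuteAsync(options, ct).ConfigureAwait(false);
    }

    protected abstract Task<int> ExecuteAsync(TOptions options, CancellationToken ct);
}
```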

## Review Checklist

- **Design Patterns**: Identify patterns used. Are Command Handler, Factory, Provider, and Repository patterns correctly implemented? Missing beneficial patterns?
- **Architecture**: Follow namespace conventions (`{Core|Console|App|Service}.{Feature}`)? Proper separation between Core/Console projects? Modular and readable?
- **.NET Best Practices**: Primary constructors, async/await with Task returns, ResourceManager usage, structured logging, strongly-typed configuration?
- **GoF Patterns**: Command, Factory, Template Method, Strategy patterns correctly implemented?
- **SOLID Principles**: Single Responsibility, Open/Closed, Liskov Substitution, Interface Segregation, Dependency Inversion violations?
- **Performance**: Proper async/await, resource disposal, ConfigureAwait(false), parallel processing opportunities?
- **Maintainability**: Clear separation of concerns, consistent error handling, proper configuration usage?
- **Testability**: Dependencies abstracted via interfaces, mockable components, async testability, AAA pattern compatibility?
- **Security**: Input validation, secure credential handling, parameterized queries, safe exception handling?
- **Documentation**: XML docs for public APIs, parameter/return descriptions, resource file organization?
- **Code Clarity**: Meaningful names reflecting domain concepts, clear intent through patterns, self-explanatory structure?
- **Clean Code**: Consistent style, appropriate method/class size, minimal complexity, eliminated duplication?

## Improvement Focus Areas

- **Command Handlers**: Validation in base class, consistent error handling, proper resource management
- **Factories**: Dependency configuration, service provider integration, disposal patterns
- **Providers**: Connection management, async patterns, exception handling and logging
- **Configuration**: Data annotations, validation attributes, secure sensitive value handling
- **AI/ML Integration**: Semantic Kernel patterns, structured output handling, model configuration

Provide specific, actionable recommendations for improvements aligned with the project's architecture and .NET best practices.
2 .github/workflows/dotnetcore.yml (vendored)

@@ -14,7 +14,7 @@ jobs:
os: [windows-latest, ubuntu-latest]

steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v5
- uses: actions/setup-dotnet@v5
with:
dotnet-version: 8.0.x
@@ -1,7 +1,7 @@
<Project>
<ItemGroup>
<PackageVersion Include="Bullseye" Version="6.0.0" />
<PackageVersion Include="AwesomeAssertions" Version="9.3.0" />
<PackageVersion Include="AwesomeAssertions" Version="9.2.1" />
<PackageVersion Include="Glob" Version="1.1.9" />
<PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.14" />
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="8.0.0" />
@@ -172,9 +172,4 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
return Entries.All(x => x.IsComplete);
}
}

public virtual bool IsMultiVolume =>
_sourceStream?.Files.Count > 1 || _sourceStream?.Streams.Count > 1;

public virtual bool SupportsMultiThreading => false;
}
@@ -45,14 +45,4 @@ public interface IArchive : IDisposable
/// The total size of the files as uncompressed in the archive.
/// </summary>
long TotalUncompressSize { get; }

/// <summary>
/// Is the archive part of a multi-volume set.
/// </summary>
bool IsMultiVolume { get; }

/// <summary>
/// Does the archive support multi-threaded extraction.
/// </summary>
bool SupportsMultiThreading { get; }
}
@@ -88,7 +88,7 @@ public static class IArchiveEntryExtensions
entry,
destinationDirectory,
options,
entry.WriteToFileAsync,
(x, opt) => entry.WriteToFileAsync(x, opt, cancellationToken),
cancellationToken
);

@@ -124,11 +124,10 @@ public static class IArchiveEntryExtensions
entry,
destinationFileName,
options,
async (x, fm, ct) =>
async (x, fm) =>
{
using var fs = File.Open(destinationFileName, fm);
await entry.WriteToAsync(fs, ct).ConfigureAwait(false);
},
cancellationToken
await entry.WriteToAsync(fs, cancellationToken).ConfigureAwait(false);
}
);
}
39 src/SharpCompress/Archives/Rar/FileInfoRarArchiveVolume.cs (new file)

@@ -0,0 +1,39 @@
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;

namespace SharpCompress.Archives.Rar;

/// <summary>
/// A rar part based on a FileInfo object
/// </summary>
internal class FileInfoRarArchiveVolume : RarVolume
{
internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options, int index)
: base(StreamingMode.Seekable, fileInfo.OpenRead(), FixOptions(options), index)
{
FileInfo = fileInfo;
FileParts = GetVolumeFileParts().ToArray().ToReadOnly();
}

private static ReaderOptions FixOptions(ReaderOptions options)
{
//make sure we're closing streams with fileinfo
options.LeaveStreamOpen = false;
return options;
}

internal ReadOnlyCollection<RarFilePart> FileParts { get; }

internal FileInfo FileInfo { get; }

internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader) =>
new FileInfoRarFilePart(this, ReaderOptions.Password, markHeader, fileHeader, FileInfo);

internal override IEnumerable<RarFilePart> ReadFileParts() => FileParts;
}
21 src/SharpCompress/Archives/Rar/FileInfoRarFilePart.cs (new file)

@@ -0,0 +1,21 @@
using System.IO;
using SharpCompress.Common.Rar.Headers;

namespace SharpCompress.Archives.Rar;

internal sealed class FileInfoRarFilePart : SeekableFilePart
{
internal FileInfoRarFilePart(
FileInfoRarArchiveVolume volume,
string? password,
MarkHeader mh,
FileHeader fh,
FileInfo fi
)
: base(mh, fh, volume.Index, volume.Stream, password) => FileInfo = fi;

internal FileInfo FileInfo { get; }

internal override string FilePartName =>
"Rar File: " + FileInfo.FullName + " File Entry: " + FileHeader.FileName;
}
@@ -47,9 +47,9 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
{
sourceStream.LoadAllParts(); //request all streams
var streams = sourceStream.Streams.ToArray();
var i = 0;
if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions)) //test part 2 - true = multipart not split
{
var i = 0;
sourceStream.IsVolumes = true;
streams[1].Position = 0;
sourceStream.Position = 0;

@@ -57,18 +57,12 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
return sourceStream.Streams.Select(a => new StreamRarArchiveVolume(
a,
ReaderOptions,
i++,
IsMultiVolume
i++
));
}

//split mode or single file
return new StreamRarArchiveVolume(
sourceStream,
ReaderOptions,
0,
IsMultiVolume
).AsEnumerable();
return new StreamRarArchiveVolume(sourceStream, ReaderOptions, i++).AsEnumerable();
}

protected override IReader CreateReaderForSolidExtraction()

@@ -89,7 +83,6 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
}

public override bool IsSolid => Volumes.First().IsSolidArchive;
public override bool SupportsMultiThreading => !IsMultiVolume && !IsSolid;

public virtual int MinVersion => Volumes.First().MinVersion;
public virtual int MaxVersion => Volumes.First().MaxVersion;

@@ -134,6 +134,4 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
);
}
}

public override bool SupportsMultiThreading => Parts.Single().SupportsMultiThreading;
}
@@ -1,29 +1,25 @@
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;

namespace SharpCompress.Archives.Rar;

internal class SeekableRarFilePart : RarFilePart
internal class SeekableFilePart : RarFilePart
{
private readonly Stream _stream;
private readonly string? _password;
private readonly bool _isMultiVolume;

internal SeekableRarFilePart(
internal SeekableFilePart(
MarkHeader mh,
FileHeader fh,
int index,
Stream stream,
string? password,
bool isMultiVolume
string? password
)
: base(mh, fh, index)
{
_stream = stream;
_password = password;
_isMultiVolume = isMultiVolume;
}

internal override Stream GetCompressedStream()

@@ -46,7 +42,4 @@ internal class SeekableRarFilePart : RarFilePart
}

internal override string FilePartName => "Unknown Stream - File Entry: " + FileHeader.FileName;

public override bool SupportsMultiThreading =>
!_isMultiVolume && _stream is SourceStream ss && ss.IsFileMode && ss.Files.Count == 1;
}
@@ -9,28 +9,11 @@ namespace SharpCompress.Archives.Rar;

internal class StreamRarArchiveVolume : RarVolume
{
private readonly bool _isMultiVolume;

internal StreamRarArchiveVolume(
Stream stream,
ReaderOptions options,
int index,
bool isMultiVolume
)
: base(StreamingMode.Seekable, stream, options, index)
{
_isMultiVolume = isMultiVolume;
}
internal StreamRarArchiveVolume(Stream stream, ReaderOptions options, int index)
: base(StreamingMode.Seekable, stream, options, index) { }

internal override IEnumerable<RarFilePart> ReadFileParts() => GetVolumeFileParts();

internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader) =>
new SeekableRarFilePart(
markHeader,
fileHeader,
Index,
Stream,
ReaderOptions.Password,
_isMultiVolume
);
new SeekableFilePart(markHeader, fileHeader, Index, Stream, ReaderOptions.Password);
}
@@ -283,12 +283,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>

yield return new ZipArchiveEntry(
this,
new SeekableZipFilePart(
headerFactory.NotNull(),
deh,
s,
IsMultiVolume
)
new SeekableZipFilePart(headerFactory.NotNull(), deh, s)
);
}
break;

@@ -390,6 +385,4 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
((IStreamStack)stream).StackSeek(0);
return ZipReader.Open(stream, ReaderOptions, Entries);
}

public override bool SupportsMultiThreading => !IsMultiVolume;
}

@@ -23,7 +23,5 @@ public class ZipArchiveEntry : ZipEntry, IArchiveEntry

public bool IsComplete => true;

public override bool SupportsMultiThreading => Parts.Single().SupportsMultiThreading;

#endregion
}
@@ -57,7 +57,7 @@ namespace SharpCompress.Common.Arc
return value switch
{
1 or 2 => CompressionType.None,
3 => CompressionType.Packed,
3 => CompressionType.RLE90,
4 => CompressionType.Squeezed,
5 or 6 or 7 or 8 => CompressionType.Crunched,
9 => CompressionType.Squashed,

@@ -44,7 +44,7 @@ namespace SharpCompress.Common.Arc
Header.CompressedSize
);
break;
case CompressionType.Packed:
case CompressionType.RLE90:
compressedStream = new RunLength90Stream(
_stream,
(int)Header.CompressedSize

@@ -54,14 +54,6 @@ namespace SharpCompress.Common.Arc
compressedStream = new SqueezeStream(_stream, (int)Header.CompressedSize);
break;
case CompressionType.Crunched:
if (Header.OriginalSize > 128 * 1024)
{
throw new NotSupportedException(
"CompressionMethod: "
+ Header.CompressionMethod
+ " with size > 128KB"
);
}
compressedStream = new ArcLzwStream(
_stream,
(int)Header.CompressedSize,
@@ -41,7 +41,7 @@ namespace SharpCompress.Common.Arj
case CompressionMethod.CompressedMost:
case CompressionMethod.Compressed:
case CompressionMethod.CompressedFaster:
if (Header.OriginalSize > 128 * 1024)
if (Header.CompressedSize > 128 * 1024)
{
throw new NotSupportedException(
"CompressionMethod: "

@@ -23,7 +23,7 @@ public enum CompressionType
Reduce4,
Explode,
Squeezed,
Packed,
RLE90,
Crunched,
Squashed,
Crushed,
@@ -87,5 +87,4 @@ public abstract class Entry : IEntry
/// Entry file attribute.
/// </summary>
public virtual int? Attrib => throw new NotImplementedException();
public virtual bool SupportsMultiThreading => false;
}
@@ -128,7 +128,7 @@ internal static class ExtractionMethods
IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Func<string, ExtractionOptions?, CancellationToken, Task> writeAsync,
Func<string, ExtractionOptions?, Task> writeAsync,
CancellationToken cancellationToken = default
)
{

@@ -189,7 +189,7 @@ internal static class ExtractionMethods
"Entry is trying to write a file outside of the destination directory."
);
}
await writeAsync(destinationFileName, options, cancellationToken).ConfigureAwait(false);
await writeAsync(destinationFileName, options).ConfigureAwait(false);
}
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
{

@@ -201,7 +201,7 @@ internal static class ExtractionMethods
IEntry entry,
string destinationFileName,
ExtractionOptions? options,
Func<string, FileMode, CancellationToken, Task> openAndWriteAsync,
Func<string, FileMode, Task> openAndWriteAsync,
CancellationToken cancellationToken = default
)
{

@@ -225,8 +225,7 @@ internal static class ExtractionMethods
fm = FileMode.CreateNew;
}

await openAndWriteAsync(destinationFileName, fm, cancellationToken)
.ConfigureAwait(false);
await openAndWriteAsync(destinationFileName, fm).ConfigureAwait(false);
entry.PreserveExtractionOptions(destinationFileName, options);
}
}
@@ -14,6 +14,4 @@ public abstract class FilePart
internal abstract Stream? GetCompressedStream();
internal abstract Stream? GetRawStream();
internal bool Skipped { get; set; }

public virtual bool SupportsMultiThreading => false;
}

@@ -21,5 +21,4 @@ public interface IEntry
DateTime? LastModifiedTime { get; }
long Size { get; }
int? Attrib { get; }
bool SupportsMultiThreading { get; }
}
@@ -1,6 +1,5 @@
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;

namespace SharpCompress.Common.Zip;

@@ -8,19 +7,13 @@ internal class SeekableZipFilePart : ZipFilePart
{
private bool _isLocalHeaderLoaded;
private readonly SeekableZipHeaderFactory _headerFactory;
private readonly bool _isMultiVolume;

internal SeekableZipFilePart(
SeekableZipHeaderFactory headerFactory,
DirectoryEntryHeader header,
Stream stream,
bool isMultiVolume
Stream stream
)
: base(header, stream)
{
_headerFactory = headerFactory;
_isMultiVolume = isMultiVolume;
}
: base(header, stream) => _headerFactory = headerFactory;

internal override Stream GetCompressedStream()
{

@@ -37,20 +30,8 @@ internal class SeekableZipFilePart : ZipFilePart

protected override Stream CreateBaseStream()
{
if (!_isMultiVolume && BaseStream is SourceStream ss)
{
if (ss.IsFileMode && ss.Files.Count == 1)
{
var fileStream = ss.CurrentFile.OpenRead();
fileStream.Position = Header.DataStartPosition.NotNull();
return fileStream;
}
}
BaseStream.Position = Header.DataStartPosition.NotNull();

return BaseStream;
}

public override bool SupportsMultiThreading =>
!_isMultiVolume && BaseStream is SourceStream ss && ss.IsFileMode && ss.Files.Count == 1;
}
@@ -62,6 +62,10 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
base.Dispose(disposing);
if (disposing)
{
#if DEBUG_STREAMS
this.DebugDispose(typeof(MultiVolumeReadOnlyStream));
#endif

if (filePartEnumerator != null)
{
filePartEnumerator.Dispose();

@@ -82,6 +82,9 @@ internal class RarStream : Stream, IStreamStack
{
if (disposing)
{
#if DEBUG_STREAMS
this.DebugDispose(typeof(RarStream));
#endif
ArrayPool<byte>.Shared.Return(this.tmpBuffer);
this.tmpBuffer = null;
}
@@ -15,7 +15,7 @@ public class SourceStream : Stream, IStreamStack
#endif
int IStreamStack.DefaultBufferSize { get; set; }

Stream IStreamStack.BaseStream() => _streams[_streamIndex];
Stream IStreamStack.BaseStream() => _streams[_stream];

int IStreamStack.BufferSize
{
@@ -35,7 +35,7 @@ public class SourceStream : Stream, IStreamStack
private readonly List<Stream> _streams;
private readonly Func<int, FileInfo?>? _getFilePart;
private readonly Func<int, Stream?>? _getStreamPart;
private int _streamIndex;
private int _stream;

public SourceStream(FileInfo file, Func<int, FileInfo?> getPart, ReaderOptions options)
: this(null, null, file, getPart, options) { }
@@ -59,7 +59,7 @@ public class SourceStream : Stream, IStreamStack

if (!IsFileMode)
{
_streams.Add(stream.NotNull("stream is null"));
_streams.Add(stream!);
_getStreamPart = getStreamPart;
_getFilePart = _ => null;
if (stream is FileStream fileStream)
@@ -69,12 +69,12 @@ public class SourceStream : Stream, IStreamStack
}
else
{
_files.Add(file.NotNull("file is null"));
_files.Add(file!);
_streams.Add(_files[0].OpenRead());
_getFilePart = getFilePart;
_getStreamPart = _ => null;
}
_streamIndex = 0;
_stream = 0;
_prevSize = 0;

#if DEBUG_STREAMS
@@ -93,12 +93,10 @@ public class SourceStream : Stream, IStreamStack
public ReaderOptions ReaderOptions { get; }
public bool IsFileMode { get; }

public IReadOnlyList<FileInfo> Files => _files;
public IReadOnlyList<Stream> Streams => _streams;
public IEnumerable<FileInfo> Files => _files;
public IEnumerable<Stream> Streams => _streams;

private Stream Current => _streams[_streamIndex];

public FileInfo CurrentFile => _files[_streamIndex];
private Stream Current => _streams[_stream];

public bool LoadStream(int index) //ensure all parts to id are loaded
{
@@ -109,7 +107,7 @@ public class SourceStream : Stream, IStreamStack
var f = _getFilePart.NotNull("GetFilePart is null")(_streams.Count);
if (f == null)
{
_streamIndex = _streams.Count - 1;
_stream = _streams.Count - 1;
return false;
}
//throw new Exception($"File part {idx} not available.");
@@ -121,7 +119,7 @@ public class SourceStream : Stream, IStreamStack
var s = _getStreamPart.NotNull("GetStreamPart is null")(_streams.Count);
if (s == null)
{
_streamIndex = _streams.Count - 1;
_stream = _streams.Count - 1;
return false;
}
//throw new Exception($"Stream part {idx} not available.");
@@ -139,10 +137,10 @@ public class SourceStream : Stream, IStreamStack
{
if (LoadStream(idx))
{
_streamIndex = idx;
_stream = idx;
}

return _streamIndex == idx;
return _stream == idx;
}

public override bool CanRead => true;
@@ -186,7 +184,7 @@ public class SourceStream : Stream, IStreamStack
var length = Current.Length;

// Load next file if present
if (!SetStream(_streamIndex + 1))
if (!SetStream(_stream + 1))
{
break;
}
@@ -225,7 +223,7 @@ public class SourceStream : Stream, IStreamStack
while (_prevSize + Current.Length < pos)
{
_prevSize += Current.Length;
SetStream(_streamIndex + 1);
SetStream(_stream + 1);
}
}

@@ -275,7 +273,7 @@ public class SourceStream : Stream, IStreamStack
var length = Current.Length;

// Load next file if present
if (!SetStream(_streamIndex + 1))
if (!SetStream(_stream + 1))
{
break;
}
@@ -324,7 +322,7 @@ public class SourceStream : Stream, IStreamStack
var length = Current.Length;

// Load next file if present
if (!SetStream(_streamIndex + 1))
if (!SetStream(_stream + 1))
{
break;
}

@@ -82,7 +82,7 @@ public static class IReaderExtensions
reader.Entry,
destinationDirectory,
options,
reader.WriteEntryToFileAsync,
(fileName, opts) => reader.WriteEntryToFileAsync(fileName, opts, cancellationToken),
cancellationToken
)
.ConfigureAwait(false);
@@ -101,10 +101,10 @@ public static class IReaderExtensions
reader.Entry,
destinationFileName,
options,
async (x, fm, ct) =>
async (x, fm) =>
{
using var fs = File.Open(destinationFileName, fm);
await reader.WriteEntryToAsync(fs, ct).ConfigureAwait(false);
await reader.WriteEntryToAsync(fs, cancellationToken).ConfigureAwait(false);
},
cancellationToken
)

@@ -48,29 +48,7 @@ internal class ZipCentralDirectoryEntry
var decompressedvalue = zip64 ? uint.MaxValue : (uint)Decompressed;
var headeroffsetvalue = zip64 ? uint.MaxValue : (uint)HeaderOffset;
var extralength = zip64 ? (2 + 2 + 8 + 8 + 8 + 4) : 0;

// Determine version needed to extract:
// - Version 63 for LZMA, PPMd, BZip2, ZStandard (advanced compression methods)
// - Version 45 for Zip64 extensions (when Zip64HeaderOffset != 0 or actual sizes require it)
// - Version 20 for standard Deflate/None compression
byte version;
if (
compression == ZipCompressionMethod.LZMA
|| compression == ZipCompressionMethod.PPMd
|| compression == ZipCompressionMethod.BZip2
|| compression == ZipCompressionMethod.ZStandard
)
{
version = 63;
}
else if (zip64 || Zip64HeaderOffset != 0)
{
version = 45;
}
else
{
version = 20;
}
var version = (byte)(zip64 ? 45 : 20); // Version 20 required for deflate/encryption

var flags = Equals(archiveEncoding.GetEncoding(), Encoding.UTF8)
? HeaderFlags.Efs

@@ -27,22 +27,5 @@ namespace SharpCompress.Test.Arc

[Fact]
public void Arc_Crunched_Read() => Read("Arc.crunched.arc");

[Theory]
[InlineData("Arc.crunched.largefile.arc", CompressionType.Crunched)]
public void Arc_LargeFile_ShouldThrow(string fileName, CompressionType compressionType)
{
var exception = Assert.Throws<NotSupportedException>(() =>
ReadForBufferBoundaryCheck(fileName, compressionType)
);
}

[Theory]
[InlineData("Arc.uncompressed.largefile.arc", CompressionType.None)]
[InlineData("Arc.squeezed.largefile.arc", CompressionType.Squeezed)]
public void Arc_LargeFileTest_Read(string fileName, CompressionType compressionType)
{
ReadForBufferBoundaryCheck(fileName, compressionType);
}
}
}

@@ -134,7 +134,6 @@ public class ArchiveTests : ReaderTests
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
Assert.False(entry.SupportsMultiThreading);
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
@@ -267,31 +266,6 @@ public class ArchiveTests : ReaderTests
VerifyFiles();
}

protected async Task ArchiveFileRead_Multithreaded(
IArchiveFactory archiveFactory,
string testArchive,
ReaderOptions? readerOptions = null
)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
var tasks = new List<Task>();
using (var archive = archiveFactory.Open(new FileInfo(testArchive), readerOptions))
{
Assert.True(archive.SupportsMultiThreading);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
Assert.True(entry.SupportsMultiThreading);
var t = entry.WriteToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
tasks.Add(t);
}
}
await Task.WhenAll(tasks);
VerifyFiles();
}

protected void ArchiveFileRead(
IArchiveFactory archiveFactory,
string testArchive,
@@ -315,11 +289,6 @@ public class ArchiveTests : ReaderTests
protected void ArchiveFileRead(string testArchive, ReaderOptions? readerOptions = null) =>
ArchiveFileRead(ArchiveFactory.AutoFactory, testArchive, readerOptions);

protected Task ArchiveFileRead_Multithreaded(
string testArchive,
ReaderOptions? readerOptions = null
) => ArchiveFileRead_Multithreaded(ArchiveFactory.AutoFactory, testArchive, readerOptions);

protected void ArchiveFileSkip(
string testArchive,
string fileOrder,

@@ -45,14 +45,16 @@ namespace SharpCompress.Test.Arj
public void Arj_Multi_Reader()
{
var exception = Assert.Throws<MultiVolumeExtractionException>(() =>
DoArj_Multi_Reader([
"Arj.store.split.arj",
"Arj.store.split.a01",
"Arj.store.split.a02",
"Arj.store.split.a03",
"Arj.store.split.a04",
"Arj.store.split.a05",
])
DoArj_Multi_Reader(
[
"Arj.store.split.arj",
"Arj.store.split.a01",
"Arj.store.split.a02",
"Arj.store.split.a03",
"Arj.store.split.a04",
"Arj.store.split.a05",
]
)
);
}

@@ -63,7 +65,7 @@ namespace SharpCompress.Test.Arj
public void Arj_LargeFile_ShouldThrow(string fileName, CompressionType compressionType)
{
var exception = Assert.Throws<NotSupportedException>(() =>
ReadForBufferBoundaryCheck(fileName, compressionType)
Arj_LargeFileTest_Read(fileName, compressionType)
);
}

@@ -72,7 +74,24 @@ namespace SharpCompress.Test.Arj
[InlineData("Arj.method4.largefile.arj", CompressionType.ArjLZ77)]
public void Arj_LargeFileTest_Read(string fileName, CompressionType compressionType)
{
ReadForBufferBoundaryCheck(fileName, compressionType);
using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, fileName)))
using (
var reader = ReaderFactory.Open(stream, new ReaderOptions { LookForHeader = true })
)
{
while (reader.MoveToNextEntry())
{
Assert.Equal(compressionType, reader.Entry.CompressionType);
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
CompareFilesByPath(
Path.Combine(SCRATCH_FILES_PATH, "news.txt"),
Path.Combine(MISC_TEST_FILES_PATH, "news.txt")
);
}

private void DoArj_Multi_Reader(string[] archives)

@@ -1,6 +1,5 @@
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Rar;
using SharpCompress.Common;
@@ -293,15 +292,9 @@ public class RarArchiveTests : ArchiveTests
[Fact]
public void Rar_ArchiveFileRead() => ArchiveFileRead("Rar.rar");

[Fact]
public Task Rar_ArchiveFileRead_Multithreaded() => ArchiveFileRead_Multithreaded("Rar.rar");

[Fact]
public void Rar5_ArchiveFileRead() => ArchiveFileRead("Rar5.rar");

[Fact]
public Task Rar5_ArchiveFileRead_Multithreaded() => ArchiveFileRead_Multithreaded("Rar5.rar");

[Fact]
public void Rar_ArchiveFileRead_HasDirectories() =>
DoRar_ArchiveFileRead_HasDirectories("Rar.rar");
@@ -366,9 +359,6 @@ public class RarArchiveTests : ArchiveTests
[Fact]
public void Rar2_ArchiveFileRead() => ArchiveFileRead("Rar2.rar");

[Fact]
public Task Rar2_ArchiveFileRead_Multithreaded() => ArchiveFileRead_Multithreaded("Rar2.rar");

[Fact]
public void Rar15_ArchiveFileRead()
{

@@ -15,25 +15,29 @@ public class RarReaderAsyncTests : ReaderTests
{
[Fact]
public async Task Rar_Multi_Reader_Async() =>
await DoRar_Multi_Reader_Async([
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar",
]);
await DoRar_Multi_Reader_Async(
[
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar",
]
);

[Fact]
public async Task Rar5_Multi_Reader_Async() =>
await DoRar_Multi_Reader_Async([
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
"Rar5.multi.part03.rar",
"Rar5.multi.part04.rar",
"Rar5.multi.part05.rar",
"Rar5.multi.part06.rar",
]);
await DoRar_Multi_Reader_Async(
[
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
"Rar5.multi.part03.rar",
"Rar5.multi.part04.rar",
"Rar5.multi.part05.rar",
"Rar5.multi.part06.rar",
]
);

private async Task DoRar_Multi_Reader_Async(string[] archives)
{
@@ -91,25 +95,29 @@ public class RarReaderAsyncTests : ReaderTests

[Fact]
public async Task Rar_Multi_Reader_Delete_Files_Async() =>
await DoRar_Multi_Reader_Delete_Files_Async([
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar",
]);
await DoRar_Multi_Reader_Delete_Files_Async(
[
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar",
]
);

[Fact]
public async Task Rar5_Multi_Reader_Delete_Files_Async() =>
await DoRar_Multi_Reader_Delete_Files_Async([
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
"Rar5.multi.part03.rar",
"Rar5.multi.part04.rar",
"Rar5.multi.part05.rar",
"Rar5.multi.part06.rar",
]);
await DoRar_Multi_Reader_Delete_Files_Async(
[
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
"Rar5.multi.part03.rar",
"Rar5.multi.part04.rar",
"Rar5.multi.part05.rar",
"Rar5.multi.part06.rar",
]
);

private async Task DoRar_Multi_Reader_Delete_Files_Async(string[] archives)
{

@@ -14,25 +14,29 @@ public class RarReaderTests : ReaderTests
{
[Fact]
public void Rar_Multi_Reader() =>
DoRar_Multi_Reader([
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar",
]);
DoRar_Multi_Reader(
[
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar",
]
);

[Fact]
public void Rar5_Multi_Reader() =>
DoRar_Multi_Reader([
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
"Rar5.multi.part03.rar",
"Rar5.multi.part04.rar",
"Rar5.multi.part05.rar",
"Rar5.multi.part06.rar",
]);
DoRar_Multi_Reader(
[
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
"Rar5.multi.part03.rar",
"Rar5.multi.part04.rar",
"Rar5.multi.part05.rar",
"Rar5.multi.part06.rar",
]
);

private void DoRar_Multi_Reader(string[] archives)
{
@@ -57,14 +61,16 @@ public class RarReaderTests : ReaderTests

[Fact]
public void Rar_Multi_Reader_Encrypted() =>
DoRar_Multi_Reader_Encrypted([
"Rar.EncryptedParts.part01.rar",
"Rar.EncryptedParts.part02.rar",
"Rar.EncryptedParts.part03.rar",
"Rar.EncryptedParts.part04.rar",
"Rar.EncryptedParts.part05.rar",
"Rar.EncryptedParts.part06.rar",
]);
DoRar_Multi_Reader_Encrypted(
[
"Rar.EncryptedParts.part01.rar",
"Rar.EncryptedParts.part02.rar",
"Rar.EncryptedParts.part03.rar",
"Rar.EncryptedParts.part04.rar",
"Rar.EncryptedParts.part05.rar",
"Rar.EncryptedParts.part06.rar",
]
);

private void DoRar_Multi_Reader_Encrypted(string[] archives) =>
Assert.Throws<InvalidFormatException>(() =>

[Fact]
public void Rar_Multi_Reader_Delete_Files() =>
DoRar_Multi_Reader_Delete_Files([
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar",
]);
DoRar_Multi_Reader_Delete_Files(
[
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar",
]
);

[Fact]
public void Rar5_Multi_Reader_Delete_Files() =>
DoRar_Multi_Reader_Delete_Files([
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
"Rar5.multi.part03.rar",
"Rar5.multi.part04.rar",
"Rar5.multi.part05.rar",
"Rar5.multi.part06.rar",
]);
DoRar_Multi_Reader_Delete_Files(
[
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
"Rar5.multi.part03.rar",
"Rar5.multi.part04.rar",
"Rar5.multi.part05.rar",
"Rar5.multi.part06.rar",
]
);

private void DoRar_Multi_Reader_Delete_Files(string[] archives)
{
@@ -397,14 +407,16 @@ public class RarReaderTests : ReaderTests
Path.Combine("exe", "test.exe"),
}
);
using var reader = RarReader.Open([
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part01.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part02.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part03.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part04.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part05.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part06.rar"),
]);
using var reader = RarReader.Open(
[
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part01.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part02.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part03.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part04.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part05.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part06.rar"),
]
);
while (reader.MoveToNextEntry())
{
Assert.Equal(expectedOrder.Pop(), reader.Entry.Key);

@@ -176,27 +176,6 @@ public abstract class ReaderTests : TestBase
}
}

protected void ReadForBufferBoundaryCheck(string fileName, CompressionType compressionType)
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, fileName));
using var reader = ReaderFactory.Open(stream, new ReaderOptions { LookForHeader = true });

while (reader.MoveToNextEntry())
{
Assert.Equal(compressionType, reader.Entry.CompressionType);

reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}

CompareFilesByPath(
Path.Combine(SCRATCH_FILES_PATH, "alice29.txt"),
Path.Combine(MISC_TEST_FILES_PATH, "alice29.txt")
);
}

protected void Iterate(
string testArchive,
string fileOrder,

@@ -74,21 +74,9 @@ public class XzIndexAsyncTests : XzTestsBase
public async Task SkipsPaddingAsync()
{
// Index with 3-byte padding.
using Stream badStream = new MemoryStream([
0x00,
0x01,
0x10,
0x80,
0x01,
0x00,
0x00,
0x00,
0xB1,
0x01,
0xD9,
0xC9,
0xFF,
]);
using Stream badStream = new MemoryStream(
[0x00, 0x01, 0x10, 0x80, 0x01, 0x00, 0x00, 0x00, 0xB1, 0x01, 0xD9, 0xC9, 0xFF]
);
var br = new BinaryReader(badStream);
var index = new XZIndex(br, false);
await index.ProcessAsync().ConfigureAwait(false);

@@ -71,21 +71,9 @@ public class XzIndexTests : XzTestsBase
public void SkipsPadding()
{
// Index with 3-byte padding.
using Stream badStream = new MemoryStream([
0x00,
0x01,
0x10,
0x80,
0x01,
0x00,
0x00,
0x00,
0xB1,
0x01,
0xD9,
0xC9,
0xFF,
]);
using Stream badStream = new MemoryStream(
[0x00, 0x01, 0x10, 0x80, 0x01, 0x00, 0x00, 0x00, 0xB1, 0x01, 0xD9, 0xC9, 0xFF]
);
var br = new BinaryReader(badStream);
var index = new XZIndex(br, false);
index.Process();

@@ -1,441 +0,0 @@
using System;
using System.Buffers.Binary;
using System.IO;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Writers;
using SharpCompress.Writers.Zip;
using Xunit;

namespace SharpCompress.Test.Zip;

/// <summary>
/// Tests for verifying version consistency between Local File Header (LFH)
/// and Central Directory File Header (CDFH) when using Zip64.
/// </summary>
public class Zip64VersionConsistencyTests : WriterTests
{
public Zip64VersionConsistencyTests()
: base(ArchiveType.Zip) { }

[Fact]
public void Zip64_Small_File_With_UseZip64_Should_Have_Matching_Versions()
{
// Create a zip with UseZip64=true but with a small file
var filename = Path.Combine(SCRATCH2_FILES_PATH, "zip64_version_test.zip");

if (File.Exists(filename))
{
File.Delete(filename);
}

// Create archive with UseZip64=true
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.Deflate)
{
LeaveStreamOpen = false,
UseZip64 = true,
};

ZipArchive zipArchive = ZipArchive.Create();
zipArchive.AddEntry("empty", new MemoryStream());
zipArchive.SaveTo(filename, writerOptions);

// Now read the raw bytes to verify version consistency
using var fs = File.OpenRead(filename);
using var br = new BinaryReader(fs);

// Read Local File Header
var lfhSignature = br.ReadUInt32();
Assert.Equal(0x04034b50u, lfhSignature); // Local file header signature

var lfhVersion = br.ReadUInt16();

// Skip to Central Directory
// Find Central Directory by searching from the end
fs.Seek(-22, SeekOrigin.End); // Min EOCD size
var eocdSignature = br.ReadUInt32();

if (eocdSignature != 0x06054b50u)
{
// Might have Zip64 EOCD, search backwards
fs.Seek(-100, SeekOrigin.End);
var buffer = new byte[100];
fs.Read(buffer, 0, 100);

// Find EOCD signature
for (int i = buffer.Length - 4; i >= 0; i--)
{
if (BinaryPrimitives.ReadUInt32LittleEndian(buffer.AsSpan(i)) == 0x06054b50u)
{
fs.Seek(-100 + i, SeekOrigin.End);
break;
}
}
}

// Read EOCD
fs.Seek(-22, SeekOrigin.End);
br.ReadUInt32(); // EOCD signature
br.ReadUInt16(); // disk number
br.ReadUInt16(); // disk with central dir
br.ReadUInt16(); // entries on this disk
br.ReadUInt16(); // total entries
br.ReadUInt32(); // central directory size (unused)
var cdOffset = br.ReadUInt32();

// If Zip64, need to read from Zip64 EOCD
if (cdOffset == 0xFFFFFFFF)
{
// Find Zip64 EOCD Locator
fs.Seek(-22 - 20, SeekOrigin.End);
var z64eocdlSig = br.ReadUInt32();
if (z64eocdlSig == 0x07064b50u)
{
br.ReadUInt32(); // disk number
var z64eocdOffset = br.ReadUInt64();
br.ReadUInt32(); // total disks

// Read Zip64 EOCD
fs.Seek((long)z64eocdOffset, SeekOrigin.Begin);
br.ReadUInt32(); // signature
br.ReadUInt64(); // size of EOCD64
br.ReadUInt16(); // version made by
br.ReadUInt16(); // version needed
br.ReadUInt32(); // disk number
br.ReadUInt32(); // disk with CD
br.ReadUInt64(); // entries on disk
br.ReadUInt64(); // total entries
br.ReadUInt64(); // CD size
cdOffset = (uint)br.ReadUInt64(); // CD offset
}
}

// Read Central Directory Header
fs.Seek(cdOffset, SeekOrigin.Begin);
var cdhSignature = br.ReadUInt32();
Assert.Equal(0x02014b50u, cdhSignature); // Central directory header signature

br.ReadUInt16(); // version made by
var cdhVersionNeeded = br.ReadUInt16();

// The versions should match when UseZip64 is true
Assert.Equal(lfhVersion, cdhVersionNeeded);
}

[Fact]
public void Zip64_Small_File_Without_UseZip64_Should_Have_Version_20()
{
// Create a zip without UseZip64
var filename = Path.Combine(SCRATCH2_FILES_PATH, "no_zip64_version_test.zip");

if (File.Exists(filename))
{
File.Delete(filename);
}

// Create archive without UseZip64
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.Deflate)
{
LeaveStreamOpen = false,
UseZip64 = false,
};

ZipArchive zipArchive = ZipArchive.Create();
zipArchive.AddEntry("empty", new MemoryStream());
zipArchive.SaveTo(filename, writerOptions);

// Read the raw bytes
using var fs = File.OpenRead(filename);
using var br = new BinaryReader(fs);

// Read Local File Header version
var lfhSignature = br.ReadUInt32();
Assert.Equal(0x04034b50u, lfhSignature);
var lfhVersion = br.ReadUInt16();

// Read Central Directory Header version
fs.Seek(-22, SeekOrigin.End);
br.ReadUInt32(); // EOCD signature
br.ReadUInt16(); // disk number
br.ReadUInt16(); // disk with central dir
br.ReadUInt16(); // entries on this disk
br.ReadUInt16(); // total entries
br.ReadUInt32(); // CD size
var cdOffset = br.ReadUInt32();

fs.Seek(cdOffset, SeekOrigin.Begin);
var cdhSignature = br.ReadUInt32();
Assert.Equal(0x02014b50u, cdhSignature);
br.ReadUInt16(); // version made by
var cdhVersionNeeded = br.ReadUInt16();

// Both should be version 20 (or less)
Assert.True(lfhVersion <= 20);
Assert.Equal(lfhVersion, cdhVersionNeeded);
}

[Fact]
public void LZMA_Compression_Should_Use_Version_63()
{
// Create a zip with LZMA compression
var filename = Path.Combine(SCRATCH2_FILES_PATH, "lzma_version_test.zip");

if (File.Exists(filename))
{
File.Delete(filename);
}

WriterOptions writerOptions = new ZipWriterOptions(CompressionType.LZMA)
{
LeaveStreamOpen = false,
UseZip64 = false,
};

ZipArchive zipArchive = ZipArchive.Create();
var data = new byte[100];
new Random(42).NextBytes(data);
zipArchive.AddEntry("test.bin", new MemoryStream(data));
zipArchive.SaveTo(filename, writerOptions);

// Read the raw bytes
using var fs = File.OpenRead(filename);
using var br = new BinaryReader(fs);

// Read Local File Header version
var lfhSignature = br.ReadUInt32();
Assert.Equal(0x04034b50u, lfhSignature);
var lfhVersion = br.ReadUInt16();

// Read Central Directory Header version
fs.Seek(-22, SeekOrigin.End);
br.ReadUInt32(); // EOCD signature
br.ReadUInt16(); // disk number
br.ReadUInt16(); // disk with central dir
br.ReadUInt16(); // entries on this disk
br.ReadUInt16(); // total entries
br.ReadUInt32(); // CD size
var cdOffset = br.ReadUInt32();

fs.Seek(cdOffset, SeekOrigin.Begin);
var cdhSignature = br.ReadUInt32();
Assert.Equal(0x02014b50u, cdhSignature);
br.ReadUInt16(); // version made by
var cdhVersionNeeded = br.ReadUInt16();

// Both should be version 63 for LZMA
Assert.Equal(63, lfhVersion);
Assert.Equal(lfhVersion, cdhVersionNeeded);
}

[Fact]
public void PPMd_Compression_Should_Use_Version_63()
{
// Create a zip with PPMd compression
var filename = Path.Combine(SCRATCH2_FILES_PATH, "ppmd_version_test.zip");

if (File.Exists(filename))
{
File.Delete(filename);
}

WriterOptions writerOptions = new ZipWriterOptions(CompressionType.PPMd)
{
LeaveStreamOpen = false,
UseZip64 = false,
};

ZipArchive zipArchive = ZipArchive.Create();
var data = new byte[100];
new Random(42).NextBytes(data);
zipArchive.AddEntry("test.bin", new MemoryStream(data));
zipArchive.SaveTo(filename, writerOptions);

// Read the raw bytes
using var fs = File.OpenRead(filename);
using var br = new BinaryReader(fs);

// Read Local File Header version
var lfhSignature = br.ReadUInt32();
Assert.Equal(0x04034b50u, lfhSignature);
var lfhVersion = br.ReadUInt16();

// Read Central Directory Header version
fs.Seek(-22, SeekOrigin.End);
br.ReadUInt32(); // EOCD signature
br.ReadUInt16(); // disk number
br.ReadUInt16(); // disk with central dir
br.ReadUInt16(); // entries on this disk
br.ReadUInt16(); // total entries
br.ReadUInt32(); // CD size
var cdOffset = br.ReadUInt32();

fs.Seek(cdOffset, SeekOrigin.Begin);
var cdhSignature = br.ReadUInt32();
Assert.Equal(0x02014b50u, cdhSignature);
br.ReadUInt16(); // version made by
var cdhVersionNeeded = br.ReadUInt16();

// Both should be version 63 for PPMd
Assert.Equal(63, lfhVersion);
Assert.Equal(lfhVersion, cdhVersionNeeded);
}

[Fact]
public void Zip64_Multiple_Small_Files_With_UseZip64_Should_Have_Matching_Versions()
{
// Create a zip with UseZip64=true but with multiple small files
var filename = Path.Combine(SCRATCH2_FILES_PATH, "zip64_version_multiple_test.zip");

if (File.Exists(filename))
{
File.Delete(filename);
}

WriterOptions writerOptions = new ZipWriterOptions(CompressionType.Deflate)
{
LeaveStreamOpen = false,
UseZip64 = true,
};

ZipArchive zipArchive = ZipArchive.Create();
for (int i = 0; i < 5; i++)
{
var data = new byte[100];
new Random(i).NextBytes(data);
zipArchive.AddEntry($"file{i}.bin", new MemoryStream(data));
}
zipArchive.SaveTo(filename, writerOptions);

// Verify that all entries have matching versions
using var fs = File.OpenRead(filename);
using var br = new BinaryReader(fs);

// Read all LFH versions
var lfhVersions = new System.Collections.Generic.List<ushort>();
while (true)
{
var sig = br.ReadUInt32();
if (sig == 0x04034b50u) // LFH signature
{
var version = br.ReadUInt16();
lfhVersions.Add(version);

// Skip rest of LFH
br.ReadUInt16(); // flags
br.ReadUInt16(); // compression
br.ReadUInt32(); // mod time
br.ReadUInt32(); // crc
br.ReadUInt32(); // compressed size
br.ReadUInt32(); // uncompressed size
var fnLen = br.ReadUInt16();
var extraLen = br.ReadUInt16();
fs.Seek(fnLen + extraLen, SeekOrigin.Current);

// Skip compressed data by reading compressed size from extra field if zip64
// For simplicity in this test, we'll just find the next signature
var found = false;

while (fs.Position < fs.Length - 4)
{
var b = br.ReadByte();
if (b == 0x50)
{
var nextBytes = br.ReadBytes(3);
if (
(nextBytes[0] == 0x4b && nextBytes[1] == 0x03 && nextBytes[2] == 0x04)
|| // LFH
(nextBytes[0] == 0x4b && nextBytes[1] == 0x01 && nextBytes[2] == 0x02)
) // CDH
{
fs.Seek(-4, SeekOrigin.Current);
found = true;
break;
}
}
}

if (!found)
{
break;
}
}
else if (sig == 0x02014b50u) // CDH signature
{
break; // Reached central directory
}
else
{
break; // Unknown signature
}
}

// Find Central Directory
fs.Seek(-22, SeekOrigin.End);
br.ReadUInt32(); // EOCD signature
br.ReadUInt16(); // disk number
br.ReadUInt16(); // disk with central dir
br.ReadUInt16(); // entries on this disk
var totalEntries = br.ReadUInt16();
br.ReadUInt32(); // CD size
var cdOffset = br.ReadUInt32();

// Check if we need Zip64 EOCD
if (cdOffset == 0xFFFFFFFF)
{
fs.Seek(-22 - 20, SeekOrigin.End);
var z64eocdlSig = br.ReadUInt32();
if (z64eocdlSig == 0x07064b50u)
{
br.ReadUInt32(); // disk number
var z64eocdOffset = br.ReadUInt64();
fs.Seek((long)z64eocdOffset, SeekOrigin.Begin);
br.ReadUInt32(); // signature
br.ReadUInt64(); // size
br.ReadUInt16(); // version made by
br.ReadUInt16(); // version needed
br.ReadUInt32(); // disk number
br.ReadUInt32(); // disk with CD
br.ReadUInt64(); // entries on disk
totalEntries = (ushort)br.ReadUInt64(); // total entries
br.ReadUInt64(); // CD size
cdOffset = (uint)br.ReadUInt64(); // CD offset
}
}

// Read CDH versions
fs.Seek(cdOffset, SeekOrigin.Begin);
var cdhVersions = new System.Collections.Generic.List<ushort>();
for (int i = 0; i < totalEntries; i++)
{
var sig = br.ReadUInt32();
Assert.Equal(0x02014b50u, sig);
br.ReadUInt16(); // version made by
var version = br.ReadUInt16();
cdhVersions.Add(version);

// Skip rest of CDH
br.ReadUInt16(); // flags
br.ReadUInt16(); // compression
br.ReadUInt32(); // mod time
br.ReadUInt32(); // crc
br.ReadUInt32(); // compressed size
br.ReadUInt32(); // uncompressed size
var fnLen = br.ReadUInt16();
var extraLen = br.ReadUInt16();
var commentLen = br.ReadUInt16();
br.ReadUInt16(); // disk number start
br.ReadUInt16(); // internal attributes
br.ReadUInt32(); // external attributes
br.ReadUInt32(); // LFH offset
fs.Seek(fnLen + extraLen + commentLen, SeekOrigin.Current);
}

// Verify all versions match
Assert.Equal(lfhVersions.Count, cdhVersions.Count);
for (int i = 0; i < lfhVersions.Count; i++)
{
Assert.Equal(lfhVersions[i], cdhVersions[i]);
}
}
}
@@ -2,7 +2,6 @@ using System;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
@@ -89,10 +88,6 @@ public class ZipArchiveTests : ArchiveTests
[Fact]
public void Zip_Deflate_ArchiveFileRead() => ArchiveFileRead("Zip.deflate.zip");

[Fact]
public Task Zip_Deflate_ArchiveFileRead_Multithreaded() =>
ArchiveFileRead_Multithreaded("Zip.deflate.zip");

[Fact]
public void Zip_Deflate_ArchiveExtractToDirectory() =>
ArchiveExtractToDirectory("Zip.deflate.zip");

@@ -4,9 +4,9 @@
".NETFramework,Version=v4.8": {
"AwesomeAssertions": {
"type": "Direct",
"requested": "[9.3.0, )",
"resolved": "9.3.0",
"contentHash": "8lGLYap2ec2gNLgjf2xKZaKLpQ7j36oJvrYzBVVpNAumqnxRdevqqhEF66qxE92f8y2+zsbQ061DeHG61ZhzaQ==",
"requested": "[9.2.1, )",
"resolved": "9.2.1",
"contentHash": "lbwhyQNXxxEGx4oCbFqNfFy2DsywsvNhN6qoOjY4wwhMgI2L9+YrxjyF/M0io99yrvWV1Cjj12LP2QGcC43Uhw==",
"dependencies": {
"System.Threading.Tasks.Extensions": "4.5.4"
}
@@ -199,9 +199,9 @@
"net8.0": {
"AwesomeAssertions": {
"type": "Direct",
"requested": "[9.3.0, )",
"resolved": "9.3.0",
"contentHash": "8lGLYap2ec2gNLgjf2xKZaKLpQ7j36oJvrYzBVVpNAumqnxRdevqqhEF66qxE92f8y2+zsbQ061DeHG61ZhzaQ=="
"requested": "[9.2.1, )",
"resolved": "9.2.1",
"contentHash": "lbwhyQNXxxEGx4oCbFqNfFy2DsywsvNhN6qoOjY4wwhMgI2L9+YrxjyF/M0io99yrvWV1Cjj12LP2QGcC43Uhw=="
},
"Microsoft.NET.Test.Sdk": {
"type": "Direct",

Binary file not shown (9 files).
File diff suppressed because it is too large.