Mirror of https://github.com/adamhathcock/sharpcompress.git, synced 2026-02-04 05:25:00 +00:00
Compare commits
505 Commits
(The commit list in this compare view was captured with only bare SHA values; the author, date, and commit message columns were empty, so the 505-row table is omitted here.)
.config/dotnet-tools.json (file name inferred from content)
@@ -3,10 +3,11 @@
  "isRoot": true,
  "tools": {
    "csharpier": {
      "version": "0.28.1",
      "version": "1.2.1",
      "commands": [
        "dotnet-csharpier"
      ]
        "csharpier"
      ],
      "rollForward": false
    }
  }
}
.copilot-agent.yml (new file, 7 lines)
@@ -0,0 +1,7 @@
enabled: true
agent:
  name: copilot-coding-agent
allow:
  - paths: ["src/**/*", "tests/**/*", "README.md", "AGENTS.md"]
    actions: ["create", "modify"]
require_review_before_merge: true
.github/COPILOT_AGENT_README.md (vendored, new file, 15 lines)
@@ -0,0 +1,15 @@
# Copilot Coding Agent Configuration

This repository includes a minimal opt-in configuration and CI workflow to allow the GitHub Copilot coding agent to open and validate PRs.

- .copilot-agent.yml: opt-in config for automated agents
- .github/agents/copilot-agent.yml: detailed agent policy configuration
- .github/workflows/dotnetcore.yml: CI runs on PRs touching the solution, source, or tests to validate changes
- AGENTS.md: general instructions for Copilot coding agent with project-specific guidelines

Maintainers can adjust the allowed paths or disable the agent by editing or removing .copilot-agent.yml.

Notes:
- The agent can create, modify, and delete files within the allowed paths (src, tests, README.md, AGENTS.md)
- All changes require review before merge
- If build/test paths are different, update the workflow accordingly; this workflow targets SharpCompress.sln and the SharpCompress.Test test project.
.github/agents/copilot-agent.yml (vendored, new file, 17 lines)
@@ -0,0 +1,17 @@
enabled: true
agent:
  name: copilot-coding-agent
allow:
  - paths: ["src/**/*", "tests/**/*", "README.md", "AGENTS.md"]
    actions: ["create", "modify", "delete"]
require_review_before_merge: true
required_approvals: 1
allowed_merge_strategies:
  - squash
  - merge
auto_merge_on_green: false
run_workflows: true
notes: |
  - This manifest expresses the policy for the Copilot coding agent in this repository.
  - It does NOT install or authorize the agent; a repository admin must install the Copilot coding agent app and grant the repository the necessary permissions (contents: write, pull_requests: write, checks: write, actions: write/read, issues: write) to allow the agent to act.
  - Keep allow paths narrow and prefer require_review_before_merge during initial rollout.
.github/dependabot.yml (vendored, 13 lines changed)
@@ -1,6 +1,13 @@
version: 2
updates:
  - package-ecosystem: "github-actions" # search for actions - there are other options available
    directory: "/" # search in .github/workflows under root `/`
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly" # check for action update every week
      interval: "weekly"

  - package-ecosystem: "nuget"
    directory: "/" # change to "/src/YourProject" if .csproj files are in subfolders
    schedule:
      interval: "weekly"
    open-pull-requests-limit: 5
    # optional: target-branch: "master"
.github/workflows/dotnetcore.yml (vendored, 7 lines changed)
@@ -3,6 +3,7 @@ on:
  push:
    branches:
      - 'master'
      - 'release'
  pull_request:
    types: [ opened, synchronize, reopened, ready_for_review ]

@@ -14,12 +15,12 @@ jobs:
        os: [windows-latest, ubuntu-latest]

    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-dotnet@v4
      - uses: actions/checkout@v6
      - uses: actions/setup-dotnet@v5
        with:
          dotnet-version: 8.0.x
      - run: dotnet run --project build/build.csproj
      - uses: actions/upload-artifact@v4
      - uses: actions/upload-artifact@v5
        with:
          name: ${{ matrix.os }}-sharpcompress.nupkg
          path: artifacts/*
.gitignore (vendored, 3 lines changed)
@@ -11,6 +11,8 @@ TestResults/
packages/*/
project.lock.json
tests/TestArchives/Scratch
tests/TestArchives/*/Scratch
tests/TestArchives/*/Scratch2
.vs
tools
.vscode
@@ -18,4 +20,3 @@ tools

.DS_Store
*.snupkg
/tests/TestArchives/6d23a38c-f064-4ef1-ad89-b942396f53b9/Scratch
AGENTS.md (new file, 118 lines)
@@ -0,0 +1,118 @@
---
description: 'Guidelines for building SharpCompress - A C# compression library'
applyTo: '**/*.cs'
---

# SharpCompress Development

## About SharpCompress
SharpCompress is a pure C# compression library supporting multiple archive formats (Zip, Tar, GZip, BZip2, 7Zip, Rar, LZip, XZ, ZStandard) for .NET Framework 4.6.2, .NET Standard 2.1, .NET 6.0, and .NET 8.0. The library provides both seekable Archive APIs and forward-only Reader/Writer APIs for streaming scenarios.

## C# Instructions
- Always use the latest C# version (currently C# 13) and its features.
- Write clear and concise comments for each function.
- Follow the existing code style and patterns in the codebase.

## General Instructions
- Make only high-confidence suggestions when reviewing code changes.
- Write code with good maintainability practices, including comments on why certain design decisions were made.
- Handle edge cases and write clear exception handling.
- For libraries or external dependencies, mention their usage and purpose in comments.
- Preserve backward compatibility when making changes to public APIs.

## Naming Conventions

- Follow PascalCase for component names, method names, and public members.
- Use camelCase for private fields and local variables.
- Prefix interface names with "I" (e.g., IUserService).

## Code Formatting

- Use CSharpier for code formatting to ensure consistent style across the project
- CSharpier is configured as a local tool in `.config/dotnet-tools.json`
- Restore tools with: `dotnet tool restore`
- Format files from the project root with: `dotnet csharpier .`
- **Run `dotnet csharpier .` from the project root after making code changes, before committing**
- Configure your IDE to format on save using CSharpier for the best experience
- The project also uses `.editorconfig` for editor settings (indentation, encoding, etc.)
- Let CSharpier handle code style while `.editorconfig` handles editor behavior

## Project Setup and Structure

- The project targets multiple frameworks: .NET Framework 4.6.2, .NET Standard 2.1, .NET 6.0, and .NET 8.0
- Main library is in `src/SharpCompress/`
- Tests are in `tests/SharpCompress.Test/`
- Performance tests are in `tests/SharpCompress.Performance/`
- Test archives are in `tests/TestArchives/`
- Build project is in `build/`
- Use `dotnet build` to build the solution
- Use `dotnet test` to run tests
- Solution file: `SharpCompress.sln`

## Nullable Reference Types

- Declare variables non-nullable, and check for `null` at entry points.
- Always use `is null` or `is not null` instead of `== null` or `!= null`.
- Trust the C# null annotations and don't add null checks when the type system says a value cannot be null.
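A hypothetical illustration of these rules (the type and members below are made up for the example, not taken from the codebase):

```csharp
// Illustrative only: validate at the entry point with pattern matching,
// then trust the non-nullable annotations for the rest of the method.
public static long GetEntrySize(ArchiveEntryInfo? entry)
{
    if (entry is null)
    {
        throw new ArgumentNullException(nameof(entry));
    }

    // entry.Size is declared non-nullable, so no extra null check is added here.
    return entry.Size;
}
```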
## SharpCompress-Specific Guidelines

### Supported Formats
SharpCompress supports multiple archive and compression formats:
- **Archive Formats**: Zip, Tar, 7Zip, Rar (read-only)
- **Compression**: DEFLATE, BZip2, LZMA/LZMA2, PPMd, ZStandard (decompress only), Deflate64 (decompress only)
- **Combined Formats**: Tar.GZip, Tar.BZip2, Tar.LZip, Tar.XZ, Tar.ZStandard
- See FORMATS.md for complete format support matrix

### Stream Handling Rules
- **Disposal**: As of version 0.21, SharpCompress closes wrapped streams by default
- Use `ReaderOptions` or `WriterOptions` with `LeaveStreamOpen = true` to control stream disposal
- Use `NonDisposingStream` wrapper when working with compression streams directly to prevent disposal
- Always dispose of readers, writers, and archives in `using` blocks
- For forward-only operations, use Reader/Writer APIs; for random access, use Archive APIs
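A short sketch of the disposal rules above (minimal and illustrative; the file name is a placeholder):

```csharp
// Keep the caller's stream open by opting out of the default disposal behaviour.
using (Stream stream = File.OpenRead("archive.zip"))
using (var reader = ReaderFactory.Open(stream, new ReaderOptions { LeaveStreamOpen = true }))
{
    while (reader.MoveToNextEntry())
    {
        // forward-only processing of each entry goes here
    }
}
// The outer using block disposes the stream; the reader did not.
```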
### Async/Await Patterns
- All I/O operations support async/await with `CancellationToken`
- Async methods follow the naming convention: `MethodNameAsync`
- Key async methods:
  - `WriteEntryToAsync` - Extract entry asynchronously
  - `WriteAllToDirectoryAsync` - Extract all entries asynchronously
  - `WriteAsync` - Write entry asynchronously
  - `WriteAllAsync` - Write directory asynchronously
  - `OpenEntryStreamAsync` - Open entry stream asynchronously
- Always provide `CancellationToken` parameter in async methods

### Archive APIs vs Reader/Writer APIs
- **Archive API**: Use for random access with seekable streams (e.g., `ZipArchive`, `TarArchive`)
- **Reader API**: Use for forward-only reading on non-seekable streams (e.g., `ZipReader`, `TarReader`)
- **Writer API**: Use for forward-only writing on streams (e.g., `ZipWriter`, `TarWriter`)
- 7Zip only supports Archive API due to format limitations
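To make the split above concrete, a minimal sketch using the same APIs shown elsewhere in this document:

```csharp
// Archive API: random access over a seekable source.
using (var archive = ZipArchive.Open("archive.zip"))
{
    foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
    {
        // entries can be inspected and opened in any order
    }
}

// Reader API: forward-only access, suitable for non-seekable streams.
using (Stream stream = File.OpenRead("archive.zip"))
using (var reader = ReaderFactory.Open(stream))
{
    while (reader.MoveToNextEntry())
    {
        // entries must be consumed in the order they appear
    }
}
```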
### Tar-Specific Considerations
- Tar format requires file size in the header
- If no size is specified to TarWriter and the stream is not seekable, an exception will be thrown
- Tar combined with compression (GZip, BZip2, LZip, XZ) is supported
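For example, writing from a seekable `FileStream` lets the Tar writer determine the size it needs for the header (a minimal sketch; the file names are placeholders):

```csharp
using (Stream output = File.OpenWrite("output.tar"))
using (var writer = WriterFactory.Open(output, ArchiveType.Tar, new WriterOptions(CompressionType.None)))
using (Stream input = File.OpenRead("data.bin"))
{
    // input is seekable, so its length is available for the tar header;
    // a non-seekable source without an explicit size would throw instead.
    writer.Write("data.bin", input, DateTime.Now);
}
```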
### Zip-Specific Considerations
- Supports Zip64 for large files (seekable streams only)
- Supports PKWare and WinZip AES encryption
- Multiple compression methods: None, Shrink, Reduce, Implode, DEFLATE, Deflate64, BZip2, LZMA, PPMd
- Encrypted LZMA is not supported

### Performance Considerations
- For large files, use Reader/Writer APIs with non-seekable streams to avoid loading the entire file in memory
- Leverage async I/O for better scalability
- Consider compression level trade-offs (speed vs. size)
- Use appropriate buffer sizes for stream operations

## Testing

- Always include test cases for critical paths of the application.
- Test with multiple archive formats when making changes to core functionality.
- Include tests for both Archive and Reader/Writer APIs when applicable.
- Test async operations with cancellation tokens.
- Do not emit "Act", "Arrange" or "Assert" comments.
- Copy existing style in nearby files for test method names and capitalization.
- Use test archives from the `tests/TestArchives` directory for consistency.
- Test stream disposal and `LeaveStreamOpen` behavior.
- Test edge cases: empty archives, large files, corrupted archives, encrypted archives.
Directory.Packages.props (file name inferred from content)
@@ -1,18 +1,21 @@
<Project>
  <ItemGroup>
    <PackageVersion Include="Bullseye" Version="5.0.0" />
    <PackageVersion Include="FluentAssertions" Version="6.12.0" />
    <PackageVersion Include="Bullseye" Version="6.0.0" />
    <PackageVersion Include="AwesomeAssertions" Version="9.3.0" />
    <PackageVersion Include="Glob" Version="1.1.9" />
    <PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.14" />
    <PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="8.0.0" />
    <PackageVersion Include="Microsoft.NET.Test.Sdk" Version="17.9.0" />
    <PackageVersion Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
    <PackageVersion Include="Microsoft.NET.Test.Sdk" Version="18.0.0" />
    <PackageVersion Include="Mono.Posix.NETStandard" Version="1.0.0" />
    <PackageVersion Include="SimpleExec" Version="12.0.0" />
    <PackageVersion Include="System.Memory" Version="4.5.5" />
    <PackageVersion Include="System.Buffers" Version="4.6.1" />
    <PackageVersion Include="System.Memory" Version="4.6.3" />
    <PackageVersion Include="System.Text.Encoding.CodePages" Version="8.0.0" />
    <PackageVersion Include="xunit" Version="2.7.1" />
    <PackageVersion Include="xunit.runner.visualstudio" Version="2.5.8" />
    <PackageVersion Include="xunit.SkippableFact" Version="1.4.13" />
    <PackageVersion Include="ZstdSharp.Port" Version="0.8.0" />
    <PackageVersion Include="xunit" Version="2.9.3" />
    <PackageVersion Include="xunit.runner.visualstudio" Version="3.1.5" />
    <PackageVersion Include="ZstdSharp.Port" Version="0.8.6" />
    <PackageVersion Include="Microsoft.NET.ILLink.Tasks" Version="8.0.21" />
    <PackageVersion Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
    <PackageVersion Include="Microsoft.NETFramework.ReferenceAssemblies" Version="1.0.3" />
  </ItemGroup>
</Project>
FORMATS.md (file name inferred from content)
@@ -11,10 +11,11 @@
| Archive Format | Compression Format(s) | Compress/Decompress | Archive API | Reader API | Writer API |
| --- | --- | --- | --- | --- | --- |
| Rar | Rar | Decompress (1) | RarArchive | RarReader | N/A |
| Zip (2) | None, DEFLATE, Deflate64, BZip2, LZMA/LZMA2, PPMd | Both | ZipArchive | ZipReader | ZipWriter |
| Zip (2) | None, Shrink, Reduce, Implode, DEFLATE, Deflate64, BZip2, LZMA/LZMA2, PPMd | Both | ZipArchive | ZipReader | ZipWriter |
| Tar | None | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.GZip | DEFLATE | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.BZip2 | BZip2 | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.Zstandard | ZStandard | Decompress | TarArchive | TarReader | N/A |
| Tar.LZip | LZMA | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.XZ | LZMA2 | Decompress | TarArchive | TarReader | TarWriter (3) |
| GZip (single file) | DEFLATE | Both | GZipArchive | GZipReader | GZipWriter |
@@ -41,6 +42,7 @@ For those who want to directly compress/decompress bits. The single file formats
| ADCStream | Decompress |
| LZipStream | Both |
| XZStream | Decompress |
| ZStandardStream | Decompress |

## Archive Formats vs Compression
README.md (85 lines changed)
@@ -1,9 +1,11 @@
# SharpCompress

SharpCompress is a compression library in pure C# for .NET Framework 4.62, .NET Standard 2.1, .NET 6.0 and NET 8.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
SharpCompress is a compression library in pure C# for .NET Framework 4.62, .NET Standard 2.1, .NET 6.0 and NET 8.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip, unzstd, unarc and unarj with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.

The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
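For context, a minimal synchronous sketch of that forward-only pattern (not part of this change set; `nonSeekableStream` is a placeholder for any non-seekable source such as a download stream):

```csharp
using (var reader = ReaderFactory.Open(nonSeekableStream))
{
    while (reader.MoveToNextEntry())
    {
        if (!reader.Entry.IsDirectory)
        {
            // Extract each entry as it streams past; no seeking is required.
            reader.WriteEntryToDirectory(
                @"C:\temp",
                new ExtractionOptions { ExtractFullPath = true, Overwrite = true });
        }
    }
}
```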
**NEW:** All I/O operations now support async/await for improved performance and scalability. See the [Async Usage](#async-usage) section below.

GitHub Actions Build -
[](https://github.com/adamhathcock/sharpcompress/actions/workflows/dotnetcore.yml)
[](https://dndocs.com/d/sharpcompress/api/index.html)
@@ -20,16 +22,94 @@ In general, I recommend GZip (Deflate)/BZip2 (BZip)/LZip (LZMA) as the simplicit

Zip is okay, but it's a very hap-hazard format and the variation in headers and implementations makes it hard to get correct. Uses Deflate by default but supports a lot of compression methods.

RAR is not recommended as it's a propriatory format and the compression is closed source. Use Tar/LZip for LZMA
RAR is not recommended as it's a proprietary format and the compression is closed source. Use Tar/LZip for LZMA

7Zip and XZ both are overly complicated. 7Zip does not support streamable formats. XZ has known holes explained here: (http://www.nongnu.org/lzip/xz_inadequate.html) Use Tar/LZip for LZMA compression instead.

ZStandard is an efficient format that works well for streaming with a flexible compression level to tweak the speed/performance trade off you are looking for. We currently only implement decompression for ZStandard but as we leverage the [ZstdSharp](https://github.com/oleg-st/ZstdSharp) library one could likely add compression support without much trouble (PRs are welcome!).

## A Simple Request

Hi everyone. I hope you're using SharpCompress and finding it useful. Please give me feedback on what you'd like to see changed especially as far as usability goes. New feature suggestions are always welcome as well. I would also like to know what projects SharpCompress is being used in. I like seeing how it is used to give me ideas for future versions. Thanks!

Please do not email me directly to ask for help. If you think there is a real issue, please report it here.

## Async Usage

SharpCompress now provides full async/await support for all I/O operations, allowing for better performance and scalability in modern applications.

### Async Reading Examples

Extract entries asynchronously:
```csharp
using (Stream stream = File.OpenRead("archive.zip"))
using (var reader = ReaderFactory.Open(stream))
{
    while (reader.MoveToNextEntry())
    {
        if (!reader.Entry.IsDirectory)
        {
            // Async extraction
            await reader.WriteEntryToDirectoryAsync(
                @"C:\temp",
                new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
                cancellationToken
            );
        }
    }
}
```

Extract all entries to directory asynchronously:
```csharp
using (Stream stream = File.OpenRead("archive.tar.gz"))
using (var reader = ReaderFactory.Open(stream))
{
    await reader.WriteAllToDirectoryAsync(
        @"C:\temp",
        new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
        cancellationToken
    );
}
```

Open entry stream asynchronously:
```csharp
using (var archive = ZipArchive.Open("archive.zip"))
{
    foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
    {
        using (var entryStream = await entry.OpenEntryStreamAsync(cancellationToken))
        {
            // Process stream asynchronously
            await entryStream.CopyToAsync(outputStream, cancellationToken);
        }
    }
}
```

### Async Writing Examples

Write files asynchronously:
```csharp
using (Stream stream = File.OpenWrite("output.zip"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
{
    await writer.WriteAsync("file1.txt", fileStream, DateTime.Now, cancellationToken);
}
```

Write all files from directory asynchronously:
```csharp
using (Stream stream = File.OpenWrite("output.tar.gz"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)))
{
    await writer.WriteAllAsync(@"D:\files", "*", SearchOption.AllDirectories, cancellationToken);
}
```

All async methods support `CancellationToken` for graceful cancellation of long-running operations.

## Want to contribute?

I'm always looking for help or ideas. Please submit code or email with ideas. Unfortunately, just letting me know you'd like to help is not enough because I really have no overall plan of what needs to be done. I'll definitely accept code submissions and add you as a member of the project!
@@ -40,6 +120,7 @@ I'm always looking for help or ideas. Please submit code or email with ideas. Un
* 7Zip writing
* Zip64 (Need writing and extend Reading)
* Multi-volume Zip support.
* ZStandard writing

## Version Log
SharpCompress.sln (file name inferred from content)
@@ -20,8 +20,15 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{CDB425
		.editorconfig = .editorconfig
		Directory.Packages.props = Directory.Packages.props
		NuGet.config = NuGet.config
		.github\workflows\dotnetcore.yml = .github\workflows\dotnetcore.yml
		USAGE.md = USAGE.md
		README.md = README.md
		FORMATS.md = FORMATS.md
		AGENTS.md = AGENTS.md
	EndProjectSection
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SharpCompress.Performance", "tests\SharpCompress.Performance\SharpCompress.Performance.csproj", "{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}"
EndProject
Global
	GlobalSection(SolutionConfigurationPlatforms) = preSolution
		Debug|Any CPU = Debug|Any CPU
@@ -40,6 +47,10 @@ Global
		{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Debug|Any CPU.Build.0 = Debug|Any CPU
		{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Release|Any CPU.ActiveCfg = Release|Any CPU
		{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Release|Any CPU.Build.0 = Release|Any CPU
		{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
		{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Debug|Any CPU.Build.0 = Debug|Any CPU
		{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Release|Any CPU.ActiveCfg = Release|Any CPU
		{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Release|Any CPU.Build.0 = Release|Any CPU
	EndGlobalSection
	GlobalSection(SolutionProperties) = preSolution
		HideSolutionNode = FALSE
@@ -47,5 +58,6 @@ Global
	GlobalSection(NestedProjects) = preSolution
		{FD19DDD8-72B2-4024-8665-0D1F7A2AA998} = {3C5BE746-03E5-4895-9988-0B57F162F86C}
		{F2B1A1EB-0FA6-40D0-8908-E13247C7226F} = {0F0901FF-E8D9-426A-B5A2-17C7F47C1529}
		{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17} = {0F0901FF-E8D9-426A-B5A2-17C7F47C1529}
	EndGlobalSection
EndGlobal
(ReSharper .DotSettings settings file — file header not captured in this view)
@@ -15,17 +15,17 @@

<s:String x:Key="/Default/CodeStyle/CodeCleanup/SilentCleanupProfile/@EntryValue">Basic Clean</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/APPLY_ON_COMPLETION/@EntryValue">True</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/ARGUMENTS_NAMED/@EntryValue">Named</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/ARGUMENTS_NAMED/@EntryValue">Positional</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_FOR/@EntryValue">Required</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_FOREACH/@EntryValue">Required</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_IFELSE/@EntryValue">Required</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_WHILE/@EntryValue">Required</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_FIRST_ARG_BY_PAREN/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_FIRST_ARG_BY_PAREN/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_LINQ_QUERY/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_ARGUMENT/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_ARRAY_AND_OBJECT_INITIALIZER/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_ARRAY_AND_OBJECT_INITIALIZER/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_CALLS_CHAIN/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_EXPRESSION/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_EXPRESSION/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_EXTENDS_LIST/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_FOR_STMT/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_PARAMETER/@EntryValue">True</s:Boolean>
@@ -42,7 +42,7 @@

<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_IFELSE_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_USING_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_WHILE_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/INDENT_ANONYMOUS_METHOD_BLOCK/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/INDENT_ANONYMOUS_METHOD_BLOCK/@EntryValue">False</s:Boolean>
<s:Int64 x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/KEEP_BLANK_LINES_IN_CODE/@EntryValue">1</s:Int64>
<s:Int64 x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/KEEP_BLANK_LINES_IN_DECLARATIONS/@EntryValue">1</s:Int64>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_ACCESSOR_ATTRIBUTE_ON_SAME_LINE_EX/@EntryValue">NEVER</s:String>
@@ -50,12 +50,12 @@

<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_CONSTRUCTOR_INITIALIZER_ON_SAME_LINE/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_FIELD_ATTRIBUTE_ON_SAME_LINE/@EntryValue">False</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_FIELD_ATTRIBUTE_ON_SAME_LINE_EX/@EntryValue">NEVER</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_ACCESSORHOLDER_ON_SINGLE_LINE/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_ACCESSORHOLDER_ON_SINGLE_LINE/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_ACCESSOR_ATTRIBUTE_ON_SAME_LINE/@EntryValue">False</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_EMBEDDED_STATEMENT_ON_SAME_LINE/@EntryValue">NEVER</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_INITIALIZER_ON_SINGLE_LINE/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_INITIALIZER_ON_SINGLE_LINE/@EntryValue">False</s:Boolean>

<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_WHILE_ON_NEW_LINE/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_WHILE_ON_NEW_LINE/@EntryValue">False</s:Boolean>

<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SIMPLE_EMBEDDED_STATEMENT_STYLE/@EntryValue">LINE_BREAK</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SPACE_AFTER_TYPECAST_PARENTHESES/@EntryValue">False</s:Boolean>
@@ -67,13 +67,13 @@

<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SPACE_BEFORE_TYPEOF_PARENTHESES/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/STICK_COMMENT/@EntryValue">False</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_ARGUMENTS_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_ARRAY_INITIALIZER_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_ARRAY_INITIALIZER_STYLE/@EntryValue">CHOP_ALWAYS</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_EXTENDS_LIST_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_LINES/@EntryValue">False</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_PARAMETERS_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForBuiltInTypes/@EntryValue">UseVarWhenEvident</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForOtherTypes/@EntryValue">UseVarWhenEvident</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForSimpleTypes/@EntryValue">UseVarWhenEvident</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForBuiltInTypes/@EntryValue">UseVar</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForOtherTypes/@EntryValue">UseVar</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForSimpleTypes/@EntryValue">UseVar</s:String>

<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateInstanceFields/@EntryIndexedValue"><Policy Inspect="True" Prefix="_" Suffix="" Style="aaBb" /></s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateStaticFields/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /></s:String>
@@ -122,6 +122,7 @@

<s:String x:Key="/Default/CodeStyle/Naming/XamlNaming/UserRules/=NAMESPACE_005FALIAS/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /></s:String>
<s:String x:Key="/Default/CodeStyle/Naming/XamlNaming/UserRules/=XAML_005FFIELD/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /></s:String>
<s:String x:Key="/Default/CodeStyle/Naming/XamlNaming/UserRules/=XAML_005FRESOURCE/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /></s:String>
<s:String x:Key="/Default/CustomTools/CustomToolsData/@EntryValue"></s:String>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpAttributeForSingleLineMethodUpgrade/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpKeepExistingMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpPlaceEmbeddedOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
USAGE.md (173 lines changed)
@@ -1,5 +1,18 @@
# SharpCompress Usage

## Async/Await Support

SharpCompress now provides full async/await support for all I/O operations. All `Read`, `Write`, and extraction operations have async equivalents ending in `Async` that accept an optional `CancellationToken`. This enables better performance and scalability for I/O-bound operations.

**Key Async Methods:**
- `reader.WriteEntryToAsync(stream, cancellationToken)` - Extract entry asynchronously
- `reader.WriteAllToDirectoryAsync(path, options, cancellationToken)` - Extract all asynchronously
- `writer.WriteAsync(filename, stream, modTime, cancellationToken)` - Write entry asynchronously
- `writer.WriteAllAsync(directory, pattern, searchOption, cancellationToken)` - Write directory asynchronously
- `entry.OpenEntryStreamAsync(cancellationToken)` - Open entry stream asynchronously

See [Async Examples](#async-examples) section below for usage patterns.

## Stream Rules (changed with 0.21)

When dealing with Streams, the rule should be that you don't close a stream you didn't create. This, in effect, should mean you should always put a Stream in a using block to dispose it.
@@ -27,7 +40,7 @@ To deal with the "correct" rules as well as the expectations of users, I've deci

To be explicit though, consider always using the overloads that use `ReaderOptions` or `WriterOptions` and explicitly set `LeaveStreamOpen` the way you want.

If using Compression Stream classes directly and you don't want the wrapped stream to be closed. Use the `NonDisposingStream` as a wrapped to prevent the stream being disposed. The change in 0.21 simplified a lot even though the usage is a bit more convoluted.
If using Compression Stream classes directly and you don't want the wrapped stream to be closed. Use the `NonDisposingStream` as a wrapper to prevent the stream being disposed. The change in 0.21 simplified a lot even though the usage is a bit more convoluted.
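As a concrete illustration of the explicit-overload advice above (a minimal sketch; the in-memory stream is just a stand-in for a stream you want to keep using afterwards):

```C#
var memoryStream = new MemoryStream();
using (var writer = WriterFactory.Open(
    memoryStream,
    ArchiveType.Zip,
    new WriterOptions(CompressionType.Deflate) { LeaveStreamOpen = true }))
{
    using (Stream input = File.OpenRead("file1.txt"))
    {
        writer.Write("file1.txt", input, DateTime.Now);
    }
}
// The writer did not dispose memoryStream, so it can still be read here.
memoryStream.Position = 0;
```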
## Samples

@@ -71,18 +84,34 @@ using (var archive = ZipArchive.Create())
    memoryStream.Position = 0;
```

### Extract all files from a Rar file to a directory using RarArchive
### Extract all files from a rar file to a directory using RarArchive

Note: Extracting a solid rar or 7z file needs to be done in sequential order to get acceptable decompression speed.
It is explicitly recommended to use `ExtractAllEntries` when extracting an entire `IArchive` instead of iterating over all its `Entries`.
Alternatively, use `IArchive.WriteToDirectory`.

```C#
using (var archive = RarArchive.Open("Test.rar"))
{
    using (var reader = archive.ExtractAllEntries())
    {
        reader.WriteAllToDirectory(@"D:\temp", new ExtractionOptions()
        {
            ExtractFullPath = true,
            Overwrite = true
        });
    }
}
```

### Iterate over all files from a Rar file using RarArchive

```C#
using (var archive = RarArchive.Open("Test.rar"))
{
    foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
    {
        entry.WriteToDirectory("D:\\temp", new ExtractionOptions()
        {
            ExtractFullPath = true,
            Overwrite = true
        });
        Console.WriteLine($"{entry.Key}: {entry.Size} bytes");
    }
}
```
@@ -156,3 +185,133 @@ foreach(var entry in tr.Entries)
    Console.WriteLine($"{entry.Key}");
}
```

## Async Examples

### Async Reader Examples

**Extract single entry asynchronously:**
```C#
using (Stream stream = File.OpenRead("archive.zip"))
using (var reader = ReaderFactory.Open(stream))
{
    while (reader.MoveToNextEntry())
    {
        if (!reader.Entry.IsDirectory)
        {
            using (var entryStream = reader.OpenEntryStream())
            {
                using (var outputStream = File.Create("output.bin"))
                {
                    await reader.WriteEntryToAsync(outputStream, cancellationToken);
                }
            }
        }
    }
}
```

**Extract all entries asynchronously:**
```C#
using (Stream stream = File.OpenRead("archive.tar.gz"))
using (var reader = ReaderFactory.Open(stream))
{
    await reader.WriteAllToDirectoryAsync(
        @"D:\temp",
        new ExtractionOptions()
        {
            ExtractFullPath = true,
            Overwrite = true
        },
        cancellationToken
    );
}
```

**Open and process entry stream asynchronously:**
```C#
using (var archive = ZipArchive.Open("archive.zip"))
{
    foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
    {
        using (var entryStream = await entry.OpenEntryStreamAsync(cancellationToken))
        {
            // Process the decompressed stream asynchronously
            await ProcessStreamAsync(entryStream, cancellationToken);
        }
    }
}
```

### Async Writer Examples

**Write single file asynchronously:**
```C#
using (Stream archiveStream = File.OpenWrite("output.zip"))
using (var writer = WriterFactory.Open(archiveStream, ArchiveType.Zip, CompressionType.Deflate))
{
    using (Stream fileStream = File.OpenRead("input.txt"))
    {
        await writer.WriteAsync("entry.txt", fileStream, DateTime.Now, cancellationToken);
    }
}
```

**Write entire directory asynchronously:**
```C#
using (Stream stream = File.OpenWrite("backup.tar.gz"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)))
{
    await writer.WriteAllAsync(
        @"D:\files",
        "*",
        SearchOption.AllDirectories,
        cancellationToken
    );
}
```

**Write with progress tracking and cancellation:**
```C#
var cts = new CancellationTokenSource();

// Set timeout or cancel from UI
cts.CancelAfter(TimeSpan.FromMinutes(5));

using (Stream stream = File.OpenWrite("archive.zip"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
{
    try
    {
        await writer.WriteAllAsync(@"D:\data", "*", SearchOption.AllDirectories, cts.Token);
    }
    catch (OperationCanceledException)
    {
        Console.WriteLine("Operation was cancelled");
    }
}
```

### Archive Async Examples

**Extract from archive asynchronously:**
```C#
using (var archive = ZipArchive.Open("archive.zip"))
{
    using (var reader = archive.ExtractAllEntries())
    {
        await reader.WriteAllToDirectoryAsync(
            @"C:\output",
            new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
            cancellationToken
        );
    }
}
```

**Benefits of Async Operations:**
- Non-blocking I/O for better application responsiveness
- Improved scalability for server applications
- Support for cancellation via CancellationToken
- Better resource utilization in async/await contexts
- Compatible with modern .NET async patterns
(Bullseye build script in build/ — file header not captured in this view)
@@ -15,7 +15,7 @@ const string Publish = "publish";

Target(
    Clean,
    ForEach("**/bin", "**/obj"),
    ["**/bin", "**/obj"],
    dir =>
    {
        IEnumerable<string> GetDirectories(string d)
@@ -44,14 +44,14 @@ Target(
    () =>
    {
        Run("dotnet", "tool restore");
        Run("dotnet", "csharpier --check .");
        Run("dotnet", "csharpier check .");
    }
);
Target(Restore, DependsOn(Format), () => Run("dotnet", "restore"));
Target(Restore, [Format], () => Run("dotnet", "restore"));

Target(
    Build,
    DependsOn(Restore),
    [Restore],
    () =>
    {
        Run("dotnet", "build src/SharpCompress/SharpCompress.csproj -c Release --no-restore");
@@ -60,8 +60,8 @@ Target(

Target(
    Test,
    DependsOn(Build),
    ForEach("net8.0", "net462"),
    [Build],
    ["net8.0", "net48"],
    framework =>
    {
        IEnumerable<string> GetFiles(string d)
@@ -69,7 +69,7 @@ Target(
            return Glob.Files(".", d);
        }

        if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows) && framework == "net462")
        if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows) && framework == "net48")
        {
            return;
        }
@@ -83,13 +83,13 @@ Target(

Target(
    Publish,
    DependsOn(Test),
    [Test],
    () =>
    {
        Run("dotnet", "pack src/SharpCompress/SharpCompress.csproj -c Release -o artifacts/");
    }
);

Target("default", DependsOn(Publish), () => Console.WriteLine("Done!"));
Target("default", [Publish], () => Console.WriteLine("Done!"));

await RunTargetsAndExitAsync(args);
(build project file, likely build/build.csproj — file header not captured in this view)
@@ -1,14 +1,11 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net8.0</TargetFramework>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="Bullseye" />
    <PackageReference Include="Glob" />
    <PackageReference Include="SimpleExec" />
    <PackageReference Include="SimpleExec" />
  </ItemGroup>

</Project>
(NuGet lock file for the build project — file header not captured in this view)
@@ -4,9 +4,9 @@
    "net8.0": {
      "Bullseye": {
        "type": "Direct",
        "requested": "[5.0.0, )",
        "resolved": "5.0.0",
        "contentHash": "bqyt+m17ym+5aN45C5oZRAjuLDt8jKiCm/ys1XfymIXSkrTFwvI/QsbY3ucPSHDz7SF7uON7B57kXFv5H2k1ew=="
        "requested": "[6.0.0, )",
        "resolved": "6.0.0",
        "contentHash": "vgwwXfzs7jJrskWH7saHRMgPzziq/e86QZNWY1MnMxd7e+De7E7EX4K3C7yrvaK9y02SJoLxNxcLG/q5qUAghw=="
      },
      "Glob": {
        "type": "Direct",
(src: Adler32 — file header not captured in this view)
@@ -141,7 +141,7 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl
            4,
            3,
            2,
            1 // tap2
            1, // tap2
        };
#endif
(src: AbstractArchive<TEntry, TVolume> — file header not captured in this view)
@@ -53,7 +53,7 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
    {
        if (!stream.CanSeek || !stream.CanRead)
        {
            throw new ArgumentException("Archive streams must be Readable and Seekable");
            throw new ArchiveException("Archive streams must be Readable and Seekable");
        }
        return stream;
    }
@@ -144,6 +144,12 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
    /// <returns></returns>
    public IReader ExtractAllEntries()
    {
        if (!IsSolid && Type != ArchiveType.SevenZip)
        {
            throw new InvalidOperationException(
                "ExtractAllEntries can only be used on solid archives or 7Zip archives (which require random access)."
            );
        }
        ((IArchiveExtractionListener)this).EnsureEntriesLoaded();
        return CreateReaderForSolidExtraction();
    }
(src: AbstractWritableArchive<TEntry, TVolume> — file header not captured in this view)
@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Writers;
@@ -94,6 +96,9 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
        DateTime? modified
    ) => AddEntry(key, source, closeStream, size, modified);

    IArchiveEntry IWritableArchive.AddDirectoryEntry(string key, DateTime? modified) =>
        AddDirectoryEntry(key, modified);

    public TEntry AddEntry(
        string key,
        Stream source,
@@ -134,6 +139,22 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
        return false;
    }

    public TEntry AddDirectoryEntry(string key, DateTime? modified = null)
    {
        if (key.Length > 0 && key[0] is '/' or '\\')
        {
            key = key.Substring(1);
        }
        if (DoesKeyMatchExisting(key))
        {
            throw new ArchiveException("Cannot add entry with duplicate key: " + key);
        }
        var entry = CreateDirectoryEntry(key, modified);
        newEntries.Add(entry);
        RebuildModifiedCollection();
        return entry;
    }

    public void SaveTo(Stream stream, WriterOptions options)
    {
        //reset streams of new entries
@@ -141,6 +162,18 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
        SaveTo(stream, options, OldEntries, newEntries);
    }

    public async Task SaveToAsync(
        Stream stream,
        WriterOptions options,
        CancellationToken cancellationToken = default
    )
    {
        //reset streams of new entries
        newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
        await SaveToAsync(stream, options, OldEntries, newEntries, cancellationToken)
            .ConfigureAwait(false);
    }

    protected TEntry CreateEntry(
        string key,
        Stream source,
@@ -151,7 +184,7 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
    {
        if (!source.CanRead || !source.CanSeek)
        {
            throw new ArgumentException(
            throw new ArchiveException(
                "Streams must be readable and seekable to use the Writing Archive API"
            );
        }
@@ -166,6 +199,8 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
        bool closeStream
    );

    protected abstract TEntry CreateDirectoryEntry(string key, DateTime? modified);

    protected abstract void SaveTo(
        Stream stream,
        WriterOptions options,
@@ -173,6 +208,14 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
        IEnumerable<TEntry> newEntries
    );

    protected abstract Task SaveToAsync(
        Stream stream,
        WriterOptions options,
        IEnumerable<TEntry> oldEntries,
        IEnumerable<TEntry> newEntries,
        CancellationToken cancellationToken = default
    );

    public override void Dispose()
    {
        base.Dispose();
(src: ArchiveFactory — file header not captured in this view)
@@ -4,6 +4,7 @@ using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.Factories;
using SharpCompress.IO;
using SharpCompress.Readers;

namespace SharpCompress.Archives;
@@ -19,7 +20,7 @@ public static class ArchiveFactory
    public static IArchive Open(Stream stream, ReaderOptions? readerOptions = null)
    {
        readerOptions ??= new ReaderOptions();

        stream = SharpCompressStream.Create(stream, bufferSize: readerOptions.BufferSize);
        return FindFactory<IArchiveFactory>(stream).Open(stream, readerOptions);
    }

@@ -44,7 +45,7 @@ public static class ArchiveFactory
    /// <param name="options"></param>
    public static IArchive Open(string filePath, ReaderOptions? options = null)
    {
        filePath.CheckNotNullOrEmpty(nameof(filePath));
        filePath.NotNullOrEmpty(nameof(filePath));
        return Open(new FileInfo(filePath), options);
    }

@@ -67,7 +68,7 @@ public static class ArchiveFactory
    /// <param name="options"></param>
    public static IArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? options = null)
    {
        fileInfos.CheckNotNull(nameof(fileInfos));
        fileInfos.NotNull(nameof(fileInfos));
        var filesArray = fileInfos.ToArray();
        if (filesArray.Length == 0)
        {
@@ -80,7 +81,7 @@ public static class ArchiveFactory
            return Open(fileInfo, options);
        }

        fileInfo.CheckNotNull(nameof(fileInfo));
        fileInfo.NotNull(nameof(fileInfo));
        options ??= new ReaderOptions { LeaveStreamOpen = false };

        return FindFactory<IMultiArchiveFactory>(fileInfo).Open(filesArray, options);
@@ -93,7 +94,7 @@ public static class ArchiveFactory
    /// <param name="options"></param>
    public static IArchive Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
    {
        streams.CheckNotNull(nameof(streams));
        streams.NotNull(nameof(streams));
        var streamsArray = streams.ToArray();
        if (streamsArray.Length == 0)
        {
@@ -106,7 +107,7 @@ public static class ArchiveFactory
            return Open(firstStream, options);
        }

        firstStream.CheckNotNull(nameof(firstStream));
        firstStream.NotNull(nameof(firstStream));
        options ??= new ReaderOptions();

        return FindFactory<IMultiArchiveFactory>(firstStream).Open(streamsArray, options);
@@ -122,16 +123,13 @@ public static class ArchiveFactory
    )
    {
        using var archive = Open(sourceArchive);
        foreach (var entry in archive.Entries)
        {
            entry.WriteToDirectory(destinationDirectory, options);
        }
        archive.WriteToDirectory(destinationDirectory, options);
    }

    private static T FindFactory<T>(FileInfo finfo)
        where T : IFactory
    {
        finfo.CheckNotNull(nameof(finfo));
        finfo.NotNull(nameof(finfo));
        using Stream stream = finfo.OpenRead();
        return FindFactory<T>(stream);
    }
@@ -139,7 +137,7 @@ public static class ArchiveFactory
    private static T FindFactory<T>(Stream stream)
        where T : IFactory
    {
        stream.CheckNotNull(nameof(stream));
        stream.NotNull(nameof(stream));
        if (!stream.CanRead || !stream.CanSeek)
        {
            throw new ArgumentException("Stream should be readable and seekable");
@@ -168,17 +166,25 @@ public static class ArchiveFactory
        );
    }

    public static bool IsArchive(string filePath, out ArchiveType? type)
|
||||
public static bool IsArchive(
|
||||
string filePath,
|
||||
out ArchiveType? type,
|
||||
int bufferSize = ReaderOptions.DefaultBufferSize
|
||||
)
|
||||
{
|
||||
filePath.CheckNotNullOrEmpty(nameof(filePath));
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
using Stream s = File.OpenRead(filePath);
|
||||
return IsArchive(s, out type);
|
||||
return IsArchive(s, out type, bufferSize);
|
||||
}
|
||||
|
||||
public static bool IsArchive(Stream stream, out ArchiveType? type)
|
||||
public static bool IsArchive(
|
||||
Stream stream,
|
||||
out ArchiveType? type,
|
||||
int bufferSize = ReaderOptions.DefaultBufferSize
|
||||
)
|
||||
{
|
||||
type = null;
|
||||
stream.CheckNotNull(nameof(stream));
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
if (!stream.CanRead || !stream.CanSeek)
|
||||
{
|
||||
@@ -189,9 +195,10 @@ public static class ArchiveFactory
|
||||
|
||||
foreach (var factory in Factory.Factories)
|
||||
{
|
||||
var isArchive = factory.IsArchive(stream);
|
||||
stream.Position = startPosition;
|
||||
|
||||
if (factory.IsArchive(stream, null))
|
||||
if (isArchive)
|
||||
{
|
||||
type = factory.KnownArchiveType;
|
||||
return true;
|
||||
@@ -208,7 +215,7 @@ public static class ArchiveFactory
|
||||
/// <returns></returns>
|
||||
public static IEnumerable<string> GetFileParts(string part1)
|
||||
{
|
||||
part1.CheckNotNullOrEmpty(nameof(part1));
|
||||
part1.NotNullOrEmpty(nameof(part1));
|
||||
return GetFileParts(new FileInfo(part1)).Select(a => a.FullName);
|
||||
}
|
||||
|
||||
@@ -219,7 +226,7 @@ public static class ArchiveFactory
|
||||
/// <returns></returns>
|
||||
public static IEnumerable<FileInfo> GetFileParts(FileInfo part1)
|
||||
{
|
||||
part1.CheckNotNull(nameof(part1));
|
||||
part1.NotNull(nameof(part1));
|
||||
yield return part1;
|
||||
|
||||
foreach (var factory in Factory.Factories.OfType<IFactory>())
|
||||
@@ -239,4 +246,6 @@ public static class ArchiveFactory
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static IArchiveFactory AutoFactory { get; } = new AutoArchiveFactory();
|
||||
}
|
||||
|
||||
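A small sketch of the reworked detection API above, assuming an existing file path; the optional bufferSize parameter defaults to ReaderOptions.DefaultBufferSize, and the 81920 value here is only an example.

using SharpCompress.Archives;
using SharpCompress.Common;

internal static class DetectionSketch
{
    public static void Run(string path)
    {
        // bufferSize is optional; pass a larger value for slow or heavily buffered sources.
        if (ArchiveFactory.IsArchive(path, out ArchiveType? type, bufferSize: 81920))
        {
            System.Console.WriteLine($"Detected archive type: {type}");
        }
    }
}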
src/SharpCompress/Archives/AutoArchiveFactory.cs (new file, 30 lines)
@@ -0,0 +1,30 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Readers;

namespace SharpCompress.Archives;

class AutoArchiveFactory : IArchiveFactory
{
public string Name => nameof(AutoArchiveFactory);

public ArchiveType? KnownArchiveType => null;

public IEnumerable<string> GetSupportedExtensions() => throw new NotSupportedException();

public bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => throw new NotSupportedException();

public FileInfo? GetFilePart(int index, FileInfo part1) => throw new NotSupportedException();

public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
ArchiveFactory.Open(stream, readerOptions);

public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
ArchiveFactory.Open(fileInfo, readerOptions);
}
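A sketch of how the new AutoFactory property might be consumed where an IArchiveFactory is expected; it defers to ArchiveFactory.Open, so the format is sniffed from the stream. The local file name is an assumption.

using SharpCompress.Archives;

internal static class AutoFactorySketch
{
    public static void Run()
    {
        // Resolve the concrete archive type at open time instead of picking a factory up front.
        IArchiveFactory factory = ArchiveFactory.AutoFactory;
        using var archive = factory.Open(new System.IO.FileInfo("unknown-format.bin"));
        foreach (var entry in archive.Entries)
        {
            System.Console.WriteLine(entry.Key);
        }
    }
}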
@@ -2,6 +2,8 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.GZip;
|
||||
using SharpCompress.IO;
|
||||
@@ -21,7 +23,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static GZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
filePath.CheckNotNullOrEmpty(nameof(filePath));
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
|
||||
}
|
||||
|
||||
@@ -32,7 +34,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
fileInfo.CheckNotNull(nameof(fileInfo));
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
return new GZipArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
@@ -52,7 +54,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.CheckNotNull(nameof(fileInfos));
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new GZipArchive(
|
||||
new SourceStream(
|
||||
@@ -70,7 +72,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static GZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
streams.CheckNotNull(nameof(streams));
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new GZipArchive(
|
||||
new SourceStream(
|
||||
@@ -88,7 +90,13 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static GZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
stream.CheckNotNull(nameof(stream));
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new GZipArchive(
|
||||
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
|
||||
);
|
||||
@@ -101,7 +109,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
|
||||
/// </summary>
|
||||
/// <param name="sourceStream"></param>
|
||||
private GZipArchive(SourceStream sourceStream)
|
||||
: base(ArchiveType.Tar, sourceStream) { }
|
||||
: base(ArchiveType.GZip, sourceStream) { }
|
||||
|
||||
protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream sourceStream)
|
||||
{
|
||||
@@ -130,6 +138,16 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
|
||||
SaveTo(stream, new WriterOptions(CompressionType.GZip));
|
||||
}
|
||||
|
||||
public Task SaveToAsync(string filePath, CancellationToken cancellationToken = default) =>
|
||||
SaveToAsync(new FileInfo(filePath), cancellationToken);
|
||||
|
||||
public async Task SaveToAsync(FileInfo fileInfo, CancellationToken cancellationToken = default)
|
||||
{
|
||||
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
|
||||
await SaveToAsync(stream, new WriterOptions(CompressionType.GZip), cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
|
||||
public static bool IsGZipFile(Stream stream)
|
||||
{
|
||||
// read the header on the first read
|
||||
@@ -162,11 +180,16 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
|
||||
{
|
||||
if (Entries.Any())
|
||||
{
|
||||
throw new InvalidOperationException("Only one entry is allowed in a GZip Archive");
|
||||
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
|
||||
}
|
||||
return new GZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
|
||||
}
|
||||
|
||||
protected override GZipArchiveEntry CreateDirectoryEntry(
|
||||
string directoryPath,
|
||||
DateTime? modified
|
||||
) => throw new NotSupportedException("GZip archives do not support directory entries.");
|
||||
|
||||
protected override void SaveTo(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
@@ -176,7 +199,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
|
||||
{
|
||||
if (Entries.Count > 1)
|
||||
{
|
||||
throw new InvalidOperationException("Only one entry is allowed in a GZip Archive");
|
||||
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
|
||||
}
|
||||
using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
|
||||
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
|
||||
@@ -190,6 +213,28 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
|
||||
}
|
||||
}
|
||||
|
||||
protected override async Task SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
IEnumerable<GZipArchiveEntry> oldEntries,
|
||||
IEnumerable<GZipArchiveEntry> newEntries,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
if (Entries.Count > 1)
|
||||
{
|
||||
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
|
||||
}
|
||||
using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
|
||||
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
await writer
|
||||
.WriteAsync(entry.Key.NotNull("Entry Key is null"), entryStream, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
|
||||
protected override IEnumerable<GZipArchiveEntry> LoadEntries(IEnumerable<GZipVolume> volumes)
|
||||
{
|
||||
var stream = volumes.Single().Stream;
|
||||
|
||||
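A sketch of the new asynchronous GZip save path added above, assuming a single entry (GZip allows only one) and illustrative file names.

using System.IO;
using System.Threading.Tasks;
using SharpCompress.Archives.GZip;

internal static class GZipSaveSketch
{
    public static async Task RunAsync()
    {
        using var archive = GZipArchive.Create();
        using var payload = File.OpenRead("data.bin");
        archive.AddEntry("data.bin", payload, closeStream: false, size: payload.Length);

        // New overloads: SaveToAsync(string) and SaveToAsync(FileInfo).
        await archive.SaveToAsync("data.bin.gz");
    }
}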
@@ -1,5 +1,7 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.GZip;

namespace SharpCompress.Archives.GZip;
@@ -13,11 +15,18 @@ public class GZipArchiveEntry : GZipEntry, IArchiveEntry
{
//this is to reset the stream to be read multiple times
var part = (GZipFilePart)Parts.Single();
if (part.GetRawStream().Position != part.EntryStartPosition)
var rawStream = part.GetRawStream();
if (rawStream.CanSeek && rawStream.Position != part.EntryStartPosition)
{
part.GetRawStream().Position = part.EntryStartPosition;
rawStream.Position = part.EntryStartPosition;
}
return Parts.Single().GetCompressedStream();
return Parts.Single().GetCompressedStream().NotNull();
}

public virtual Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
{
// GZip synchronous implementation is fast enough, just wrap it
return Task.FromResult(OpenEntryStream());
}

#region IArchiveEntry Members

@@ -58,7 +58,7 @@ internal sealed class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArch
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return NonDisposingStream.Create(stream);
return SharpCompressStream.Create(stream, leaveOpen: true);
}

internal override void Close()

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;

namespace SharpCompress.Archives;
@@ -11,6 +13,12 @@ public interface IArchiveEntry : IEntry
/// </summary>
Stream OpenEntryStream();

/// <summary>
/// Opens the current entry as a stream that will decompress as it is read asynchronously.
/// Read the entire stream or use SkipEntry on EntryStream.
/// </summary>
Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default);

/// <summary>
/// The archive can find all the parts of the archive needed to extract this entry.
/// </summary>

@@ -1,4 +1,6 @@
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.IO;
|
||||
|
||||
@@ -25,7 +27,35 @@ public static class IArchiveEntryExtensions
|
||||
using (entryStream)
|
||||
{
|
||||
using Stream s = new ListeningStream(streamListener, entryStream);
|
||||
s.TransferTo(streamToWriteTo);
|
||||
s.CopyTo(streamToWriteTo);
|
||||
}
|
||||
streamListener.FireEntryExtractionEnd(archiveEntry);
|
||||
}
|
||||
|
||||
public static async Task WriteToAsync(
|
||||
this IArchiveEntry archiveEntry,
|
||||
Stream streamToWriteTo,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
if (archiveEntry.IsDirectory)
|
||||
{
|
||||
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
|
||||
}
|
||||
|
||||
var streamListener = (IArchiveExtractionListener)archiveEntry.Archive;
|
||||
streamListener.EnsureEntriesLoaded();
|
||||
streamListener.FireEntryExtractionBegin(archiveEntry);
|
||||
streamListener.FireFilePartExtractionBegin(
|
||||
archiveEntry.Key ?? "Key",
|
||||
archiveEntry.Size,
|
||||
archiveEntry.CompressedSize
|
||||
);
|
||||
var entryStream = archiveEntry.OpenEntryStream();
|
||||
using (entryStream)
|
||||
{
|
||||
using Stream s = new ListeningStream(streamListener, entryStream);
|
||||
await s.CopyToAsync(streamToWriteTo, 81920, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
streamListener.FireEntryExtractionEnd(archiveEntry);
|
||||
}
|
||||
@@ -45,6 +75,23 @@ public static class IArchiveEntryExtensions
|
||||
entry.WriteToFile
|
||||
);
|
||||
|
||||
/// <summary>
|
||||
/// Extract to specific directory asynchronously, retaining filename
|
||||
/// </summary>
|
||||
public static Task WriteToDirectoryAsync(
|
||||
this IArchiveEntry entry,
|
||||
string destinationDirectory,
|
||||
ExtractionOptions? options = null,
|
||||
CancellationToken cancellationToken = default
|
||||
) =>
|
||||
ExtractionMethods.WriteEntryToDirectoryAsync(
|
||||
entry,
|
||||
destinationDirectory,
|
||||
options,
|
||||
(x, opt) => entry.WriteToFileAsync(x, opt, cancellationToken),
|
||||
cancellationToken
|
||||
);
|
||||
|
||||
/// <summary>
|
||||
/// Extract to specific file
|
||||
/// </summary>
|
||||
@@ -63,4 +110,25 @@ public static class IArchiveEntryExtensions
|
||||
entry.WriteTo(fs);
|
||||
}
|
||||
);
|
||||
|
||||
/// <summary>
|
||||
/// Extract to specific file asynchronously
|
||||
/// </summary>
|
||||
public static Task WriteToFileAsync(
|
||||
this IArchiveEntry entry,
|
||||
string destinationFileName,
|
||||
ExtractionOptions? options = null,
|
||||
CancellationToken cancellationToken = default
|
||||
) =>
|
||||
ExtractionMethods.WriteEntryToFileAsync(
|
||||
entry,
|
||||
destinationFileName,
|
||||
options,
|
||||
async (x, fm) =>
|
||||
{
|
||||
using var fs = File.Open(destinationFileName, fm);
|
||||
await entry.WriteToAsync(fs, cancellationToken).ConfigureAwait(false);
|
||||
},
|
||||
cancellationToken
|
||||
);
|
||||
}
|
||||
|
||||
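A sketch of the new per-entry async extraction helpers shown above; the destination path is illustrative, and the ExtractionOptions values are common settings rather than anything mandated by this change.

using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Common;

internal static class EntryExtractionSketch
{
    public static async Task RunAsync(IArchive archive)
    {
        var options = new ExtractionOptions { ExtractFullPath = true, Overwrite = true };
        foreach (var entry in archive.Entries)
        {
            if (!entry.IsDirectory)
            {
                // New in this change: WriteToDirectoryAsync mirrors WriteToDirectory.
                await entry.WriteToDirectoryAsync(@"C:\temp\out", options);
            }
        }
    }
}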
@@ -4,6 +4,7 @@ using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
@@ -18,10 +19,8 @@ public static class IArchiveExtensions
|
||||
ExtractionOptions? options = null
|
||||
)
|
||||
{
|
||||
foreach (var entry in archive.Entries.Where(x => !x.IsDirectory))
|
||||
{
|
||||
entry.WriteToDirectory(destinationDirectory, options);
|
||||
}
|
||||
using var reader = archive.ExtractAllEntries();
|
||||
reader.WriteAllToDirectory(destinationDirectory, options);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -46,27 +45,37 @@ public static class IArchiveExtensions
|
||||
var seenDirectories = new HashSet<string>();
|
||||
|
||||
// Extract
|
||||
var entries = archive.ExtractAllEntries();
|
||||
while (entries.MoveToNextEntry())
|
||||
foreach (var entry in archive.Entries)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
var entry = entries.Entry;
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
var dirPath = Path.Combine(destination, entry.Key.NotNull("Entry Key is null"));
|
||||
if (
|
||||
Path.GetDirectoryName(dirPath + "/") is { } emptyDirectory
|
||||
&& seenDirectories.Add(dirPath)
|
||||
)
|
||||
{
|
||||
Directory.CreateDirectory(emptyDirectory);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
// Create each directory
|
||||
// Create each directory if not already created
|
||||
var path = Path.Combine(destination, entry.Key.NotNull("Entry Key is null"));
|
||||
if (Path.GetDirectoryName(path) is { } directory && seenDirectories.Add(path))
|
||||
if (Path.GetDirectoryName(path) is { } directory)
|
||||
{
|
||||
Directory.CreateDirectory(directory);
|
||||
if (!Directory.Exists(directory) && !seenDirectories.Contains(directory))
|
||||
{
|
||||
Directory.CreateDirectory(directory);
|
||||
seenDirectories.Add(directory);
|
||||
}
|
||||
}
|
||||
|
||||
// Write file
|
||||
using var fs = File.OpenWrite(path);
|
||||
entries.WriteEntryTo(fs);
|
||||
entry.WriteTo(fs);
|
||||
|
||||
// Update progress
|
||||
bytesRead += entry.Size;
|
||||
|
||||
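For reference, the public call site is unchanged by the rework above; a sketch assuming a seekable source file and illustrative paths.

using SharpCompress.Archives;
using SharpCompress.Common;

internal static class BulkExtractSketch
{
    public static void Run(string archivePath, string destination)
    {
        using var archive = ArchiveFactory.Open(archivePath);
        // WriteToDirectory now drives a single forward-only reader under the hood,
        // which avoids repeated seeks when extracting solid archives.
        archive.WriteToDirectory(
            destination,
            new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
        );
    }
}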
@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Writers;

namespace SharpCompress.Archives;
@@ -16,8 +18,16 @@ public interface IWritableArchive : IArchive
DateTime? modified = null
);

IArchiveEntry AddDirectoryEntry(string key, DateTime? modified = null);

void SaveTo(Stream stream, WriterOptions options);

Task SaveToAsync(
Stream stream,
WriterOptions options,
CancellationToken cancellationToken = default
);

/// <summary>
/// Use this to pause entry rebuilding when adding large collections of entries. Dispose when complete. A using statement is recommended.
/// </summary>

@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Writers;

namespace SharpCompress.Archives;
@@ -42,6 +44,24 @@ public static class IWritableArchiveExtensions
writableArchive.SaveTo(stream, options);
}

public static Task SaveToAsync(
this IWritableArchive writableArchive,
string filePath,
WriterOptions options,
CancellationToken cancellationToken = default
) => writableArchive.SaveToAsync(new FileInfo(filePath), options, cancellationToken);

public static async Task SaveToAsync(
this IWritableArchive writableArchive,
FileInfo fileInfo,
WriterOptions options,
CancellationToken cancellationToken = default
)
{
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
await writableArchive.SaveToAsync(stream, options, cancellationToken).ConfigureAwait(false);
}

public static void AddAllFromDirectory(
this IWritableArchive writableArchive,
string filePath,

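A sketch of the new extension overloads above, with an illustrative output path and compression choice.

using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.Writers;

internal static class SaveToAsyncSketch
{
    public static async Task RunAsync(IWritableArchive archive)
    {
        // The new extension overloads accept a path or FileInfo and create the file for you.
        await archive.SaveToAsync("backup.zip", new WriterOptions(CompressionType.Deflate));
    }
}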
@@ -1,4 +1,5 @@
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using SharpCompress.Common.Rar;

@@ -14,6 +14,7 @@ namespace SharpCompress.Archives.Rar;
|
||||
|
||||
public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
{
|
||||
private bool _disposed;
|
||||
internal Lazy<IRarUnpack> UnpackV2017 { get; } =
|
||||
new(() => new Compressors.Rar.UnpackV2017.Unpack());
|
||||
internal Lazy<IRarUnpack> UnpackV1 { get; } = new(() => new Compressors.Rar.UnpackV1.Unpack());
|
||||
@@ -25,6 +26,20 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
private RarArchive(SourceStream sourceStream)
|
||||
: base(ArchiveType.Rar, sourceStream) { }
|
||||
|
||||
public override void Dispose()
|
||||
{
|
||||
if (!_disposed)
|
||||
{
|
||||
if (UnpackV1.IsValueCreated && UnpackV1.Value is IDisposable unpackV1)
|
||||
{
|
||||
unpackV1.Dispose();
|
||||
}
|
||||
|
||||
_disposed = true;
|
||||
base.Dispose();
|
||||
}
|
||||
}
|
||||
|
||||
protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes) =>
|
||||
RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions);
|
||||
|
||||
@@ -52,6 +67,16 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
|
||||
protected override IReader CreateReaderForSolidExtraction()
|
||||
{
|
||||
if (this.IsMultipartVolume())
|
||||
{
|
||||
var streams = Volumes.Select(volume =>
|
||||
{
|
||||
volume.Stream.Position = 0;
|
||||
return volume.Stream;
|
||||
});
|
||||
return RarReader.Open(streams, ReaderOptions);
|
||||
}
|
||||
|
||||
var stream = Volumes.First().Stream;
|
||||
stream.Position = 0;
|
||||
return RarReader.Open(stream, ReaderOptions);
|
||||
@@ -70,7 +95,7 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
/// <param name="options"></param>
|
||||
public static RarArchive Open(string filePath, ReaderOptions? options = null)
|
||||
{
|
||||
filePath.CheckNotNullOrEmpty(nameof(filePath));
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
var fileInfo = new FileInfo(filePath);
|
||||
return new RarArchive(
|
||||
new SourceStream(
|
||||
@@ -88,7 +113,7 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
/// <param name="options"></param>
|
||||
public static RarArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
|
||||
{
|
||||
fileInfo.CheckNotNull(nameof(fileInfo));
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
return new RarArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
@@ -105,7 +130,13 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
/// <param name="options"></param>
|
||||
public static RarArchive Open(Stream stream, ReaderOptions? options = null)
|
||||
{
|
||||
stream.CheckNotNull(nameof(stream));
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new RarArchive(new SourceStream(stream, _ => null, options ?? new ReaderOptions()));
|
||||
}
|
||||
|
||||
@@ -119,7 +150,7 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.CheckNotNull(nameof(fileInfos));
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new RarArchive(
|
||||
new SourceStream(
|
||||
@@ -137,7 +168,7 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
streams.CheckNotNull(nameof(streams));
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new RarArchive(
|
||||
new SourceStream(
|
||||
|
||||
@@ -2,6 +2,8 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Rar;
|
||||
using SharpCompress.Common.Rar.Headers;
|
||||
@@ -42,7 +44,7 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
|
||||
{
|
||||
CheckIncomplete();
|
||||
return BitConverter.ToUInt32(
|
||||
parts.Select(fp => fp.FileHeader).Single(fh => !fh.IsSplitAfter).FileCrc,
|
||||
parts.Select(fp => fp.FileHeader).Single(fh => !fh.IsSplitAfter).FileCrc.NotNull(),
|
||||
0
|
||||
);
|
||||
}
|
||||
@@ -68,20 +70,50 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
|
||||
|
||||
public Stream OpenEntryStream()
|
||||
{
|
||||
RarStream stream;
|
||||
if (IsRarV3)
|
||||
{
|
||||
return new RarStream(
|
||||
stream = new RarStream(
|
||||
archive.UnpackV1.Value,
|
||||
FileHeader,
|
||||
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
|
||||
);
|
||||
}
|
||||
else
|
||||
{
|
||||
stream = new RarStream(
|
||||
archive.UnpackV2017.Value,
|
||||
FileHeader,
|
||||
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
|
||||
);
|
||||
}
|
||||
|
||||
return new RarStream(
|
||||
archive.UnpackV2017.Value,
|
||||
FileHeader,
|
||||
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
|
||||
);
|
||||
stream.Initialize();
|
||||
return stream;
|
||||
}
|
||||
|
||||
public async Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
RarStream stream;
|
||||
if (IsRarV3)
|
||||
{
|
||||
stream = new RarStream(
|
||||
archive.UnpackV1.Value,
|
||||
FileHeader,
|
||||
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
|
||||
);
|
||||
}
|
||||
else
|
||||
{
|
||||
stream = new RarStream(
|
||||
archive.UnpackV2017.Value,
|
||||
FileHeader,
|
||||
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
|
||||
);
|
||||
}
|
||||
|
||||
await stream.InitializeAsync(cancellationToken);
|
||||
return stream;
|
||||
}
|
||||
|
||||
public bool IsComplete
|
||||
|
||||
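A sketch of reading a RAR entry through the new async stream path above; the output file naming is illustrative.

using System.IO;
using System.Threading.Tasks;
using SharpCompress.Archives.Rar;

internal static class RarEntrySketch
{
    public static async Task RunAsync(string rarPath)
    {
        using var archive = RarArchive.Open(rarPath);
        foreach (var entry in archive.Entries)
        {
            if (entry.IsDirectory)
            {
                continue;
            }
            // New in this change: the RarStream is initialized before being handed back.
            using var source = await entry.OpenEntryStreamAsync();
            using var target = File.Create(Path.GetFileName(entry.Key ?? "entry.bin"));
            await source.CopyToAsync(target);
        }
    }
}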
@@ -21,7 +21,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
/// <param name="readerOptions"></param>
|
||||
public static SevenZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
filePath.CheckNotNullOrEmpty("filePath");
|
||||
filePath.NotNullOrEmpty("filePath");
|
||||
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
|
||||
}
|
||||
|
||||
@@ -32,7 +32,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
/// <param name="readerOptions"></param>
|
||||
public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
fileInfo.CheckNotNull("fileInfo");
|
||||
fileInfo.NotNull("fileInfo");
|
||||
return new SevenZipArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
@@ -52,7 +52,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.CheckNotNull(nameof(fileInfos));
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new SevenZipArchive(
|
||||
new SourceStream(
|
||||
@@ -73,7 +73,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
streams.CheckNotNull(nameof(streams));
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new SevenZipArchive(
|
||||
new SourceStream(
|
||||
@@ -91,7 +91,13 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
/// <param name="readerOptions"></param>
|
||||
public static SevenZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
stream.CheckNotNull("stream");
|
||||
stream.NotNull("stream");
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new SevenZipArchive(
|
||||
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
|
||||
);
|
||||
@@ -163,7 +169,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
{
|
||||
stream.Position = 0;
|
||||
var reader = new ArchiveReader();
|
||||
reader.Open(stream);
|
||||
reader.Open(stream, lookForHeader: ReaderOptions.LookForHeader);
|
||||
_database = reader.ReadDatabase(new PasswordProvider(ReaderOptions.Password));
|
||||
}
|
||||
}
|
||||
@@ -194,7 +200,10 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
new SevenZipReader(ReaderOptions, this);
|
||||
|
||||
public override bool IsSolid =>
|
||||
Entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder).Count() > 1;
|
||||
Entries
|
||||
.Where(x => !x.IsDirectory)
|
||||
.GroupBy(x => x.FilePart.Folder)
|
||||
.Any(folder => folder.Count() > 1);
|
||||
|
||||
public override long TotalSize =>
|
||||
_database?._packSizes.Aggregate(0L, (total, packSize) => total + packSize) ?? 0;
|
||||
|
||||
@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.SevenZip;

namespace SharpCompress.Archives.SevenZip;
@@ -10,6 +12,9 @@ public class SevenZipArchiveEntry : SevenZipEntry, IArchiveEntry

public Stream OpenEntryStream() => FilePart.GetCompressedStream();

public Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default) =>
Task.FromResult(OpenEntryStream());

public IArchive Archive { get; }

public bool IsComplete => true;

@@ -2,6 +2,8 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Tar;
|
||||
using SharpCompress.Common.Tar.Headers;
|
||||
@@ -22,7 +24,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static TarArchive Open(string filePath, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
filePath.CheckNotNullOrEmpty(nameof(filePath));
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
|
||||
}
|
||||
|
||||
@@ -33,7 +35,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static TarArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
fileInfo.CheckNotNull(nameof(fileInfo));
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
return new TarArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
@@ -53,7 +55,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.CheckNotNull(nameof(fileInfos));
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new TarArchive(
|
||||
new SourceStream(
|
||||
@@ -71,7 +73,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static TarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
streams.CheckNotNull(nameof(streams));
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new TarArchive(
|
||||
new SourceStream(
|
||||
@@ -89,7 +91,13 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static TarArchive Open(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
stream.CheckNotNull(nameof(stream));
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new TarArchive(
|
||||
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
|
||||
);
|
||||
@@ -172,7 +180,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
using (var entryStream = entry.OpenEntryStream())
|
||||
{
|
||||
using var memoryStream = new MemoryStream();
|
||||
entryStream.TransferTo(memoryStream);
|
||||
entryStream.CopyTo(memoryStream);
|
||||
memoryStream.Position = 0;
|
||||
var bytes = memoryStream.ToArray();
|
||||
|
||||
@@ -216,6 +224,11 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
closeStream
|
||||
);
|
||||
|
||||
protected override TarArchiveEntry CreateDirectoryEntry(
|
||||
string directoryPath,
|
||||
DateTime? modified
|
||||
) => new TarWritableArchiveEntry(this, directoryPath, modified);
|
||||
|
||||
protected override void SaveTo(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
@@ -224,15 +237,62 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
)
|
||||
{
|
||||
using var writer = new TarWriter(stream, new TarWriterOptions(options));
|
||||
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
|
||||
foreach (var entry in oldEntries.Concat(newEntries))
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
writer.Write(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
entry.LastModifiedTime,
|
||||
entry.Size
|
||||
);
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
writer.WriteDirectory(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entry.LastModifiedTime
|
||||
);
|
||||
}
|
||||
else
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
writer.Write(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
entry.LastModifiedTime,
|
||||
entry.Size
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected override async Task SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
IEnumerable<TarArchiveEntry> oldEntries,
|
||||
IEnumerable<TarArchiveEntry> newEntries,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
using var writer = new TarWriter(stream, new TarWriterOptions(options));
|
||||
foreach (var entry in oldEntries.Concat(newEntries))
|
||||
{
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
await writer
|
||||
.WriteDirectoryAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entry.LastModifiedTime,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
else
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
await writer
|
||||
.WriteAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
entry.LastModifiedTime,
|
||||
entry.Size,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
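A sketch of writing a tar that now round-trips explicit directory entries, using the Write/WriteDirectory split shown above; the names and CompressionType.None are illustrative.

using System;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
using SharpCompress.Writers;

internal static class TarDirectorySketch
{
    public static async Task RunAsync()
    {
        using var archive = TarArchive.Create();
        archive.AddDirectoryEntry("logs", DateTime.UtcNow);

        using var content = new MemoryStream(new byte[] { 42 });
        archive.AddEntry("logs/first.log", content, closeStream: false, size: content.Length);

        // Directory entries are emitted via WriteDirectoryAsync, file entries via WriteAsync.
        using var output = File.Create("logs.tar");
        await archive.SaveToAsync(output, new WriterOptions(CompressionType.None));
    }
}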
@@ -1,5 +1,7 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar;

@@ -10,7 +12,11 @@ public class TarArchiveEntry : TarEntry, IArchiveEntry
internal TarArchiveEntry(TarArchive archive, TarFilePart? part, CompressionType compressionType)
: base(part, compressionType) => Archive = archive;

public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream();
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();

public virtual Task<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
) => Task.FromResult(OpenEntryStream());

#region IArchiveEntry Members

@@ -9,7 +9,8 @@ namespace SharpCompress.Archives.Tar;
|
||||
internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiveEntry
|
||||
{
|
||||
private readonly bool closeStream;
|
||||
private readonly Stream stream;
|
||||
private readonly Stream? stream;
|
||||
private readonly bool isDirectory;
|
||||
|
||||
internal TarWritableArchiveEntry(
|
||||
TarArchive archive,
|
||||
@@ -27,6 +28,22 @@ internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiv
|
||||
Size = size;
|
||||
LastModifiedTime = lastModified;
|
||||
this.closeStream = closeStream;
|
||||
isDirectory = false;
|
||||
}
|
||||
|
||||
internal TarWritableArchiveEntry(
|
||||
TarArchive archive,
|
||||
string directoryPath,
|
||||
DateTime? lastModified
|
||||
)
|
||||
: base(archive, null, CompressionType.None)
|
||||
{
|
||||
stream = null;
|
||||
Key = directoryPath;
|
||||
Size = 0;
|
||||
LastModifiedTime = lastModified;
|
||||
closeStream = false;
|
||||
isDirectory = true;
|
||||
}
|
||||
|
||||
public override long Crc => 0;
|
||||
@@ -47,23 +64,27 @@ internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiv
|
||||
|
||||
public override bool IsEncrypted => false;
|
||||
|
||||
public override bool IsDirectory => false;
|
||||
public override bool IsDirectory => isDirectory;
|
||||
|
||||
public override bool IsSplitAfter => false;
|
||||
|
||||
internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();
|
||||
Stream IWritableArchiveEntry.Stream => stream;
|
||||
Stream IWritableArchiveEntry.Stream => stream ?? Stream.Null;
|
||||
|
||||
public override Stream OpenEntryStream()
|
||||
{
|
||||
if (stream is null)
|
||||
{
|
||||
return Stream.Null;
|
||||
}
|
||||
//ensure new stream is at the start, this could be reset
|
||||
stream.Seek(0, SeekOrigin.Begin);
|
||||
return NonDisposingStream.Create(stream);
|
||||
return SharpCompressStream.Create(stream, leaveOpen: true);
|
||||
}
|
||||
|
||||
internal override void Close()
|
||||
{
|
||||
if (closeStream)
|
||||
if (closeStream && stream is not null)
|
||||
{
|
||||
stream.Dispose();
|
||||
}
|
||||
|
||||
@@ -2,6 +2,8 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Zip;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
@@ -43,7 +45,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
filePath.CheckNotNullOrEmpty(nameof(filePath));
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
|
||||
}
|
||||
|
||||
@@ -54,7 +56,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
fileInfo.CheckNotNull(nameof(fileInfo));
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
return new ZipArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
@@ -74,7 +76,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.CheckNotNull(nameof(fileInfos));
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new ZipArchive(
|
||||
new SourceStream(
|
||||
@@ -92,7 +94,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
streams.CheckNotNull(nameof(streams));
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new ZipArchive(
|
||||
new SourceStream(
|
||||
@@ -110,30 +112,52 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
stream.CheckNotNull(nameof(stream));
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new ZipArchive(
|
||||
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
|
||||
);
|
||||
}
|
||||
|
||||
public static bool IsZipFile(string filePath, string? password = null) =>
|
||||
IsZipFile(new FileInfo(filePath), password);
|
||||
public static bool IsZipFile(
|
||||
string filePath,
|
||||
string? password = null,
|
||||
int bufferSize = ReaderOptions.DefaultBufferSize
|
||||
) => IsZipFile(new FileInfo(filePath), password, bufferSize);
|
||||
|
||||
public static bool IsZipFile(FileInfo fileInfo, string? password = null)
|
||||
public static bool IsZipFile(
|
||||
FileInfo fileInfo,
|
||||
string? password = null,
|
||||
int bufferSize = ReaderOptions.DefaultBufferSize
|
||||
)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
using Stream stream = fileInfo.OpenRead();
|
||||
return IsZipFile(stream, password);
|
||||
return IsZipFile(stream, password, bufferSize);
|
||||
}
|
||||
|
||||
public static bool IsZipFile(Stream stream, string? password = null)
|
||||
public static bool IsZipFile(
|
||||
Stream stream,
|
||||
string? password = null,
|
||||
int bufferSize = ReaderOptions.DefaultBufferSize
|
||||
)
|
||||
{
|
||||
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
|
||||
try
|
||||
{
|
||||
if (stream is not SharpCompressStream)
|
||||
{
|
||||
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
|
||||
}
|
||||
|
||||
var header = headerFactory
|
||||
.ReadStreamHeader(stream)
|
||||
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
|
||||
@@ -153,11 +177,20 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
}
|
||||
}
|
||||
|
||||
public static bool IsZipMulti(Stream stream, string? password = null)
|
||||
public static bool IsZipMulti(
|
||||
Stream stream,
|
||||
string? password = null,
|
||||
int bufferSize = ReaderOptions.DefaultBufferSize
|
||||
)
|
||||
{
|
||||
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
|
||||
try
|
||||
{
|
||||
if (stream is not SharpCompressStream)
|
||||
{
|
||||
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
|
||||
}
|
||||
|
||||
var header = headerFactory
|
||||
.ReadStreamHeader(stream)
|
||||
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
|
||||
@@ -196,7 +229,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
if (streams.Count() > 1) //test part 2 - true = multipart not split
|
||||
{
|
||||
streams[1].Position += 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
|
||||
var isZip = IsZipFile(streams[1], ReaderOptions.Password);
|
||||
var isZip = IsZipFile(streams[1], ReaderOptions.Password, ReaderOptions.BufferSize);
|
||||
streams[1].Position -= 4;
|
||||
if (isZip)
|
||||
{
|
||||
@@ -275,14 +308,59 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
)
|
||||
{
|
||||
using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
|
||||
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
|
||||
foreach (var entry in oldEntries.Concat(newEntries))
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
writer.Write(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
entry.LastModifiedTime
|
||||
);
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
writer.WriteDirectory(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entry.LastModifiedTime
|
||||
);
|
||||
}
|
||||
else
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
writer.Write(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
entry.LastModifiedTime
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected override async Task SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
IEnumerable<ZipArchiveEntry> oldEntries,
|
||||
IEnumerable<ZipArchiveEntry> newEntries,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
|
||||
foreach (var entry in oldEntries.Concat(newEntries))
|
||||
{
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
await writer
|
||||
.WriteDirectoryAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entry.LastModifiedTime,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
else
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
await writer
|
||||
.WriteAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -294,12 +372,17 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
bool closeStream
|
||||
) => new ZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
|
||||
|
||||
protected override ZipArchiveEntry CreateDirectoryEntry(
|
||||
string directoryPath,
|
||||
DateTime? modified
|
||||
) => new ZipWritableArchiveEntry(this, directoryPath, modified);
|
||||
|
||||
public static ZipArchive Create() => new();
|
||||
|
||||
protected override IReader CreateReaderForSolidExtraction()
|
||||
{
|
||||
var stream = Volumes.Single().Stream;
|
||||
stream.Position = 0;
|
||||
((IStreamStack)stream).StackSeek(0);
|
||||
return ZipReader.Open(stream, ReaderOptions, Entries);
|
||||
}
|
||||
}
|
||||
|
||||
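A sketch combining the widened IsZipFile signature with the new directory-entry support shown above; the paths and bufferSize value are illustrative.

using System;
using System.IO;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Writers;

internal static class ZipSketch
{
    public static void Run()
    {
        using var archive = ZipArchive.Create();
        archive.AddDirectoryEntry("empty-folder", DateTime.UtcNow);

        using (var output = File.Create("with-folder.zip"))
        {
            archive.SaveTo(output, new WriterOptions(CompressionType.Deflate));
        }

        // The detection helpers now take an optional bufferSize (default ReaderOptions.DefaultBufferSize).
        Console.WriteLine(ZipArchive.IsZipFile("with-folder.zip", password: null, bufferSize: 81920));
    }
}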
@@ -1,5 +1,7 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip;

namespace SharpCompress.Archives.Zip;
@@ -9,7 +11,11 @@ public class ZipArchiveEntry : ZipEntry, IArchiveEntry
internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart? part)
: base(part) => Archive = archive;

public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream();
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();

public virtual Task<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
) => Task.FromResult(OpenEntryStream());

#region IArchiveEntry Members

@@ -18,6 +24,4 @@ public class ZipArchiveEntry : ZipEntry, IArchiveEntry
public bool IsComplete => true;

#endregion

public string? Comment => ((SeekableZipFilePart)Parts.Single()).Comment;
}

@@ -9,7 +9,8 @@ namespace SharpCompress.Archives.Zip;
|
||||
internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
|
||||
{
|
||||
private readonly bool closeStream;
|
||||
private readonly Stream stream;
|
||||
private readonly Stream? stream;
|
||||
private readonly bool isDirectory;
|
||||
private bool isDisposed;
|
||||
|
||||
internal ZipWritableArchiveEntry(
|
||||
@@ -27,6 +28,22 @@ internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
|
||||
Size = size;
|
||||
LastModifiedTime = lastModified;
|
||||
this.closeStream = closeStream;
|
||||
isDirectory = false;
|
||||
}
|
||||
|
||||
internal ZipWritableArchiveEntry(
|
||||
ZipArchive archive,
|
||||
string directoryPath,
|
||||
DateTime? lastModified
|
||||
)
|
||||
: base(archive, null)
|
||||
{
|
||||
stream = null;
|
||||
Key = directoryPath;
|
||||
Size = 0;
|
||||
LastModifiedTime = lastModified;
|
||||
closeStream = false;
|
||||
isDirectory = true;
|
||||
}
|
||||
|
||||
public override long Crc => 0;
|
||||
@@ -47,24 +64,28 @@ internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
|
||||
|
||||
public override bool IsEncrypted => false;
|
||||
|
||||
public override bool IsDirectory => false;
|
||||
public override bool IsDirectory => isDirectory;
|
||||
|
||||
public override bool IsSplitAfter => false;
|
||||
|
||||
internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();
|
||||
|
||||
Stream IWritableArchiveEntry.Stream => stream;
|
||||
Stream IWritableArchiveEntry.Stream => stream ?? Stream.Null;
|
||||
|
||||
public override Stream OpenEntryStream()
|
||||
{
|
||||
if (stream is null)
|
||||
{
|
||||
return Stream.Null;
|
||||
}
|
||||
//ensure new stream is at the start, this could be reset
|
||||
stream.Seek(0, SeekOrigin.Begin);
|
||||
return NonDisposingStream.Create(stream);
|
||||
return SharpCompressStream.Create(stream, leaveOpen: true);
|
||||
}
|
||||
|
||||
internal override void Close()
|
||||
{
|
||||
if (closeStream && !isDisposed)
|
||||
if (closeStream && !isDisposed && stream is not null)
|
||||
{
|
||||
stream.Dispose();
|
||||
isDisposed = true;
|
||||
|
||||
@@ -1,3 +1,7 @@
using System;
using System.Runtime.CompilerServices;

[assembly: CLSCompliant(true)]
[assembly: InternalsVisibleTo(
"SharpCompress.Test,PublicKey=0024000004800000940000000602000000240000525341310004000001000100158bebf1433f76dffc356733c138babea7a47536c65ed8009b16372c6f4edbb20554db74a62687f56b97c20a6ce8c4b123280279e33c894e7b3aa93ab3c573656fde4db576cfe07dba09619ead26375b25d2c4a8e43f7be257d712b0dd2eb546f67adb09281338618a58ac834fc038dd7e2740a7ab3591826252e4f4516306dc"
)]

@@ -1,33 +0,0 @@
|
||||
using System.Buffers;
|
||||
|
||||
namespace SharpCompress;
|
||||
|
||||
internal static class BufferPool
|
||||
{
|
||||
/// <summary>
|
||||
/// gets a buffer from the pool
|
||||
/// </summary>
|
||||
/// <param name="bufferSize">size of the buffer</param>
|
||||
/// <returns>the buffer</returns>
|
||||
public static byte[] Rent(int bufferSize)
|
||||
{
|
||||
#if NETCOREAPP || NETSTANDARD2_1_OR_GREATER
|
||||
return ArrayPool<byte>.Shared.Rent(bufferSize);
|
||||
#else
|
||||
return new byte[bufferSize];
|
||||
#endif
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// returns a buffer to the pool
|
||||
/// </summary>
|
||||
/// <param name="buffer">the buffer to return</param>
|
||||
public static void Return(byte[] buffer)
|
||||
{
|
||||
#if NETCOREAPP || NETSTANDARD2_1_OR_GREATER
|
||||
ArrayPool<byte>.Shared.Return(buffer);
|
||||
#else
|
||||
// no-op
|
||||
#endif
|
||||
}
|
||||
}
|
||||
src/SharpCompress/Common/Arc/ArcEntry.cs (new file, 60 lines)
@@ -0,0 +1,60 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.GZip;
|
||||
using SharpCompress.Common.Tar;
|
||||
|
||||
namespace SharpCompress.Common.Arc
|
||||
{
|
||||
public class ArcEntry : Entry
|
||||
{
|
||||
private readonly ArcFilePart? _filePart;
|
||||
|
||||
internal ArcEntry(ArcFilePart? filePart)
|
||||
{
|
||||
_filePart = filePart;
|
||||
}
|
||||
|
||||
public override long Crc
|
||||
{
|
||||
get
|
||||
{
|
||||
if (_filePart == null)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
return _filePart.Header.Crc16;
|
||||
}
|
||||
}
|
||||
|
||||
public override string? Key => _filePart?.Header.Name;
|
||||
|
||||
public override string? LinkTarget => null;
|
||||
|
||||
public override long CompressedSize => _filePart?.Header.CompressedSize ?? 0;
|
||||
|
||||
public override CompressionType CompressionType =>
|
||||
_filePart?.Header.CompressionMethod ?? CompressionType.Unknown;
|
||||
|
||||
public override long Size => throw new NotImplementedException();
|
||||
|
||||
public override DateTime? LastModifiedTime => null;
|
||||
|
||||
public override DateTime? CreatedTime => null;
|
||||
|
||||
public override DateTime? LastAccessedTime => null;
|
||||
|
||||
public override DateTime? ArchivedTime => null;
|
||||
|
||||
public override bool IsEncrypted => false;
|
||||
|
||||
public override bool IsDirectory => false;
|
||||
|
||||
public override bool IsSplitAfter => false;
|
||||
|
||||
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
|
||||
}
|
||||
}
|
||||
src/SharpCompress/Common/Arc/ArcEntryHeader.cs (new file, 76 lines)
@@ -0,0 +1,76 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
|
||||
namespace SharpCompress.Common.Arc
|
||||
{
|
||||
public class ArcEntryHeader
|
||||
{
|
||||
public ArchiveEncoding ArchiveEncoding { get; }
|
||||
public CompressionType CompressionMethod { get; private set; }
|
||||
public string? Name { get; private set; }
|
||||
public long CompressedSize { get; private set; }
|
||||
public DateTime DateTime { get; private set; }
|
||||
public int Crc16 { get; private set; }
|
||||
public long OriginalSize { get; private set; }
|
||||
public long DataStartPosition { get; private set; }
|
||||
|
||||
public ArcEntryHeader(ArchiveEncoding archiveEncoding)
|
||||
{
|
||||
this.ArchiveEncoding = archiveEncoding;
|
||||
}
|
||||
|
||||
public ArcEntryHeader? ReadHeader(Stream stream)
|
||||
{
|
||||
byte[] headerBytes = new byte[29];
|
||||
if (stream.Read(headerBytes, 0, headerBytes.Length) != headerBytes.Length)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
DataStartPosition = stream.Position;
|
||||
return LoadFrom(headerBytes);
|
||||
}
|
||||
|
||||
public ArcEntryHeader LoadFrom(byte[] headerBytes)
|
||||
{
|
||||
CompressionMethod = GetCompressionType(headerBytes[1]);
|
||||
|
||||
// Read name
|
||||
int nameEnd = Array.IndexOf(headerBytes, (byte)0, 1); // Find null terminator
|
||||
Name = Encoding.UTF8.GetString(headerBytes, 2, nameEnd > 0 ? nameEnd - 2 : 12);
|
||||
|
||||
int offset = 15;
|
||||
CompressedSize = BitConverter.ToUInt32(headerBytes, offset);
|
||||
offset += 4;
|
||||
uint rawDateTime = BitConverter.ToUInt32(headerBytes, offset);
|
||||
DateTime = ConvertToDateTime(rawDateTime);
|
||||
offset += 4;
|
||||
Crc16 = BitConverter.ToUInt16(headerBytes, offset);
|
||||
offset += 2;
|
||||
OriginalSize = BitConverter.ToUInt32(headerBytes, offset);
|
||||
return this;
|
||||
}
|
||||
|
||||
private CompressionType GetCompressionType(byte value)
|
||||
{
|
||||
return value switch
|
||||
{
|
||||
1 or 2 => CompressionType.None,
|
||||
3 => CompressionType.Packed,
|
||||
4 => CompressionType.Squeezed,
|
||||
5 or 6 or 7 or 8 => CompressionType.Crunched,
|
||||
9 => CompressionType.Squashed,
|
||||
10 => CompressionType.Crushed,
|
||||
11 => CompressionType.Distilled,
|
||||
_ => CompressionType.Unknown,
|
||||
};
|
||||
}
|
||||
|
||||
public static DateTime ConvertToDateTime(long rawDateTime)
|
||||
{
|
||||
// Convert Unix timestamp to DateTime (UTC)
|
||||
return DateTimeOffset.FromUnixTimeSeconds(rawDateTime).UtcDateTime;
|
||||
}
|
||||
}
|
||||
}
|
||||
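ReadHeader consumes a fixed 29-byte record and remembers where the data payload starts, so a seekable ARC file can be walked by reading one header, then seeking past CompressedSize bytes. A sketch of that loop, assuming a seekable stream; the loop shape and the stop condition are assumptions for illustration, not code from this diff:

// Hypothetical walker over a seekable ARC stream.
var encoding = new ArchiveEncoding();
while (stream.Position < stream.Length)
{
    var header = new ArcEntryHeader(encoding).ReadHeader(stream);
    if (header is null)
    {
        break; // truncated record or end of archive
    }
    Console.WriteLine($"{header.Name}: {header.CompressedSize} -> {header.OriginalSize} bytes");
    stream.Position = header.DataStartPosition + header.CompressedSize; // skip the payload
}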
src/SharpCompress/Common/Arc/ArcFilePart.cs (new file)
@@ -0,0 +1,83 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.GZip;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Lzw;
using SharpCompress.Compressors.RLE90;
using SharpCompress.Compressors.Squeezed;
using SharpCompress.IO;

namespace SharpCompress.Common.Arc
{
public class ArcFilePart : FilePart
{
private readonly Stream? _stream;

internal ArcFilePart(ArcEntryHeader localArcHeader, Stream? seekableStream)
: base(localArcHeader.ArchiveEncoding)
{
_stream = seekableStream;
Header = localArcHeader;
}

internal ArcEntryHeader Header { get; set; }

internal override string? FilePartName => Header.Name;

internal override Stream GetCompressedStream()
{
if (_stream != null)
{
Stream compressedStream;
switch (Header.CompressionMethod)
{
case CompressionType.None:
compressedStream = new ReadOnlySubStream(
_stream,
Header.DataStartPosition,
Header.CompressedSize
);
break;
case CompressionType.Packed:
compressedStream = new RunLength90Stream(
_stream,
(int)Header.CompressedSize
);
break;
case CompressionType.Squeezed:
compressedStream = new SqueezeStream(_stream, (int)Header.CompressedSize);
break;
case CompressionType.Crunched:
if (Header.OriginalSize > 128 * 1024)
{
throw new NotSupportedException(
"CompressionMethod: "
+ Header.CompressionMethod
+ " with size > 128KB"
);
}
compressedStream = new ArcLzwStream(
_stream,
(int)Header.CompressedSize,
true
);
break;
default:
throw new NotSupportedException(
"CompressionMethod: " + Header.CompressionMethod
);
}
return compressedStream;
}
return _stream.NotNull();
}

internal override Stream? GetRawStream() => _stream;
}
}
src/SharpCompress/Common/Arc/ArcVolume.cs (new file)
@@ -0,0 +1,16 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Readers;

namespace SharpCompress.Common.Arc
{
public class ArcVolume : Volume
{
public ArcVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
: base(stream, readerOptions, index) { }
}
}
@@ -1,9 +0,0 @@
using System;

namespace SharpCompress.Common;

public class ArchiveException : Exception
{
public ArchiveException(string message)
: base(message) { }
}
@@ -6,5 +6,7 @@ public enum ArchiveType
Zip,
Tar,
SevenZip,
GZip
GZip,
Arc,
Arj,
}
src/SharpCompress/Common/Arj/ArjEntry.cs (new file)
@@ -0,0 +1,58 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.Arc;
using SharpCompress.Common.Arj.Headers;

namespace SharpCompress.Common.Arj
{
public class ArjEntry : Entry
{
private readonly ArjFilePart _filePart;

internal ArjEntry(ArjFilePart filePart)
{
_filePart = filePart;
}

public override long Crc => _filePart.Header.OriginalCrc32;

public override string? Key => _filePart?.Header.Name;

public override string? LinkTarget => null;

public override long CompressedSize => _filePart?.Header.CompressedSize ?? 0;

public override CompressionType CompressionType
{
get
{
if (_filePart.Header.CompressionMethod == CompressionMethod.Stored)
{
return CompressionType.None;
}
return CompressionType.ArjLZ77;
}
}

public override long Size => _filePart?.Header.OriginalSize ?? 0;

public override DateTime? LastModifiedTime => _filePart.Header.DateTimeModified.DateTime;

public override DateTime? CreatedTime => _filePart.Header.DateTimeCreated.DateTime;

public override DateTime? LastAccessedTime => _filePart.Header.DateTimeAccessed.DateTime;

public override DateTime? ArchivedTime => null;

public override bool IsEncrypted => false;

public override bool IsDirectory => _filePart.Header.FileType == FileType.Directory;

public override bool IsSplitAfter => false;

internal override IEnumerable<FilePart> Parts => _filePart.Empty();
}
}
src/SharpCompress/Common/Arj/ArjFilePart.cs (new file)
@@ -0,0 +1,72 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.Arj.Headers;
using SharpCompress.Compressors.Arj;
using SharpCompress.IO;

namespace SharpCompress.Common.Arj
{
public class ArjFilePart : FilePart
{
private readonly Stream _stream;
internal ArjLocalHeader Header { get; set; }

internal ArjFilePart(ArjLocalHeader localArjHeader, Stream seekableStream)
: base(localArjHeader.ArchiveEncoding)
{
_stream = seekableStream;
Header = localArjHeader;
}

internal override string? FilePartName => Header.Name;

internal override Stream GetCompressedStream()
{
if (_stream != null)
{
Stream compressedStream;
switch (Header.CompressionMethod)
{
case CompressionMethod.Stored:
compressedStream = new ReadOnlySubStream(
_stream,
Header.DataStartPosition,
Header.CompressedSize
);
break;
case CompressionMethod.CompressedMost:
case CompressionMethod.Compressed:
case CompressionMethod.CompressedFaster:
if (Header.OriginalSize > 128 * 1024)
{
throw new NotSupportedException(
"CompressionMethod: "
+ Header.CompressionMethod
+ " with size > 128KB"
);
}
compressedStream = new LhaStream<Lh7DecoderCfg>(
_stream,
(int)Header.OriginalSize
);
break;
case CompressionMethod.CompressedFastest:
compressedStream = new LHDecoderStream(_stream, (int)Header.OriginalSize);
break;
default:
throw new NotSupportedException(
"CompressionMethod: " + Header.CompressionMethod
);
}
return compressedStream;
}
return _stream.NotNull();
}

internal override Stream GetRawStream() => _stream;
}
}
src/SharpCompress/Common/Arj/ArjVolume.cs (new file)
@@ -0,0 +1,36 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Readers;

namespace SharpCompress.Common.Arj
{
public class ArjVolume : Volume
{
public ArjVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
: base(stream, readerOptions, index) { }

public override bool IsFirstVolume
{
get { return true; }
}

/// <summary>
/// ArjArchive is part of a multi-part archive.
/// </summary>
public override bool IsMultiVolume
{
get { return false; }
}

internal IEnumerable<ArjFilePart> GetVolumeFileParts()
{
return new List<ArjFilePart>();
}
}
}
src/SharpCompress/Common/Arj/Headers/ArjHeader.cs (new file)
@@ -0,0 +1,142 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Crypto;

namespace SharpCompress.Common.Arj.Headers
{
public enum ArjHeaderType
{
MainHeader,
LocalHeader,
}

public abstract class ArjHeader
{
private const int FIRST_HDR_SIZE = 34;
private const ushort ARJ_MAGIC = 0xEA60;

public ArjHeader(ArjHeaderType type)
{
ArjHeaderType = type;
}

public ArjHeaderType ArjHeaderType { get; }
public byte Flags { get; set; }
public FileType FileType { get; set; }

public abstract ArjHeader? Read(Stream reader);

public byte[] ReadHeader(Stream stream)
{
// check for magic bytes
Span<byte> magic = stackalloc byte[2];
if (stream.Read(magic) != 2)
{
return Array.Empty<byte>();
}

var magicValue = (ushort)(magic[0] | magic[1] << 8);
if (magicValue != ARJ_MAGIC)
{
throw new InvalidDataException("Not an ARJ file (wrong magic bytes)");
}

// read header_size
byte[] headerBytes = new byte[2];
stream.Read(headerBytes, 0, 2);
var headerSize = (ushort)(headerBytes[0] | headerBytes[1] << 8);
if (headerSize < 1)
{
return Array.Empty<byte>();
}

var body = new byte[headerSize];
var read = stream.Read(body, 0, headerSize);
if (read < headerSize)
{
return Array.Empty<byte>();
}

byte[] crc = new byte[4];
read = stream.Read(crc, 0, 4);
var checksum = Crc32Stream.Compute(body);
// Compute the hash value
if (checksum != BitConverter.ToUInt32(crc, 0))
{
throw new InvalidDataException("Header checksum is invalid");
}
return body;
}

protected List<byte[]> ReadExtendedHeaders(Stream reader)
{
List<byte[]> extendedHeader = new List<byte[]>();
byte[] buffer = new byte[2];

while (true)
{
int bytesRead = reader.Read(buffer, 0, 2);
if (bytesRead < 2)
{
throw new EndOfStreamException(
"Unexpected end of stream while reading extended header size."
);
}

var extHeaderSize = (ushort)(buffer[0] | (buffer[1] << 8));
if (extHeaderSize == 0)
{
return extendedHeader;
}

byte[] header = new byte[extHeaderSize];
bytesRead = reader.Read(header, 0, extHeaderSize);
if (bytesRead < extHeaderSize)
{
throw new EndOfStreamException(
"Unexpected end of stream while reading extended header data."
);
}

byte[] crc = new byte[4];
bytesRead = reader.Read(crc, 0, 4);
if (bytesRead < 4)
{
throw new EndOfStreamException(
"Unexpected end of stream while reading extended header CRC."
);
}

var checksum = Crc32Stream.Compute(header);
if (checksum != BitConverter.ToUInt32(crc, 0))
{
throw new InvalidDataException("Extended header checksum is invalid");
}

extendedHeader.Add(header);
}
}

// Flag helpers
public bool IsGabled => (Flags & 0x01) != 0;
public bool IsAnsiPage => (Flags & 0x02) != 0;
public bool IsVolume => (Flags & 0x04) != 0;
public bool IsArjProtected => (Flags & 0x08) != 0;
public bool IsPathSym => (Flags & 0x10) != 0;
public bool IsBackup => (Flags & 0x20) != 0;
public bool IsSecured => (Flags & 0x40) != 0;
public bool IsAltName => (Flags & 0x80) != 0;

public static FileType FileTypeFromByte(byte value)
{
return Enum.IsDefined(typeof(FileType), value)
? (FileType)value
: Headers.FileType.Unknown;
}
}
}
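Every ARJ header record shares the same framing: the 0xEA60 magic, a two-byte little-endian size, the body, a CRC-32 of the body, then extended headers until a zero-length size field. A sketch of how the two concrete headers above might be driven over a stream; the loop structure and payload skip are assumptions for illustration, not code from this diff:

// Hypothetical: read the main header, then local headers until one fails to parse.
var encoding = new ArchiveEncoding();
var main = new ArjMainHeader(encoding).Read(stream) as ArjMainHeader;
Console.WriteLine($"Archive: {main?.Name}, created {main?.CreationDateTime}");

while (new ArjLocalHeader(encoding).Read(stream) is ArjLocalHeader local)
{
    Console.WriteLine($"{local.Name}: {local.CompressedSize} bytes ({local.CompressionMethod})");
    stream.Position = local.DataStartPosition + local.CompressedSize; // skip the entry payload
}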
src/SharpCompress/Common/Arj/Headers/ArjLocalHeader.cs (new file)
@@ -0,0 +1,161 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading.Tasks;

namespace SharpCompress.Common.Arj.Headers
{
public class ArjLocalHeader : ArjHeader
{
public ArchiveEncoding ArchiveEncoding { get; }
public long DataStartPosition { get; protected set; }

public byte ArchiverVersionNumber { get; set; }
public byte MinVersionToExtract { get; set; }
public HostOS HostOS { get; set; }
public CompressionMethod CompressionMethod { get; set; }
public DosDateTime DateTimeModified { get; set; } = new DosDateTime(0);
public long CompressedSize { get; set; }
public long OriginalSize { get; set; }
public long OriginalCrc32 { get; set; }
public int FileSpecPosition { get; set; }
public int FileAccessMode { get; set; }
public byte FirstChapter { get; set; }
public byte LastChapter { get; set; }
public long ExtendedFilePosition { get; set; }
public DosDateTime DateTimeAccessed { get; set; } = new DosDateTime(0);
public DosDateTime DateTimeCreated { get; set; } = new DosDateTime(0);
public long OriginalSizeEvenForVolumes { get; set; }
public string Name { get; set; } = string.Empty;
public string Comment { get; set; } = string.Empty;

private const byte StdHdrSize = 30;
private const byte R9HdrSize = 46;

public ArjLocalHeader(ArchiveEncoding archiveEncoding)
: base(ArjHeaderType.LocalHeader)
{
ArchiveEncoding =
archiveEncoding ?? throw new ArgumentNullException(nameof(archiveEncoding));
}

public override ArjHeader? Read(Stream stream)
{
var body = ReadHeader(stream);
if (body.Length > 0)
{
ReadExtendedHeaders(stream);
var header = LoadFrom(body);
header.DataStartPosition = stream.Position;
return header;
}
return null;
}

public ArjLocalHeader LoadFrom(byte[] headerBytes)
{
int offset = 0;

int ReadInt16()
{
if (offset + 1 >= headerBytes.Length)
{
throw new EndOfStreamException();
}
var v = headerBytes[offset] & 0xFF | (headerBytes[offset + 1] & 0xFF) << 8;
offset += 2;
return v;
}
long ReadInt32()
{
if (offset + 3 >= headerBytes.Length)
{
throw new EndOfStreamException();
}
long v =
headerBytes[offset] & 0xFF
| (headerBytes[offset + 1] & 0xFF) << 8
| (headerBytes[offset + 2] & 0xFF) << 16
| (headerBytes[offset + 3] & 0xFF) << 24;
offset += 4;
return v;
}

byte headerSize = headerBytes[offset++];
ArchiverVersionNumber = headerBytes[offset++];
MinVersionToExtract = headerBytes[offset++];
HostOS hostOS = (HostOS)headerBytes[offset++];
Flags = headerBytes[offset++];
CompressionMethod = CompressionMethodFromByte(headerBytes[offset++]);
FileType = FileTypeFromByte(headerBytes[offset++]);

offset++; // Skip 1 byte

var rawTimestamp = ReadInt32();
DateTimeModified =
rawTimestamp != 0 ? new DosDateTime(rawTimestamp) : new DosDateTime(0);

CompressedSize = ReadInt32();
OriginalSize = ReadInt32();
OriginalCrc32 = ReadInt32();
FileSpecPosition = ReadInt16();
FileAccessMode = ReadInt16();

FirstChapter = headerBytes[offset++];
LastChapter = headerBytes[offset++];

ExtendedFilePosition = 0;
OriginalSizeEvenForVolumes = 0;

if (headerSize > StdHdrSize)
{
ExtendedFilePosition = ReadInt32();

if (headerSize >= R9HdrSize)
{
rawTimestamp = ReadInt32();
DateTimeAccessed =
rawTimestamp != 0 ? new DosDateTime(rawTimestamp) : new DosDateTime(0);
rawTimestamp = ReadInt32();
DateTimeCreated =
rawTimestamp != 0 ? new DosDateTime(rawTimestamp) : new DosDateTime(0);
OriginalSizeEvenForVolumes = ReadInt32();
}
}

Name = Encoding.ASCII.GetString(
headerBytes,
offset,
Array.IndexOf(headerBytes, (byte)0, offset) - offset
);
offset += Name.Length + 1;

Comment = Encoding.ASCII.GetString(
headerBytes,
offset,
Array.IndexOf(headerBytes, (byte)0, offset) - offset
);
offset += Comment.Length + 1;

return this;
}

public static CompressionMethod CompressionMethodFromByte(byte value)
{
return value switch
{
0 => CompressionMethod.Stored,
1 => CompressionMethod.CompressedMost,
2 => CompressionMethod.Compressed,
3 => CompressionMethod.CompressedFaster,
4 => CompressionMethod.CompressedFastest,
8 => CompressionMethod.NoDataNoCrc,
9 => CompressionMethod.NoData,
_ => CompressionMethod.Unknown,
};
}
}
}
src/SharpCompress/Common/Arj/Headers/ArjMainHeader.cs (new file)
@@ -0,0 +1,138 @@
using System;
using System.IO;
using System.Text;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Crypto;

namespace SharpCompress.Common.Arj.Headers
{
public class ArjMainHeader : ArjHeader
{
private const int FIRST_HDR_SIZE = 34;
private const ushort ARJ_MAGIC = 0xEA60;

public ArchiveEncoding ArchiveEncoding { get; }

public int ArchiverVersionNumber { get; private set; }
public int MinVersionToExtract { get; private set; }
public HostOS HostOs { get; private set; }
public int SecurityVersion { get; private set; }
public DosDateTime CreationDateTime { get; private set; } = new DosDateTime(0);
public long CompressedSize { get; private set; }
public long ArchiveSize { get; private set; }
public long SecurityEnvelope { get; private set; }
public int FileSpecPosition { get; private set; }
public int SecurityEnvelopeLength { get; private set; }
public int EncryptionVersion { get; private set; }
public int LastChapter { get; private set; }

public int ArjProtectionFactor { get; private set; }
public int Flags2 { get; private set; }
public string Name { get; private set; } = string.Empty;
public string Comment { get; private set; } = string.Empty;

public ArjMainHeader(ArchiveEncoding archiveEncoding)
: base(ArjHeaderType.MainHeader)
{
ArchiveEncoding =
archiveEncoding ?? throw new ArgumentNullException(nameof(archiveEncoding));
}

public override ArjHeader? Read(Stream stream)
{
var body = ReadHeader(stream);
ReadExtendedHeaders(stream);
return LoadFrom(body);
}

public ArjMainHeader LoadFrom(byte[] headerBytes)
{
var offset = 1;

byte ReadByte()
{
if (offset >= headerBytes.Length)
{
throw new EndOfStreamException();
}
return (byte)(headerBytes[offset++] & 0xFF);
}

int ReadInt16()
{
if (offset + 1 >= headerBytes.Length)
{
throw new EndOfStreamException();
}
var v = headerBytes[offset] & 0xFF | (headerBytes[offset + 1] & 0xFF) << 8;
offset += 2;
return v;
}

long ReadInt32()
{
if (offset + 3 >= headerBytes.Length)
{
throw new EndOfStreamException();
}
long v =
headerBytes[offset] & 0xFF
| (headerBytes[offset + 1] & 0xFF) << 8
| (headerBytes[offset + 2] & 0xFF) << 16
| (headerBytes[offset + 3] & 0xFF) << 24;
offset += 4;
return v;
}
string ReadNullTerminatedString(byte[] x, int startIndex)
{
var result = new StringBuilder();
int i = startIndex;

while (i < x.Length && x[i] != 0)
{
result.Append((char)x[i]);
i++;
}

// Skip the null terminator
i++;
if (i < x.Length)
{
byte[] remainder = new byte[x.Length - i];
Array.Copy(x, i, remainder, 0, remainder.Length);
x = remainder;
}

return result.ToString();
}

ArchiverVersionNumber = ReadByte();
MinVersionToExtract = ReadByte();

var hostOsByte = ReadByte();
HostOs = hostOsByte <= 11 ? (HostOS)hostOsByte : HostOS.Unknown;

Flags = ReadByte();
SecurityVersion = ReadByte();
FileType = FileTypeFromByte(ReadByte());

offset++; // skip reserved

CreationDateTime = new DosDateTime((int)ReadInt32());
CompressedSize = ReadInt32();
ArchiveSize = ReadInt32();

SecurityEnvelope = ReadInt32();
FileSpecPosition = ReadInt16();
SecurityEnvelopeLength = ReadInt16();

EncryptionVersion = ReadByte();
LastChapter = ReadByte();

Name = ReadNullTerminatedString(headerBytes, offset);
Comment = ReadNullTerminatedString(headerBytes, offset + 1 + Name.Length);

return this;
}
}
}
src/SharpCompress/Common/Arj/Headers/CompressionMethod.cs (new file)
@@ -0,0 +1,20 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace SharpCompress.Common.Arj.Headers
{
public enum CompressionMethod
{
Stored = 0,
CompressedMost = 1,
Compressed = 2,
CompressedFaster = 3,
CompressedFastest = 4,
NoDataNoCrc = 8,
NoData = 9,
Unknown,
}
}
src/SharpCompress/Common/Arj/Headers/DosDateTime.cs (new file)
@@ -0,0 +1,37 @@
using System;

namespace SharpCompress.Common.Arj.Headers
{
public class DosDateTime
{
public DateTime DateTime { get; }

public DosDateTime(long dosValue)
{
// Ensure only the lower 32 bits are used
int value = unchecked((int)(dosValue & 0xFFFFFFFF));

var date = (value >> 16) & 0xFFFF;
var time = value & 0xFFFF;

var day = date & 0x1F;
var month = (date >> 5) & 0x0F;
var year = ((date >> 9) & 0x7F) + 1980;

var second = (time & 0x1F) * 2;
var minute = (time >> 5) & 0x3F;
var hour = (time >> 11) & 0x1F;

try
{
DateTime = new DateTime(year, month, day, hour, minute, second);
}
catch
{
DateTime = DateTime.MinValue;
}
}

public override string ToString() => DateTime.ToString("yyyy-MM-dd HH:mm:ss");
}
}
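As a worked example of the bit layout above (the value is chosen purely for illustration): the DOS timestamp 0x5A8C7E30 splits into a date word 0x5A8C and a time word 0x7E30. The date word gives day 01100 = 12, month 0100 = 4, and year bits 0101101 = 45, so 1980 + 45 = 2025; the time word gives hour 01111 = 15, minute 110001 = 49, and a two-second field 10000 = 16, i.e. 32 seconds. The decoded value is therefore 2025-04-12 15:49:32, and an out-of-range field (for example month 0) falls back to DateTime.MinValue via the catch block.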
src/SharpCompress/Common/Arj/Headers/FileType.cs (new file)
@@ -0,0 +1,13 @@
namespace SharpCompress.Common.Arj.Headers
{
public enum FileType : byte
{
Binary = 0,
Text7Bit = 1,
CommentHeader = 2,
Directory = 3,
VolumeLabel = 4,
ChapterLabel = 5,
Unknown = 255,
}
}
src/SharpCompress/Common/Arj/Headers/HostOS.cs (new file)
@@ -0,0 +1,19 @@
namespace SharpCompress.Common.Arj.Headers
{
public enum HostOS
{
MsDos = 0,
PrimOS = 1,
Unix = 2,
Amiga = 3,
MacOs = 4,
OS2 = 5,
AppleGS = 6,
AtariST = 7,
NeXT = 8,
VaxVMS = 9,
Win95 = 10,
Win32 = 11,
Unknown = 255,
}
}
@@ -16,5 +16,18 @@ public enum CompressionType
Unknown,
Deflate64,
Shrink,
Lzw
Lzw,
Reduce1,
Reduce2,
Reduce3,
Reduce4,
Explode,
Squeezed,
Packed,
Crunched,
Squashed,
Crushed,
Distilled,
ZStandard,
ArjLZ77,
}

@@ -1,9 +0,0 @@
using System;

namespace SharpCompress.Common;

public class CryptographicException : Exception
{
public CryptographicException(string message)
: base(message) { }
}
@@ -1,11 +1,35 @@
using System;
using System.IO;
using System.IO.Compression;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
using SharpCompress.Readers;

namespace SharpCompress.Common;

public class EntryStream : Stream
public class EntryStream : Stream, IStreamStack
{
#if DEBUG_STREAMS
long IStreamStack.InstanceId { get; set; }
#endif
int IStreamStack.DefaultBufferSize { get; set; }

Stream IStreamStack.BaseStream() => _stream;

int IStreamStack.BufferSize
{
get => 0;
set { }
}
int IStreamStack.BufferPosition
{
get => 0;
set { }
}

void IStreamStack.SetPosition(long position) { }

private readonly IReader _reader;
private readonly Stream _stream;
private bool _completed;
@@ -15,6 +39,9 @@ public class EntryStream : Stream
{
_reader = reader;
_stream = stream;
#if DEBUG_STREAMS
this.DebugConstruct(typeof(EntryStream));
#endif
}

/// <summary>
@@ -26,21 +53,79 @@ public class EntryStream : Stream
_completed = true;
}

/// <summary>
/// Asynchronously skip the rest of the entry stream.
/// </summary>
public async Task SkipEntryAsync(CancellationToken cancellationToken = default)
{
await this.SkipAsync(cancellationToken).ConfigureAwait(false);
_completed = true;
}

protected override void Dispose(bool disposing)
{
if (!(_completed || _reader.Cancelled))
{
SkipEntry();
}
if (_isDisposed)
{
return;
}
_isDisposed = true;
if (!(_completed || _reader.Cancelled))
{
SkipEntry();
}

//Need a safe standard approach to this - it's okay for compression to overreads. Handling needs to be standardised
if (_stream is IStreamStack ss)
{
if (ss.BaseStream() is SharpCompress.Compressors.Deflate.DeflateStream deflateStream)
{
deflateStream.Flush(); //Deflate over reads. Knock it back
}
else if (ss.BaseStream() is SharpCompress.Compressors.LZMA.LzmaStream lzmaStream)
{
lzmaStream.Flush(); //Lzma over reads. Knock it back
}
}
#if DEBUG_STREAMS
this.DebugDispose(typeof(EntryStream));
#endif
base.Dispose(disposing);
_stream.Dispose();
}

#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask DisposeAsync()
{
if (_isDisposed)
{
return;
}
_isDisposed = true;
if (!(_completed || _reader.Cancelled))
{
await SkipEntryAsync().ConfigureAwait(false);
}

//Need a safe standard approach to this - it's okay for compression to overreads. Handling needs to be standardised
if (_stream is IStreamStack ss)
{
if (ss.BaseStream() is SharpCompress.Compressors.Deflate.DeflateStream deflateStream)
{
await deflateStream.FlushAsync().ConfigureAwait(false);
}
else if (ss.BaseStream() is SharpCompress.Compressors.LZMA.LzmaStream lzmaStream)
{
await lzmaStream.FlushAsync().ConfigureAwait(false);
}
}
#if DEBUG_STREAMS
this.DebugDispose(typeof(EntryStream));
#endif
await base.DisposeAsync().ConfigureAwait(false);
await _stream.DisposeAsync().ConfigureAwait(false);
}
#endif

public override bool CanRead => true;

public override bool CanSeek => false;
@@ -49,11 +134,13 @@ public class EntryStream : Stream

public override void Flush() { }

public override Task FlushAsync(CancellationToken cancellationToken) => Task.CompletedTask;

public override long Length => _stream.Length;

public override long Position
{
get => throw new NotSupportedException();
get => _stream.Position; //throw new NotSupportedException();
set => throw new NotSupportedException();
}

@@ -67,6 +154,38 @@ public class EntryStream : Stream
return read;
}

public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
var read = await _stream
.ReadAsync(buffer, offset, count, cancellationToken)
.ConfigureAwait(false);
if (read <= 0)
{
_completed = true;
}
return read;
}

#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
var read = await _stream.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
if (read <= 0)
{
_completed = true;
}
return read;
}
#endif

public override int ReadByte()
{
var value = _stream.ReadByte();
@@ -83,4 +202,11 @@ public class EntryStream : Stream

public override void Write(byte[] buffer, int offset, int count) =>
throw new NotSupportedException();

public override Task WriteAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
) => throw new NotSupportedException();
}
@@ -1,12 +0,0 @@
using System;

namespace SharpCompress.Common;

public class ExtractionException : Exception
{
public ExtractionException(string message)
: base(message) { }

public ExtractionException(string message, Exception inner)
: base(message, inner) { }
}
@@ -1,10 +1,22 @@
using System;
using System.IO;
using System.Runtime.InteropServices;
using System.Threading;
using System.Threading.Tasks;

namespace SharpCompress.Common;

internal static class ExtractionMethods
{
/// <summary>
/// Gets the appropriate StringComparison for path checks based on the file system.
/// Windows uses case-insensitive file systems, while Unix-like systems use case-sensitive file systems.
/// </summary>
private static StringComparison PathComparison =>
RuntimeInformation.IsOSPlatform(OSPlatform.Windows)
? StringComparison.OrdinalIgnoreCase
: StringComparison.Ordinal;

/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
@@ -37,6 +49,7 @@ internal static class ExtractionMethods
options ??= new ExtractionOptions() { Overwrite = true };

var file = Path.GetFileName(entry.Key.NotNull("Entry Key is null")).NotNull("File is null");
file = Utility.ReplaceInvalidFileNameChars(file);
if (options.ExtractFullPath)
{
var folder = Path.GetDirectoryName(entry.Key.NotNull("Entry Key is null"))
@@ -45,7 +58,7 @@ internal static class ExtractionMethods

if (!Directory.Exists(destdir))
{
if (!destdir.StartsWith(fullDestinationDirectoryPath, StringComparison.Ordinal))
if (!destdir.StartsWith(fullDestinationDirectoryPath, PathComparison))
{
throw new ExtractionException(
"Entry is trying to create a directory outside of the destination directory."
@@ -65,12 +78,7 @@ internal static class ExtractionMethods
{
destinationFileName = Path.GetFullPath(destinationFileName);

if (
!destinationFileName.StartsWith(
fullDestinationDirectoryPath,
StringComparison.Ordinal
)
)
if (!destinationFileName.StartsWith(fullDestinationDirectoryPath, PathComparison))
{
throw new ExtractionException(
"Entry is trying to write a file outside of the destination directory."
@@ -115,4 +123,110 @@ internal static class ExtractionMethods
entry.PreserveExtractionOptions(destinationFileName, options);
}
}

public static async Task WriteEntryToDirectoryAsync(
IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Func<string, ExtractionOptions?, Task> writeAsync,
CancellationToken cancellationToken = default
)
{
string destinationFileName;
var fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);

//check for trailing slash.
if (
fullDestinationDirectoryPath[fullDestinationDirectoryPath.Length - 1]
!= Path.DirectorySeparatorChar
)
{
fullDestinationDirectoryPath += Path.DirectorySeparatorChar;
}

if (!Directory.Exists(fullDestinationDirectoryPath))
{
throw new ExtractionException(
$"Directory does not exist to extract to: {fullDestinationDirectoryPath}"
);
}

options ??= new ExtractionOptions() { Overwrite = true };

var file = Path.GetFileName(entry.Key.NotNull("Entry Key is null")).NotNull("File is null");
file = Utility.ReplaceInvalidFileNameChars(file);
if (options.ExtractFullPath)
{
var folder = Path.GetDirectoryName(entry.Key.NotNull("Entry Key is null"))
.NotNull("Directory is null");
var destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));

if (!Directory.Exists(destdir))
{
if (!destdir.StartsWith(fullDestinationDirectoryPath, PathComparison))
{
throw new ExtractionException(
"Entry is trying to create a directory outside of the destination directory."
);
}

Directory.CreateDirectory(destdir);
}
destinationFileName = Path.Combine(destdir, file);
}
else
{
destinationFileName = Path.Combine(fullDestinationDirectoryPath, file);
}

if (!entry.IsDirectory)
{
destinationFileName = Path.GetFullPath(destinationFileName);

if (!destinationFileName.StartsWith(fullDestinationDirectoryPath, PathComparison))
{
throw new ExtractionException(
"Entry is trying to write a file outside of the destination directory."
);
}
await writeAsync(destinationFileName, options).ConfigureAwait(false);
}
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
{
Directory.CreateDirectory(destinationFileName);
}
}

public static async Task WriteEntryToFileAsync(
IEntry entry,
string destinationFileName,
ExtractionOptions? options,
Func<string, FileMode, Task> openAndWriteAsync,
CancellationToken cancellationToken = default
)
{
if (entry.LinkTarget != null)
{
if (options?.WriteSymbolicLink is null)
{
throw new ExtractionException(
"Entry is a symbolic link but ExtractionOptions.WriteSymbolicLink delegate is null"
);
}
options.WriteSymbolicLink(destinationFileName, entry.LinkTarget);
}
else
{
var fm = FileMode.Create;
options ??= new ExtractionOptions() { Overwrite = true };

if (!options.Overwrite)
{
fm = FileMode.CreateNew;
}

await openAndWriteAsync(destinationFileName, fm).ConfigureAwait(false);
entry.PreserveExtractionOptions(destinationFileName, options);
}
}
}
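WriteEntryToDirectoryAsync mirrors the synchronous helper: it resolves and validates the destination path against the destination root, then hands the final file name to a caller-supplied delegate. A hedged sketch of how an internal caller might use it; entry.WriteToFileAsync is an assumed stand-in for whatever writer the caller owns, not an API confirmed by this diff:

// Hypothetical internal caller of the new async helper.
await ExtractionMethods.WriteEntryToDirectoryAsync(
    entry,
    @"C:\output",
    new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
    async (path, opts) => await entry.WriteToFileAsync(path, opts) // assumed writer delegate
);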
@@ -11,7 +11,7 @@ public abstract class FilePart
internal abstract string? FilePartName { get; }
public int Index { get; set; }

internal abstract Stream GetCompressedStream();
internal abstract Stream? GetCompressedStream();
internal abstract Stream? GetRawStream();
internal bool Skipped { get; set; }
}

@@ -24,8 +24,14 @@ internal sealed class GZipFilePart
stream.Position = stream.Length - 8;
ReadTrailer();
stream.Position = position;
EntryStartPosition = position;
}
else
{
// For non-seekable streams, we can't read the trailer or track position.
// Set to 0 since the stream will be read sequentially from its current position.
EntryStartPosition = 0;
}
EntryStartPosition = stream.Position;
}

internal long EntryStartPosition { get; }

@@ -1,7 +0,0 @@
namespace SharpCompress.Common;

public class IncompleteArchiveException : ArchiveException
{
public IncompleteArchiveException(string message)
: base(message) { }
}
@@ -1,12 +0,0 @@
using System;

namespace SharpCompress.Common;

public class InvalidFormatException : ExtractionException
{
public InvalidFormatException(string message)
: base(message) { }

public InvalidFormatException(string message, Exception inner)
: base(message, inner) { }
}
@@ -1,12 +0,0 @@
using System;

namespace SharpCompress.Common;

public class MultiVolumeExtractionException : ExtractionException
{
public MultiVolumeExtractionException(string message)
: base(message) { }

public MultiVolumeExtractionException(string message, Exception inner)
: base(message, inner) { }
}
@@ -1,7 +0,0 @@
namespace SharpCompress.Common;

public class MultipartStreamRequiredException : ExtractionException
{
public MultipartStreamRequiredException(string message)
: base(message) { }
}
@@ -17,7 +17,7 @@ internal class CryptKey5 : ICryptKey
private byte[] _pswCheck = { };
private byte[] _hashKey = { };

public CryptKey5(string password, Rar5CryptoInfo rar5CryptoInfo)
public CryptKey5(string? password, Rar5CryptoInfo rar5CryptoInfo)
{
_password = password ?? "";
_cryptoInfo = rar5CryptoInfo;

@@ -1,8 +1,5 @@
#nullable disable

using System;
using System.Security.Cryptography;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;

namespace SharpCompress.Common.Rar.Headers;

@@ -1,9 +1,6 @@
#nullable disable

using System;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using SharpCompress.IO;
#if !Rar2017_64bit
@@ -18,7 +15,7 @@ namespace SharpCompress.Common.Rar.Headers;

internal class FileHeader : RarHeader
{
private byte[] _hash;
private byte[]? _hash;

public FileHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType)
: base(header, reader, headerType) { }
@@ -319,6 +316,10 @@ internal class FileHeader : RarHeader

if (NewSubHeaderType.SUBHEAD_TYPE_RR.Equals(fileNameBytes))
{
if (SubData is null)
{
throw new InvalidFormatException();
}
RecoverySectors =
SubData[8]
+ (SubData[9] << 8)
@@ -340,12 +341,16 @@ internal class FileHeader : RarHeader
if (RemainingHeaderBytes(reader) >= 2)
{
var extendedFlags = reader.ReadUInt16();
FileLastModifiedTime = ProcessExtendedTimeV4(
extendedFlags,
FileLastModifiedTime,
reader,
0
);
if (FileLastModifiedTime is not null)
{
FileLastModifiedTime = ProcessExtendedTimeV4(
extendedFlags,
FileLastModifiedTime,
reader,
0
);
}

FileCreatedTime = ProcessExtendedTimeV4(extendedFlags, null, reader, 1);
FileLastAccessedTime = ProcessExtendedTimeV4(extendedFlags, null, reader, 2);
FileArchivedTime = ProcessExtendedTimeV4(extendedFlags, null, reader, 3);
@@ -377,7 +382,7 @@ internal class FileHeader : RarHeader
var dosTime = reader.ReadUInt32();
time = Utility.DosDateToDateTime(dosTime);
}
if ((rmode & 4) == 0)
if ((rmode & 4) == 0 && time is not null)
{
time = time.Value.AddSeconds(1);
}
@@ -390,7 +395,11 @@ internal class FileHeader : RarHeader
}

//10^-7 to 10^-3
return time.Value.AddMilliseconds(nanosecondHundreds * Math.Pow(10, -4));
if (time is not null)
{
return time.Value.AddMilliseconds(nanosecondHundreds * Math.Pow(10, -4));
}
return null;
}

private static string ConvertPathV4(string path)
@@ -406,13 +415,13 @@ internal class FileHeader : RarHeader
return path;
}

public override string ToString() => FileName;
public override string ToString() => FileName ?? "FileHeader";

private ushort Flags { get; set; }

private bool HasFlag(ushort flag) => (Flags & flag) == flag;

internal byte[] FileCrc
internal byte[]? FileCrc
{
get => _hash;
private set => _hash = value;
@@ -441,22 +450,22 @@ internal class FileHeader : RarHeader
public bool IsRedir => RedirType != 0;
public byte RedirFlags { get; private set; }
public bool IsRedirDirectory => (RedirFlags & RedirFlagV5.DIRECTORY) != 0;
public string RedirTargetName { get; private set; }
public string? RedirTargetName { get; private set; }

// unused for UnpackV1 implementation (limitation)
internal size_t WindowSize { get; private set; }

internal byte[] R4Salt { get; private set; }
internal Rar5CryptoInfo Rar5CryptoInfo { get; private set; }
internal byte[]? R4Salt { get; private set; }
internal Rar5CryptoInfo? Rar5CryptoInfo { get; private set; }
private byte HostOs { get; set; }
internal uint FileAttributes { get; private set; }
internal long CompressedSize { get; private set; }
internal long UncompressedSize { get; private set; }
internal string FileName { get; private set; }
internal byte[] SubData { get; private set; }
internal string? FileName { get; private set; }
internal byte[]? SubData { get; private set; }
internal int RecoverySectors { get; private set; }
internal long DataStartPosition { get; set; }
public Stream PackedStream { get; set; }
public Stream? PackedStream { get; set; }

public bool IsSplitBefore =>
IsRar5 ? HasHeaderFlag(HeaderFlagsV5.SPLIT_BEFORE) : HasFlag(FileFlagsV4.SPLIT_BEFORE);

@@ -13,7 +13,7 @@ public enum HeaderType : byte
Sign,
NewSub,
EndArchive,
Crypt
Crypt,
}

internal static class HeaderCodeV

@@ -1,5 +1,4 @@
using System;
using System.IO;
using SharpCompress.IO;

namespace SharpCompress.Common.Rar.Headers;
@@ -21,7 +20,7 @@ internal class RarHeader : IRarHeader
{
return new RarHeader(reader, isRar5, archiveEncoding);
}
catch (EndOfStreamException)
catch (InvalidFormatException)
{
return null;
}

@@ -1,7 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common.Rar;
using SharpCompress.IO;
using SharpCompress.Readers;

@@ -160,10 +158,15 @@ public class RarHeaderFactory
{
fh.PackedStream = new RarCryptoWrapper(
ms,
fh.R4Salt is null ? fh.Rar5CryptoInfo.Salt : fh.R4Salt,
fh.R4Salt is null
? new CryptKey5(Options.Password!, fh.Rar5CryptoInfo)
: new CryptKey3(Options.Password!)
? fh.Rar5CryptoInfo.NotNull().Salt
: fh.R4Salt,
fh.R4Salt is null
? new CryptKey5(
Options.Password,
fh.Rar5CryptoInfo.NotNull()
)
: new CryptKey3(Options.Password)
);
}
}

@@ -20,7 +20,7 @@ public abstract class RarEntry : Entry
/// <summary>
/// The File's 32 bit CRC Hash
/// </summary>
public override long Crc => BitConverter.ToUInt32(FileHeader.FileCrc, 0);
public override long Crc => BitConverter.ToUInt32(FileHeader.FileCrc.NotNull(), 0);

/// <summary>
/// The path of the file internal to the Rar Archive.
@@ -68,7 +68,7 @@ public abstract class RarEntry : Entry

public bool IsRedir => FileHeader.IsRedir;

public string RedirTargetName => FileHeader.RedirTargetName;
public string? RedirTargetName => FileHeader.RedirTargetName;

public override string ToString() =>
string.Format(

@@ -61,9 +61,8 @@ public abstract class RarVolume : Volume
var fh = (FileHeader)header;
if (fh.FileName == "CMT")
{
var part = CreateFilePart(lastMarkHeader!, fh);
var buffer = new byte[fh.CompressedSize];
part.GetCompressedStream().Read(buffer, 0, buffer.Length);
fh.PackedStream.NotNull().ReadFully(buffer);
Comment = Encoding.UTF8.GetString(buffer, 0, buffer.Length - 1);
}
}

@@ -1,12 +0,0 @@
using System;

namespace SharpCompress.Common;

public class ReaderCancelledException : Exception
{
public ReaderCancelledException(string message)
: base(message) { }

public ReaderCancelledException(string message, Exception inner)
: base(message, inner) { }
}
@@ -784,7 +784,7 @@ internal class ArchiveReader
);
break;
default:
throw new InvalidOperationException();
throw new InvalidFormatException();
}
}
}
@@ -843,7 +843,7 @@ internal class ArchiveReader
outStream.ReadExact(data, 0, data.Length);
if (outStream.ReadByte() >= 0)
{
throw new InvalidOperationException("Decoded stream is longer than expected.");
throw new InvalidFormatException("Decoded stream is longer than expected.");
}
dataVector.Add(data);

@@ -854,7 +854,7 @@ internal class ArchiveReader
!= folder._unpackCrc
)
{
throw new InvalidOperationException(
throw new InvalidFormatException(
"Decoded stream does not match expected CRC."
);
}
@@ -996,6 +996,11 @@ internal class ArchiveReader
numFiles,
delegate(int i, uint? attr)
{
// Keep the original attribute value because it could potentially get
// modified in the logic that follows. Some callers of the library may
// find the original value useful.
db._files[i].ExtendedAttrib = attr;

// Some third party implementations established an unofficial extension
// of the 7z archive format by placing posix file attributes in the high
// bits of the windows file attributes. This makes use of the fact that
@@ -1220,23 +1225,46 @@ internal class ArchiveReader

#region Public Methods

public void Open(Stream stream)
public void Open(Stream stream, bool lookForHeader)
{
Close();

_streamOrigin = stream.Position;
_streamEnding = stream.Length;

// TODO: Check Signature!
_header = new byte[0x20];
for (var offset = 0; offset < 0x20; )
var canScan = lookForHeader ? 0x80000 - 20 : 0;
while (true)
{
var delta = stream.Read(_header, offset, 0x20 - offset);
if (delta == 0)
// TODO: Check Signature!
_header = new byte[0x20];
for (var offset = 0; offset < 0x20; )
{
throw new EndOfStreamException();
var delta = stream.Read(_header, offset, 0x20 - offset);
if (delta == 0)
{
throw new EndOfStreamException();
}

offset += delta;
}
offset += delta;

if (
!lookForHeader
|| _header
.AsSpan(0, length: 6)
.SequenceEqual<byte>([0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C])
)
{
break;
}

if (canScan == 0)
{
throw new InvalidFormatException("Unable to find 7z signature");
}

canScan--;
stream.Position = ++_streamOrigin;
}

_stream = stream;
@@ -1435,7 +1463,7 @@ internal class ArchiveReader
#if DEBUG
Log.WriteLine(_db._files[index].Name);
#endif
if (_db._files[index].CrcDefined)
if (_db._files[index].Crc.HasValue)
{
_stream = new CrcCheckStream(_db._files[index].Crc.Value);
}

@@ -8,18 +8,13 @@ internal class CFileItem
{
public long Size { get; internal set; }
public uint? Attrib { get; internal set; }
public uint? ExtendedAttrib { get; internal set; }
public uint? Crc { get; internal set; }
public string Name { get; internal set; }

public bool HasStream { get; internal set; }
public bool IsDir { get; internal set; }

public bool CrcDefined => Crc != null;

public bool AttribDefined => Attrib != null;

public void SetAttrib(uint attrib) => Attrib = attrib;

public DateTime? CTime { get; internal set; }
public DateTime? ATime { get; internal set; }
public DateTime? MTime { get; internal set; }

@@ -38,5 +38,8 @@ public class SevenZipEntry : Entry
public override int? Attrib =>
FilePart.Header.Attrib.HasValue ? (int?)FilePart.Header.Attrib.Value : null;

public int? ExtendedAttrib =>
FilePart.Header.ExtendedAttrib.HasValue ? (int?)FilePart.Header.ExtendedAttrib.Value : null;

internal override IEnumerable<FilePart> Parts => FilePart.AsEnumerable<FilePart>();
}

@@ -1,4 +1,3 @@
using System;
using System.IO;
using System.Linq;
using SharpCompress.IO;
@@ -41,7 +40,7 @@ internal class SevenZipFilePart : FilePart
{
if (!Header.HasStream)
{
throw new InvalidOperationException("File does not have a stream.");
return Stream.Null;
}
var folderStream = _database.GetFolderStream(_stream, Folder!, _database.PasswordProvider);

@@ -68,6 +67,7 @@ internal class SevenZipFilePart : FilePart
}
}

private const uint K_COPY = 0x0;
private const uint K_LZMA2 = 0x21;
private const uint K_LZMA = 0x030101;
private const uint K_PPMD = 0x030401;
@@ -83,10 +83,11 @@ internal class SevenZipFilePart : FilePart
var coder = Folder.NotNull()._coders.First();
return coder._methodId._id switch
{
K_COPY => CompressionType.None,
K_LZMA or K_LZMA2 => CompressionType.LZMA,
K_PPMD => CompressionType.PPMd,
K_B_ZIP2 => CompressionType.BZip2,
_ => throw new NotImplementedException()
_ => throw new InvalidFormatException(),
};
}

48
src/SharpCompress/Common/SharpCompressException.cs
Normal file
48
src/SharpCompress/Common/SharpCompressException.cs
Normal file
@@ -0,0 +1,48 @@
|
||||
using System;
|
||||
|
||||
namespace SharpCompress.Common;
|
||||
|
||||
public class SharpCompressException : Exception
|
||||
{
|
||||
public SharpCompressException() { }
|
||||
|
||||
public SharpCompressException(string message)
|
||||
: base(message) { }
|
||||
|
||||
public SharpCompressException(string message, Exception inner)
|
||||
: base(message, inner) { }
|
||||
}
|
||||
|
||||
public class ArchiveException(string message) : SharpCompressException(message);
|
||||
|
||||
public class IncompleteArchiveException(string message) : ArchiveException(message);
|
||||
|
||||
public class CryptographicException(string message) : SharpCompressException(message);
|
||||
|
||||
public class ReaderCancelledException(string message) : SharpCompressException(message);
|
||||
|
||||
public class ExtractionException : SharpCompressException
|
||||
{
|
||||
public ExtractionException() { }
|
||||
|
||||
public ExtractionException(string message)
|
||||
: base(message) { }
|
||||
|
||||
public ExtractionException(string message, Exception inner)
|
||||
: base(message, inner) { }
|
||||
}
|
||||
|
||||
public class MultipartStreamRequiredException(string message) : ExtractionException(message);
|
||||
|
||||
public class MultiVolumeExtractionException(string message) : ExtractionException(message);
|
||||
|
||||
public class InvalidFormatException : ExtractionException
|
||||
{
|
||||
public InvalidFormatException() { }
|
||||
|
||||
public InvalidFormatException(string message)
|
||||
: base(message) { }
|
||||
|
||||
public InvalidFormatException(string message, Exception inner)
|
||||
: base(message, inner) { }
|
||||
}
|
||||
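
The new exception hierarchy above gives callers one library-specific base type (SharpCompressException) with narrower subtypes such as ArchiveException, ExtractionException and InvalidFormatException. A minimal usage sketch, assuming a typical ArchiveFactory.Open call; the file name and the surrounding class are illustrative, not part of this changeset:

using System;
using SharpCompress.Archives;
using SharpCompress.Common;

internal static class ExceptionHandlingSketch
{
    public static void Run()
    {
        try
        {
            // Illustrative only: open an arbitrary archive and list its entries.
            using var archive = ArchiveFactory.Open("example.7z");
            foreach (var entry in archive.Entries)
            {
                Console.WriteLine(entry.Key);
            }
        }
        catch (InvalidFormatException ex)
        {
            // Most specific: the data could not be parsed as a valid archive.
            Console.Error.WriteLine($"Invalid archive format: {ex.Message}");
        }
        catch (SharpCompressException ex)
        {
            // Fallback for any other library-specific failure declared above.
            Console.Error.WriteLine($"SharpCompress error: {ex.Message}");
        }
    }
}
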
@@ -14,5 +14,5 @@ internal enum EntryType : byte
|
||||
LongName = (byte)'L',
|
||||
SparseFile = (byte)'S',
|
||||
VolumeHeader = (byte)'V',
|
||||
GlobalExtendedHeader = (byte)'g'
|
||||
GlobalExtendedHeader = (byte)'g',
|
||||
}
|
||||
|
||||
@@ -25,6 +25,10 @@ internal sealed class TarHeader
|
||||
|
||||
internal const int BLOCK_SIZE = 512;
|
||||
|
||||
// Maximum size for long name/link headers to prevent memory exhaustion attacks
|
||||
// This is generous enough for most real-world scenarios (32KB)
|
||||
private const int MAX_LONG_NAME_SIZE = 32768;
|
||||
|
||||
internal void Write(Stream output)
|
||||
{
|
||||
var buffer = new byte[BLOCK_SIZE];
|
||||
@@ -101,63 +105,100 @@ internal sealed class TarHeader
|
||||
|
||||
internal bool Read(BinaryReader reader)
|
||||
{
|
||||
var buffer = ReadBlock(reader);
|
||||
if (buffer.Length == 0)
|
||||
string? longName = null;
|
||||
string? longLinkName = null;
|
||||
var hasLongValue = true;
|
||||
byte[] buffer;
|
||||
EntryType entryType;
|
||||
|
||||
do
|
||||
{
|
||||
buffer = ReadBlock(reader);
|
||||
|
||||
if (buffer.Length == 0)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
entryType = ReadEntryType(buffer);
|
||||
|
||||
// LongName and LongLink headers can follow each other and need
|
||||
// to apply to the header that follows them.
|
||||
if (entryType == EntryType.LongName)
|
||||
{
|
||||
longName = ReadLongName(reader, buffer);
|
||||
continue;
|
||||
}
|
||||
else if (entryType == EntryType.LongLink)
|
||||
{
|
||||
longLinkName = ReadLongName(reader, buffer);
|
||||
continue;
|
||||
}
|
||||
|
||||
hasLongValue = false;
|
||||
} while (hasLongValue);
|
||||
|
||||
// Check header checksum
|
||||
if (!checkChecksum(buffer))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// for symlinks, additionally read the linkname
|
||||
if (ReadEntryType(buffer) == EntryType.SymLink)
|
||||
{
|
||||
LinkName = ArchiveEncoding.Decode(buffer, 157, 100).TrimNulls();
|
||||
}
|
||||
|
||||
if (ReadEntryType(buffer) == EntryType.LongName)
|
||||
{
|
||||
Name = ReadLongName(reader, buffer);
|
||||
buffer = ReadBlock(reader);
|
||||
}
|
||||
else
|
||||
{
|
||||
Name = ArchiveEncoding.Decode(buffer, 0, 100).TrimNulls();
|
||||
}
|
||||
|
||||
EntryType = ReadEntryType(buffer);
|
||||
Name = longName ?? ArchiveEncoding.Decode(buffer, 0, 100).TrimNulls();
|
||||
EntryType = entryType;
|
||||
Size = ReadSize(buffer);
|
||||
|
||||
// for symlinks, additionally read the linkname
|
||||
if (entryType == EntryType.SymLink || entryType == EntryType.HardLink)
|
||||
{
|
||||
LinkName = longLinkName ?? ArchiveEncoding.Decode(buffer, 157, 100).TrimNulls();
|
||||
}
|
||||
|
||||
Mode = ReadAsciiInt64Base8(buffer, 100, 7);
|
||||
if (EntryType == EntryType.Directory)
|
||||
|
||||
if (entryType == EntryType.Directory)
|
||||
{
|
||||
Mode |= 0b1_000_000_000;
|
||||
}
|
||||
|
||||
UserId = ReadAsciiInt64Base8oldGnu(buffer, 108, 7);
|
||||
GroupId = ReadAsciiInt64Base8oldGnu(buffer, 116, 7);
|
||||
var unixTimeStamp = ReadAsciiInt64Base8(buffer, 136, 11);
|
||||
LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime();
|
||||
|
||||
var unixTimeStamp = ReadAsciiInt64Base8(buffer, 136, 11);
|
||||
|
||||
LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime();
|
||||
Magic = ArchiveEncoding.Decode(buffer, 257, 6).TrimNulls();
|
||||
|
||||
if (!string.IsNullOrEmpty(Magic) && "ustar".Equals(Magic))
|
||||
{
|
||||
var namePrefix = ArchiveEncoding.Decode(buffer, 345, 157);
|
||||
namePrefix = namePrefix.TrimNulls();
|
||||
var namePrefix = ArchiveEncoding.Decode(buffer, 345, 157).TrimNulls();
|
||||
|
||||
if (!string.IsNullOrEmpty(namePrefix))
|
||||
{
|
||||
Name = namePrefix + "/" + Name;
|
||||
}
|
||||
}
|
||||
if (EntryType != EntryType.LongName && Name.Length == 0)
|
||||
|
||||
if (entryType != EntryType.LongName && Name.Length == 0)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private string ReadLongName(BinaryReader reader, byte[] buffer)
|
||||
{
|
||||
var size = ReadSize(buffer);
|
||||
|
||||
// Validate size to prevent memory exhaustion from malformed headers
|
||||
if (size < 0 || size > MAX_LONG_NAME_SIZE)
|
||||
{
|
||||
throw new InvalidFormatException(
|
||||
$"Long name size {size} is invalid or exceeds maximum allowed size of {MAX_LONG_NAME_SIZE} bytes"
|
||||
);
|
||||
}
|
||||
|
||||
var nameLength = (int)size;
|
||||
var nameBytes = reader.ReadBytes(nameLength);
|
||||
var remainingBytesToRead = BLOCK_SIZE - (nameLength % BLOCK_SIZE);
|
||||
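
The rewritten Read() loop above handles the GNU tar extension where a long-name pseudo-entry (typeflag 'L') or a long-link pseudo-entry ('K' in GNU tar) precedes the real header and carries the full path in its data blocks, and ReadLongName() now rejects any declared size outside 0..MAX_LONG_NAME_SIZE before allocating. A small sketch of the 512-byte block arithmetic involved; the helper is illustrative, not library code:

// Long-name data is NUL-padded to a multiple of 512 bytes, so after reading
// the name itself the remainder of the last block has to be skipped.
internal static class TarBlockPaddingSketch
{
    private const int BlockSize = 512; // same value as TarHeader.BLOCK_SIZE

    public static int RemainingBytesToRead(int nameLength) =>
        BlockSize - (nameLength % BlockSize);

    public static void Run()
    {
        // A 600-byte name occupies two blocks (1024 bytes), leaving 424 bytes
        // of padding; note that the formula as written yields 512 when the
        // name length is an exact multiple of the block size.
        System.Console.WriteLine(RemainingBytesToRead(600)); // 424
    }
}
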
@@ -188,7 +229,7 @@ internal sealed class TarHeader
|
||||
|
||||
if (buffer.Length != 0 && buffer.Length < BLOCK_SIZE)
|
||||
{
|
||||
throw new InvalidOperationException("Buffer is invalid size");
|
||||
throw new InvalidFormatException("Buffer is invalid size");
|
||||
}
|
||||
return buffer;
|
||||
}
|
||||
@@ -286,9 +327,45 @@ internal sealed class TarHeader
|
||||
(byte)' ',
|
||||
(byte)' ',
|
||||
(byte)' ',
|
||||
(byte)' '
|
||||
(byte)' ',
|
||||
};
|
||||
|
||||
internal static bool checkChecksum(byte[] buf)
|
||||
{
|
||||
const int eightSpacesChksum = 256;
|
||||
var buffer = new Span<byte>(buf).Slice(0, 512);
|
||||
int posix_sum = eightSpacesChksum;
|
||||
int sun_sum = eightSpacesChksum;
|
||||
|
||||
foreach (byte b in buffer)
|
||||
{
|
||||
posix_sum += b;
|
||||
sun_sum += unchecked((sbyte)b);
|
||||
}
|
||||
|
||||
// Special case, empty file header
|
||||
if (posix_sum == eightSpacesChksum)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
// Remove current checksum from calculation
|
||||
foreach (byte b in buffer.Slice(148, 8))
|
||||
{
|
||||
posix_sum -= b;
|
||||
sun_sum -= unchecked((sbyte)b);
|
||||
}
|
||||
|
||||
// Read and compare checksum for header
|
||||
var crc = ReadAsciiInt64Base8(buf, 148, 7);
|
||||
if (crc != posix_sum && crc != sun_sum)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
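
checkChecksum above accepts either the POSIX interpretation (unsigned bytes) or the historic Sun interpretation (signed bytes) of the header checksum, which is computed over the 512-byte block with the 8-byte checksum field counted as ASCII spaces. That is where the constant comes from: eight spaces contribute 8 * 0x20 = 256 (the eightSpacesChksum seed), so an all-zero block sums to exactly 256 and is accepted as the empty-header case noted in the comment. A two-line check of the arithmetic:

var eightSpaces = 8 * 0x20;                   // 256, the eightSpacesChksum seed
System.Console.WriteLine(eightSpaces == 256); // True
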
|
||||
internal static int RecalculateChecksum(byte[] buf)
|
||||
{
|
||||
// Set default value for checksum. That is 8 spaces.
|
||||
|
||||
@@ -4,13 +4,38 @@ using SharpCompress.IO;
|
||||
|
||||
namespace SharpCompress.Common.Tar;
|
||||
|
||||
internal class TarReadOnlySubStream : NonDisposingStream
|
||||
internal class TarReadOnlySubStream : SharpCompressStream, IStreamStack
|
||||
{
|
||||
#if DEBUG_STREAMS
|
||||
long IStreamStack.InstanceId { get; set; }
|
||||
#endif
|
||||
|
||||
Stream IStreamStack.BaseStream() => base.Stream;
|
||||
|
||||
int IStreamStack.BufferSize
|
||||
{
|
||||
get => 0;
|
||||
set { }
|
||||
}
|
||||
int IStreamStack.BufferPosition
|
||||
{
|
||||
get => 0;
|
||||
set { }
|
||||
}
|
||||
|
||||
void IStreamStack.SetPosition(long position) { }
|
||||
|
||||
private bool _isDisposed;
|
||||
private long _amountRead;
|
||||
|
||||
public TarReadOnlySubStream(Stream stream, long bytesToRead)
|
||||
: base(stream, throwOnDispose: false) => BytesLeftToRead = bytesToRead;
|
||||
: base(stream, leaveOpen: true, throwOnDispose: false)
|
||||
{
|
||||
BytesLeftToRead = bytesToRead;
|
||||
#if DEBUG_STREAMS
|
||||
this.DebugConstruct(typeof(TarReadOnlySubStream));
|
||||
#endif
|
||||
}
|
||||
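
Several hunks in this changeset add IStreamStack plumbing to stream wrappers. The interface itself is not shown in the diff; the sketch below is reconstructed only from the members implemented here and is an assumption about its shape, not the library's actual definition (members such as SetBuffer, StackSeek and Rewind are also used in this diff but their declarations are not visible, so they are omitted):

using System.IO;

// Hypothetical reconstruction based solely on the implementations in this diff.
internal interface IStreamStack
{
#if DEBUG_STREAMS
    long InstanceId { get; set; }   // used by the DebugConstruct/DebugDispose helpers
#endif
    int DefaultBufferSize { get; set; }

    Stream BaseStream();            // the stream this wrapper sits on top of

    int BufferSize { get; set; }    // implemented as 0 / no-op where no buffering exists
    int BufferPosition { get; set; }

    void SetPosition(long position);
}
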
|
||||
protected override void Dispose(bool disposing)
|
||||
{
|
||||
@@ -20,7 +45,9 @@ internal class TarReadOnlySubStream : NonDisposingStream
|
||||
}
|
||||
|
||||
_isDisposed = true;
|
||||
|
||||
#if DEBUG_STREAMS
|
||||
this.DebugDispose(typeof(TarReadOnlySubStream));
|
||||
#endif
|
||||
if (disposing)
|
||||
{
|
||||
// Ensure we read all remaining blocks for this entry.
|
||||
@@ -39,6 +66,36 @@ internal class TarReadOnlySubStream : NonDisposingStream
|
||||
base.Dispose(disposing);
|
||||
}
|
||||
|
||||
#if !NETFRAMEWORK && !NETSTANDARD2_0
|
||||
public override async System.Threading.Tasks.ValueTask DisposeAsync()
|
||||
{
|
||||
if (_isDisposed)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
_isDisposed = true;
|
||||
#if DEBUG_STREAMS
|
||||
this.DebugDispose(typeof(TarReadOnlySubStream));
|
||||
#endif
|
||||
// Ensure we read all remaining blocks for this entry.
|
||||
await Stream.SkipAsync(BytesLeftToRead).ConfigureAwait(false);
|
||||
_amountRead += BytesLeftToRead;
|
||||
|
||||
// If the last block wasn't a full 512 bytes, skip the remaining padding bytes.
|
||||
var bytesInLastBlock = _amountRead % 512;
|
||||
|
||||
if (bytesInLastBlock != 0)
|
||||
{
|
||||
await Stream.SkipAsync(512 - bytesInLastBlock).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
// Call base Dispose instead of base DisposeAsync to avoid double disposal
|
||||
base.Dispose(true);
|
||||
GC.SuppressFinalize(this);
|
||||
}
|
||||
#endif
|
||||
|
||||
private long BytesLeftToRead { get; set; }
|
||||
|
||||
public override bool CanRead => true;
|
||||
@@ -49,6 +106,10 @@ internal class TarReadOnlySubStream : NonDisposingStream
|
||||
|
||||
public override void Flush() { }
|
||||
|
||||
public override System.Threading.Tasks.Task FlushAsync(
|
||||
System.Threading.CancellationToken cancellationToken
|
||||
) => System.Threading.Tasks.Task.CompletedTask;
|
||||
|
||||
public override long Length => throw new NotSupportedException();
|
||||
|
||||
public override long Position
|
||||
@@ -87,6 +148,48 @@ internal class TarReadOnlySubStream : NonDisposingStream
|
||||
return value;
|
||||
}
|
||||
|
||||
public override async System.Threading.Tasks.Task<int> ReadAsync(
|
||||
byte[] buffer,
|
||||
int offset,
|
||||
int count,
|
||||
System.Threading.CancellationToken cancellationToken
|
||||
)
|
||||
{
|
||||
if (BytesLeftToRead < count)
|
||||
{
|
||||
count = (int)BytesLeftToRead;
|
||||
}
|
||||
var read = await Stream
|
||||
.ReadAsync(buffer, offset, count, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
if (read > 0)
|
||||
{
|
||||
BytesLeftToRead -= read;
|
||||
_amountRead += read;
|
||||
}
|
||||
return read;
|
||||
}
|
||||
|
||||
#if !NETFRAMEWORK && !NETSTANDARD2_0
|
||||
public override async System.Threading.Tasks.ValueTask<int> ReadAsync(
|
||||
System.Memory<byte> buffer,
|
||||
System.Threading.CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
if (BytesLeftToRead < buffer.Length)
|
||||
{
|
||||
buffer = buffer.Slice(0, (int)BytesLeftToRead);
|
||||
}
|
||||
var read = await Stream.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
|
||||
if (read > 0)
|
||||
{
|
||||
BytesLeftToRead -= read;
|
||||
_amountRead += read;
|
||||
}
|
||||
return read;
|
||||
}
|
||||
#endif
|
||||
|
||||
public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
|
||||
|
||||
public override void SetLength(long value) => throw new NotSupportedException();
|
||||
|
||||
@@ -7,16 +7,22 @@ namespace SharpCompress.Common;
|
||||
|
||||
public abstract class Volume : IVolume
|
||||
{
|
||||
private readonly Stream _baseStream;
|
||||
private readonly Stream _actualStream;
|
||||
|
||||
internal Volume(Stream stream, ReaderOptions? readerOptions, int index = 0)
|
||||
{
|
||||
Index = index;
|
||||
ReaderOptions = readerOptions ?? new ReaderOptions();
|
||||
_baseStream = stream;
|
||||
if (ReaderOptions.LeaveStreamOpen)
|
||||
{
|
||||
stream = NonDisposingStream.Create(stream);
|
||||
stream = SharpCompressStream.Create(stream, leaveOpen: true);
|
||||
}
|
||||
|
||||
if (stream is IStreamStack ss)
|
||||
ss.SetBuffer(ReaderOptions.BufferSize, true);
|
||||
|
||||
_actualStream = stream;
|
||||
}
|
||||
|
||||
@@ -32,7 +38,7 @@ public abstract class Volume : IVolume
|
||||
|
||||
public virtual int Index { get; internal set; }
|
||||
|
||||
public string? FileName => (_actualStream as FileStream)?.Name;
|
||||
public string? FileName => (_baseStream as FileStream)?.Name;
|
||||
|
||||
/// <summary>
|
||||
/// RarArchive is part of a multi-part archive.
|
||||
|
||||
@@ -123,11 +123,7 @@ internal class DirectoryEntryHeader : ZipFileEntry
|
||||
|
||||
public long RelativeOffsetOfEntryHeader { get; set; }
|
||||
|
||||
public uint ExternalFileAttributes { get; set; }
|
||||
|
||||
public ushort InternalFileAttributes { get; set; }
|
||||
|
||||
public ushort DiskNumberStart { get; set; }
|
||||
|
||||
public string? Comment { get; private set; }
|
||||
}
|
||||
|
||||
@@ -13,5 +13,5 @@ internal enum HeaderFlags : ushort
|
||||
EnhancedDeflate = 16,
|
||||
|
||||
//Bit 11: Language encoding flag
|
||||
Efs = 2048
|
||||
Efs = 2048,
|
||||
}
|
||||
|
||||
@@ -14,7 +14,7 @@ internal enum ExtraDataType : ushort
|
||||
// -Info-ZIP Unicode Path Extra Field
|
||||
UnicodePathExtraField = 0x7075,
|
||||
Zip64ExtendedInformationExtraField = 0x0001,
|
||||
UnixTimeExtraField = 0x5455
|
||||
UnixTimeExtraField = 0x5455,
|
||||
}
|
||||
|
||||
internal class ExtraData
|
||||
@@ -166,10 +166,10 @@ internal sealed class UnixTimeExtraField : ExtraData
|
||||
return Tuple.Create<DateTime?, DateTime?, DateTime?>(null, null, null);
|
||||
}
|
||||
|
||||
var flags = DataBytes[0];
|
||||
var isModifiedTimeSpecified = (flags & 0x01) == 1;
|
||||
var isLastAccessTimeSpecified = (flags & 0x02) == 1;
|
||||
var isCreationTimeSpecified = (flags & 0x04) == 1;
|
||||
var flags = (RecordedTimeFlag)DataBytes[0];
|
||||
var isModifiedTimeSpecified = flags.HasFlag(RecordedTimeFlag.LastModified);
|
||||
var isLastAccessTimeSpecified = flags.HasFlag(RecordedTimeFlag.LastAccessed);
|
||||
var isCreationTimeSpecified = flags.HasFlag(RecordedTimeFlag.Created);
|
||||
var currentIndex = 1;
|
||||
DateTime? modifiedTime = null;
|
||||
DateTime? lastAccessTime = null;
|
||||
@@ -189,7 +189,7 @@ internal sealed class UnixTimeExtraField : ExtraData
|
||||
{
|
||||
if (currentIndex + 4 > DataBytes.Length)
|
||||
{
|
||||
throw new ArchiveException("Invalid UnicodeExtraTime field");
|
||||
return Tuple.Create<DateTime?, DateTime?, DateTime?>(null, null, null);
|
||||
}
|
||||
|
||||
var lastAccessEpochTime = BinaryPrimitives.ReadInt32LittleEndian(
|
||||
@@ -206,7 +206,7 @@ internal sealed class UnixTimeExtraField : ExtraData
|
||||
{
|
||||
if (currentIndex + 4 > DataBytes.Length)
|
||||
{
|
||||
throw new ArchiveException("Invalid UnicodeExtraTime field");
|
||||
return Tuple.Create<DateTime?, DateTime?, DateTime?>(null, null, null);
|
||||
}
|
||||
|
||||
var creationTimeEpochTime = BinaryPrimitives.ReadInt32LittleEndian(
|
||||
@@ -222,6 +222,15 @@ internal sealed class UnixTimeExtraField : ExtraData
|
||||
return Tuple.Create(modifiedTime, lastAccessTime, creationTime);
|
||||
}
|
||||
}
|
||||
|
||||
[Flags]
|
||||
private enum RecordedTimeFlag
|
||||
{
|
||||
None = 0,
|
||||
LastModified = 1,
|
||||
LastAccessed = 2,
|
||||
Created = 4,
|
||||
}
|
||||
}
|
||||
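
The flags byte of the Unix time extra field (0x5455) is now decoded through the RecordedTimeFlag enum above; the previous bit tests compared (flags & 0x02) and (flags & 0x04) against 1, which could never be true, so only the modification time was ever detected. A short worked example of the new decoding; RecordedTimeFlag is private to the class above, so this snippet is illustrative only:

// Flags byte 0x05 sets bit 0 (LastModified) and bit 2 (Created), not bit 1.
var flags = (RecordedTimeFlag)0x05;

var hasModified = flags.HasFlag(RecordedTimeFlag.LastModified); // true
var hasAccessed = flags.HasFlag(RecordedTimeFlag.LastAccessed); // false
var hasCreated = flags.HasFlag(RecordedTimeFlag.Created);       // true
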
|
||||
internal static class LocalEntryHeaderExtraFactory
|
||||
@@ -229,11 +238,14 @@ internal static class LocalEntryHeaderExtraFactory
|
||||
internal static ExtraData Create(ExtraDataType type, ushort length, byte[] extraData) =>
|
||||
type switch
|
||||
{
|
||||
ExtraDataType.UnicodePathExtraField
|
||||
=> new ExtraUnicodePathExtraField(type, length, extraData),
|
||||
ExtraDataType.Zip64ExtendedInformationExtraField
|
||||
=> new Zip64ExtendedInformationExtraField(type, length, extraData),
|
||||
ExtraDataType.UnicodePathExtraField => new ExtraUnicodePathExtraField(
|
||||
type,
|
||||
length,
|
||||
extraData
|
||||
),
|
||||
ExtraDataType.Zip64ExtendedInformationExtraField =>
|
||||
new Zip64ExtendedInformationExtraField(type, length, extraData),
|
||||
ExtraDataType.UnixTimeExtraField => new UnixTimeExtraField(type, length, extraData),
|
||||
_ => new ExtraData(type, length, extraData)
|
||||
_ => new ExtraData(type, length, extraData),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -91,8 +91,15 @@ internal abstract class ZipFileEntry : ZipHeader
|
||||
|
||||
protected void LoadExtra(byte[] extra)
|
||||
{
|
||||
for (var i = 0; i < extra.Length - 4; )
|
||||
for (var i = 0; i < extra.Length; )
|
||||
{
|
||||
// Ensure we have at least a header (2-byte ID + 2-byte length)
|
||||
if (i + 4 > extra.Length)
|
||||
{
|
||||
// Incomplete header — stop parsing extras
|
||||
break;
|
||||
}
|
||||
|
||||
var type = (ExtraDataType)BinaryPrimitives.ReadUInt16LittleEndian(extra.AsSpan(i));
|
||||
if (!Enum.IsDefined(typeof(ExtraDataType), type))
|
||||
{
|
||||
@@ -106,7 +113,17 @@ internal abstract class ZipFileEntry : ZipHeader
|
||||
if (length > extra.Length)
|
||||
{
|
||||
// bad extras block
|
||||
return;
|
||||
break; // allow processing optional other blocks
|
||||
}
|
||||
// Some ZIP files contain vendor-specific or malformed extra fields where the declared
|
||||
// data length extends beyond the remaining buffer. This adjustment ensures that
|
||||
// we only read data within bounds (i + 4 + length <= extra.Length)
|
||||
// The example here is: 41 43 18 00 41 52 43 30 46 EB FF FF 51 29 03 C6 03 00 00 00 00 00 00 00 00
|
||||
// No existing zip utility uses 0x4341 ('AC')
|
||||
if (i + 4 + length > extra.Length)
|
||||
{
|
||||
// incomplete or corrupt field
|
||||
break; // allow processing optional other blocks
|
||||
}
|
||||
|
||||
var data = new byte[length];
|
||||
@@ -120,4 +137,8 @@ internal abstract class ZipFileEntry : ZipHeader
|
||||
internal ZipFilePart? Part { get; set; }
|
||||
|
||||
internal bool IsZip64 => CompressedSize >= uint.MaxValue;
|
||||
|
||||
internal uint ExternalFileAttributes { get; set; }
|
||||
|
||||
internal string? Comment { get; set; }
|
||||
}
|
||||
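
Each ZIP extra-field record that LoadExtra walks is framed as a 2-byte little-endian type ID followed by a 2-byte little-endian data length; the added checks stop on an incomplete 4-byte header or on a declared length that runs past the buffer instead of reading out of bounds. Decoding the sample bytes quoted in the comment shows why. A minimal sketch, with the class and variable names being illustrative:

using System;
using System.Buffers.Binary;

internal static class ExtraFieldBoundsSketch
{
    public static void Run()
    {
        // The sample bytes quoted in the comment above (25 bytes total).
        var extra = new byte[]
        {
            0x41, 0x43, 0x18, 0x00, 0x41, 0x52, 0x43, 0x30, 0x46, 0xEB, 0xFF, 0xFF,
            0x51, 0x29, 0x03, 0xC6, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
        };

        var id = BinaryPrimitives.ReadUInt16LittleEndian(extra.AsSpan(0));     // 0x4341 ('AC')
        var length = BinaryPrimitives.ReadUInt16LittleEndian(extra.AsSpan(2)); // 0x0018 = 24

        // Only 21 data bytes follow the 4-byte record header, so the declared
        // length overruns the buffer and the parser breaks out of the loop.
        var fits = 4 + length <= extra.Length;
        Console.WriteLine($"id=0x{id:X4} length={length} fits={fits}");        // fits=False
    }
}
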
|
||||
@@ -8,5 +8,5 @@ internal enum ZipHeaderType
|
||||
DirectoryEnd,
|
||||
Split,
|
||||
Zip64DirectoryEnd,
|
||||
Zip64DirectoryEndLocator
|
||||
Zip64DirectoryEndLocator,
|
||||
}
|
||||
|
||||
@@ -1,16 +1,37 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using SharpCompress.IO;
|
||||
|
||||
namespace SharpCompress.Common.Zip;
|
||||
|
||||
internal enum CryptoMode
|
||||
{
|
||||
Encrypt,
|
||||
Decrypt
|
||||
Decrypt,
|
||||
}
|
||||
|
||||
internal class PkwareTraditionalCryptoStream : Stream
|
||||
internal class PkwareTraditionalCryptoStream : Stream, IStreamStack
|
||||
{
|
||||
#if DEBUG_STREAMS
|
||||
long IStreamStack.InstanceId { get; set; }
|
||||
#endif
|
||||
int IStreamStack.DefaultBufferSize { get; set; }
|
||||
|
||||
Stream IStreamStack.BaseStream() => _stream;
|
||||
|
||||
int IStreamStack.BufferSize
|
||||
{
|
||||
get => 0;
|
||||
set { return; }
|
||||
}
|
||||
int IStreamStack.BufferPosition
|
||||
{
|
||||
get => 0;
|
||||
set { return; }
|
||||
}
|
||||
|
||||
void IStreamStack.SetPosition(long position) { }
|
||||
|
||||
private readonly PkwareTraditionalEncryptionData _encryptor;
|
||||
private readonly CryptoMode _mode;
|
||||
private readonly Stream _stream;
|
||||
@@ -25,6 +46,10 @@ internal class PkwareTraditionalCryptoStream : Stream
|
||||
_encryptor = encryptor;
|
||||
_stream = stream;
|
||||
_mode = mode;
|
||||
|
||||
#if DEBUG_STREAMS
|
||||
this.DebugConstruct(typeof(PkwareTraditionalCryptoStream));
|
||||
#endif
|
||||
}
|
||||
|
||||
public override bool CanRead => (_mode == CryptoMode.Decrypt);
|
||||
@@ -100,6 +125,9 @@ internal class PkwareTraditionalCryptoStream : Stream
|
||||
return;
|
||||
}
|
||||
_isDisposed = true;
|
||||
#if DEBUG_STREAMS
|
||||
this.DebugDispose(typeof(PkwareTraditionalCryptoStream));
|
||||
#endif
|
||||
base.Dispose(disposing);
|
||||
_stream.Dispose();
|
||||
}
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
using System.IO;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
using SharpCompress.IO;
|
||||
|
||||
namespace SharpCompress.Common.Zip;
|
||||
|
||||
@@ -8,18 +7,13 @@ internal class SeekableZipFilePart : ZipFilePart
|
||||
{
|
||||
private bool _isLocalHeaderLoaded;
|
||||
private readonly SeekableZipHeaderFactory _headerFactory;
|
||||
private readonly DirectoryEntryHeader _directoryEntryHeader;
|
||||
|
||||
internal SeekableZipFilePart(
|
||||
SeekableZipHeaderFactory headerFactory,
|
||||
DirectoryEntryHeader header,
|
||||
Stream stream
|
||||
)
|
||||
: base(header, stream)
|
||||
{
|
||||
_headerFactory = headerFactory;
|
||||
_directoryEntryHeader = header;
|
||||
}
|
||||
: base(header, stream) => _headerFactory = headerFactory;
|
||||
|
||||
internal override Stream GetCompressedStream()
|
||||
{
|
||||
@@ -31,29 +25,13 @@ internal class SeekableZipFilePart : ZipFilePart
|
||||
return base.GetCompressedStream();
|
||||
}
|
||||
|
||||
internal string? Comment => ((DirectoryEntryHeader)Header).Comment;
|
||||
|
||||
private void LoadLocalHeader()
|
||||
{
|
||||
var hasData = Header.HasData;
|
||||
Header = _headerFactory.GetLocalHeader(BaseStream, ((DirectoryEntryHeader)Header));
|
||||
Header.HasData = hasData;
|
||||
}
|
||||
private void LoadLocalHeader() =>
|
||||
Header = _headerFactory.GetLocalHeader(BaseStream, (DirectoryEntryHeader)Header);
|
||||
|
||||
protected override Stream CreateBaseStream()
|
||||
{
|
||||
BaseStream.Position = Header.DataStartPosition.NotNull();
|
||||
|
||||
if (
|
||||
(Header.CompressedSize == 0)
|
||||
&& FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor)
|
||||
&& _directoryEntryHeader.HasData
|
||||
&& (_directoryEntryHeader.CompressedSize != 0)
|
||||
)
|
||||
{
|
||||
return new ReadOnlySubStream(BaseStream, _directoryEntryHeader.CompressedSize);
|
||||
}
|
||||
|
||||
return BaseStream;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -149,6 +149,18 @@ internal sealed class SeekableZipHeaderFactory : ZipHeaderFactory
|
||||
{
|
||||
throw new InvalidOperationException();
|
||||
}
|
||||
|
||||
// populate fields only known from the DirectoryEntryHeader
|
||||
localEntryHeader.HasData = directoryEntryHeader.HasData;
|
||||
localEntryHeader.ExternalFileAttributes = directoryEntryHeader.ExternalFileAttributes;
|
||||
localEntryHeader.Comment = directoryEntryHeader.Comment;
|
||||
|
||||
if (FlagUtility.HasFlag(localEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor))
|
||||
{
|
||||
localEntryHeader.Crc = directoryEntryHeader.Crc;
|
||||
localEntryHeader.CompressedSize = directoryEntryHeader.CompressedSize;
|
||||
localEntryHeader.UncompressedSize = directoryEntryHeader.UncompressedSize;
|
||||
}
|
||||
return localEntryHeader;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
using System.IO;
|
||||
using System.Net.Sockets;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
using SharpCompress.Compressors.Deflate;
|
||||
using SharpCompress.IO;
|
||||
@@ -27,12 +26,12 @@ internal sealed class StreamingZipFilePart : ZipFilePart
|
||||
);
|
||||
if (LeaveStreamOpen)
|
||||
{
|
||||
return NonDisposingStream.Create(_decompressionStream);
|
||||
return SharpCompressStream.Create(_decompressionStream, leaveOpen: true);
|
||||
}
|
||||
return _decompressionStream;
|
||||
}
|
||||
|
||||
internal BinaryReader FixStreamedFileLocation(ref RewindableStream rewindableStream)
|
||||
internal BinaryReader FixStreamedFileLocation(ref SharpCompressStream rewindableStream)
|
||||
{
|
||||
if (Header.IsDirectory)
|
||||
{
|
||||
@@ -50,7 +49,7 @@ internal sealed class StreamingZipFilePart : ZipFilePart
|
||||
|
||||
if (_decompressionStream is DeflateStream deflateStream)
|
||||
{
|
||||
rewindableStream.Rewind(deflateStream.InputBuffer);
|
||||
((IStreamStack)rewindableStream).StackSeek(0);
|
||||
}
|
||||
|
||||
Skipped = true;
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
@@ -19,37 +20,42 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
|
||||
|
||||
internal IEnumerable<ZipHeader> ReadStreamHeader(Stream stream)
|
||||
{
|
||||
RewindableStream rewindableStream;
|
||||
if (stream is not SharpCompressStream) //ensure the stream is already a SharpCompressStream. So the buffer/size will already be set
|
||||
{
|
||||
//the original code wrapped this with RewindableStream. Wrap with SharpCompressStream as we can get the buffer size
|
||||
if (stream is SourceStream src)
|
||||
{
|
||||
stream = new SharpCompressStream(
|
||||
stream,
|
||||
src.ReaderOptions.LeaveStreamOpen,
|
||||
bufferSize: src.ReaderOptions.BufferSize
|
||||
);
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new ArgumentException("Stream must be a SharpCompressStream", nameof(stream));
|
||||
}
|
||||
}
|
||||
var rewindableStream = (SharpCompressStream)stream;
|
||||
|
||||
if (stream is RewindableStream rs)
|
||||
{
|
||||
rewindableStream = rs;
|
||||
}
|
||||
else
|
||||
{
|
||||
rewindableStream = new RewindableStream(stream);
|
||||
}
|
||||
while (true)
|
||||
{
|
||||
ZipHeader? header;
|
||||
var reader = new BinaryReader(rewindableStream);
|
||||
uint headerBytes = 0;
|
||||
if (
|
||||
_lastEntryHeader != null
|
||||
&& (
|
||||
FlagUtility.HasFlag(_lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor)
|
||||
|| _lastEntryHeader.IsZip64
|
||||
)
|
||||
&& FlagUtility.HasFlag(_lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor)
|
||||
)
|
||||
{
|
||||
if (_lastEntryHeader.Part is null)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
reader = ((StreamingZipFilePart)_lastEntryHeader.Part).FixStreamedFileLocation(
|
||||
ref rewindableStream
|
||||
);
|
||||
|
||||
// removed requirement for FixStreamedFileLocation()
|
||||
|
||||
var pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;
|
||||
|
||||
var crc = reader.ReadUInt32();
|
||||
if (crc == POST_DATA_DESCRIPTOR)
|
||||
{
|
||||
@@ -57,12 +63,69 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
|
||||
}
|
||||
_lastEntryHeader.Crc = crc;
|
||||
|
||||
//attempt 32bit read
|
||||
ulong compSize = reader.ReadUInt32();
|
||||
ulong uncompSize = reader.ReadUInt32();
|
||||
headerBytes = reader.ReadUInt32();
|
||||
|
||||
//check for zip64 sentinel or unexpected header
|
||||
bool isSentinel = compSize == 0xFFFFFFFF || uncompSize == 0xFFFFFFFF;
|
||||
bool isHeader = headerBytes == 0x04034b50 || headerBytes == 0x02014b50;
|
||||
|
||||
if (!isHeader && !isSentinel)
|
||||
{
|
||||
//reshuffle into 64-bit values
|
||||
compSize = (uncompSize << 32) | compSize;
|
||||
uncompSize = ((ulong)headerBytes << 32) | reader.ReadUInt32();
|
||||
headerBytes = reader.ReadUInt32();
|
||||
}
|
||||
else if (isSentinel)
|
||||
{
|
||||
//standards-compliant zip64 descriptor
|
||||
compSize = reader.ReadUInt64();
|
||||
uncompSize = reader.ReadUInt64();
|
||||
}
|
||||
|
||||
_lastEntryHeader.CompressedSize = (long)compSize;
|
||||
_lastEntryHeader.UncompressedSize = (long)uncompSize;
|
||||
|
||||
if (pos.HasValue)
|
||||
{
|
||||
_lastEntryHeader.DataStartPosition = pos - _lastEntryHeader.CompressedSize;
|
||||
}
|
||||
}
|
||||
else if (_lastEntryHeader != null && _lastEntryHeader.IsZip64)
|
||||
{
|
||||
if (_lastEntryHeader.Part is null)
|
||||
continue;
|
||||
|
||||
//reader = ((StreamingZipFilePart)_lastEntryHeader.Part).FixStreamedFileLocation(
|
||||
// ref rewindableStream
|
||||
//);
|
||||
|
||||
var pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;
|
||||
|
||||
headerBytes = reader.ReadUInt32();
|
||||
|
||||
var version = reader.ReadUInt16();
|
||||
var flags = (HeaderFlags)reader.ReadUInt16();
|
||||
var compressionMethod = (ZipCompressionMethod)reader.ReadUInt16();
|
||||
var lastModifiedDate = reader.ReadUInt16();
|
||||
var lastModifiedTime = reader.ReadUInt16();
|
||||
|
||||
var crc = reader.ReadUInt32();
|
||||
|
||||
if (crc == POST_DATA_DESCRIPTOR)
|
||||
{
|
||||
crc = reader.ReadUInt32();
|
||||
}
|
||||
_lastEntryHeader.Crc = crc;
|
||||
|
||||
// The DataDescriptor can be either 64bit or 32bit
|
||||
var compressed_size = reader.ReadUInt32();
|
||||
var uncompressed_size = reader.ReadUInt32();
|
||||
|
||||
// Check if we have header or 64bit DataDescriptor
|
||||
headerBytes = reader.ReadUInt32();
|
||||
var test_header = !(headerBytes == 0x04034b50 || headerBytes == 0x02014b50);
|
||||
|
||||
var test_64bit = ((long)uncompressed_size << 32) | compressed_size;
|
||||
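
The rewritten descriptor handling above has to disambiguate how the post-data descriptor was written, because the sizes can be stored as 32-bit values, as 64-bit values behind 0xFFFFFFFF sentinels, or as raw 64-bit values with no sentinel at all. A sketch of the distinction the branch draws; the helper names are illustrative, only the signature constants come from the ZIP format:

// Decision sketch for the post-data descriptor, mirroring the branch above.
internal static class DataDescriptorSketch
{
    private const uint LocalHeader = 0x04034b50;
    private const uint CentralHeader = 0x02014b50;

    // Layouts after the 4-byte CRC:
    //  (a) 32-bit:  [compSize:4][uncompSize:4] followed by the next header signature
    //  (b) zip64 with 0xFFFFFFFF sentinels in the 32-bit slots, then two 8-byte sizes
    //  (c) zip64 written directly as two 8-byte sizes (no sentinel), detected
    //      because the third 4-byte read is not a header signature either
    public static bool LooksLikeHeader(uint value) =>
        value is LocalHeader or CentralHeader;

    public static bool IsZip64Sentinel(uint compSize32, uint uncompSize32) =>
        compSize32 == 0xFFFFFFFF || uncompSize32 == 0xFFFFFFFF;
}
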
@@ -80,6 +143,9 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
|
||||
if (pos.HasValue)
|
||||
{
|
||||
_lastEntryHeader.DataStartPosition = pos - _lastEntryHeader.CompressedSize;
|
||||
|
||||
// 4 = First 4 bytes of the entry header (i.e. 50 4B 03 04)
|
||||
rewindableStream.Position = pos.Value + 4;
|
||||
}
|
||||
}
|
||||
else
|
||||
@@ -88,7 +154,7 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
|
||||
}
|
||||
|
||||
_lastEntryHeader = null;
|
||||
header = ReadHeader(headerBytes, reader);
|
||||
var header = ReadHeader(headerBytes, reader);
|
||||
if (header is null)
|
||||
{
|
||||
yield break;
|
||||
@@ -120,16 +186,11 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
|
||||
} // Check if zip is streaming ( Length is 0 and is declared in PostDataDescriptor )
|
||||
else if (local_header.Flags.HasFlag(HeaderFlags.UsePostDataDescriptor))
|
||||
{
|
||||
var isRecording = rewindableStream.IsRecording;
|
||||
if (!isRecording)
|
||||
{
|
||||
rewindableStream.StartRecording();
|
||||
}
|
||||
var nextHeaderBytes = reader.ReadUInt32();
|
||||
((IStreamStack)rewindableStream).Rewind(sizeof(uint));
|
||||
|
||||
// Check if next data is PostDataDescriptor, streamed file with 0 length
|
||||
header.HasData = !IsHeader(nextHeaderBytes);
|
||||
rewindableStream.Rewind(!isRecording);
|
||||
}
|
||||
else // We are not streaming and compressed size is 0, we have no data
|
||||
{
|
||||
|
||||
@@ -2,11 +2,32 @@ using System;
|
||||
using System.Buffers.Binary;
|
||||
using System.IO;
|
||||
using System.Security.Cryptography;
|
||||
using SharpCompress.IO;
|
||||
|
||||
namespace SharpCompress.Common.Zip;
|
||||
|
||||
internal class WinzipAesCryptoStream : Stream
|
||||
internal class WinzipAesCryptoStream : Stream, IStreamStack
|
||||
{
|
||||
#if DEBUG_STREAMS
|
||||
long IStreamStack.InstanceId { get; set; }
|
||||
#endif
|
||||
int IStreamStack.DefaultBufferSize { get; set; }
|
||||
|
||||
Stream IStreamStack.BaseStream() => _stream;
|
||||
|
||||
int IStreamStack.BufferSize
|
||||
{
|
||||
get => 0;
|
||||
set { }
|
||||
}
|
||||
int IStreamStack.BufferPosition
|
||||
{
|
||||
get => 0;
|
||||
set { }
|
||||
}
|
||||
|
||||
void IStreamStack.SetPosition(long position) { }
|
||||
|
||||
private const int BLOCK_SIZE_IN_BYTES = 16;
|
||||
private readonly SymmetricAlgorithm _cipher;
|
||||
private readonly byte[] _counter = new byte[BLOCK_SIZE_IN_BYTES];
|
||||
@@ -27,6 +48,10 @@ internal class WinzipAesCryptoStream : Stream
|
||||
_stream = stream;
|
||||
_totalBytesLeftToRead = length;
|
||||
|
||||
#if DEBUG_STREAMS
|
||||
this.DebugConstruct(typeof(WinzipAesCryptoStream));
|
||||
#endif
|
||||
|
||||
_cipher = CreateCipher(winzipAesEncryptionData);
|
||||
|
||||
var iv = new byte[BLOCK_SIZE_IN_BYTES];
|
||||
@@ -64,6 +89,9 @@ internal class WinzipAesCryptoStream : Stream
|
||||
return;
|
||||
}
|
||||
_isDisposed = true;
|
||||
#if DEBUG_STREAMS
|
||||
this.DebugDispose(typeof(WinzipAesCryptoStream));
|
||||
#endif
|
||||
if (disposing)
|
||||
{
|
||||
//read out last 10 auth bytes
|
||||
|
||||
@@ -4,5 +4,5 @@ internal enum WinzipAesKeySize
|
||||
{
|
||||
KeySize128 = 1,
|
||||
KeySize192 = 2,
|
||||
KeySize256 = 3
|
||||
KeySize256 = 3,
|
||||
}
Some files were not shown because too many files have changed in this diff.